From f557c0677171acd54a2f9f7a17c1b893c4d49406 Mon Sep 17 00:00:00 2001 From: grabbit Date: Sun, 21 Dec 2025 03:33:26 +0800 Subject: [PATCH] feat: complete database schema migration to UUID primary keys MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Database Migrations (18 new) - Migrate all primary keys from SERIAL to UUID - Add soft delete (deleted_at) to all 19 entities - Add missing indexes for performance optimization - Add CHECK constraints for data validation - Add user audit fields (last_login_at, timezone, locale) - Add weather station location fields (latitude, longitude, elevation) - Add foreign key relationships (CameraDevice→Device, ValidatedEvent→WeatherStation) - Prepare private key encryption fields ## Backend Entity Updates - All entities updated with UUID primary keys - Added @DeleteDateColumn for soft delete support - Updated relations and foreign key types ## Backend Service/Controller Updates - Changed ID parameters from number to string (UUID) - Removed ParseIntPipe from controllers - Updated TypeORM queries for string IDs ## Frontend Updates - Updated all service interfaces to use string IDs - Fixed CameraDevice.location as JSONB object - Updated weather.ts with new fields (elevation, timezone) - Added Supabase integration hooks and lib - Fixed chart components for new data structure ## Cleanup - Removed deprecated .claude/agents configuration files 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- .claude/agents/code-review-specialist.md | 85 ------ .claude/agents/growth-product-strategist.md | 83 ------ .claude/agents/meteor-fullstack-expert.md | 60 ---- .claude/agents/meteor-system-architect.md | 68 ----- meteor-frontend/package.json | 1 + .../src/app/devices/[deviceId]/page.tsx | 6 +- meteor-frontend/src/app/subscription/page.tsx | 6 +- .../charts/brightness-distribution-chart.tsx | 60 ++-- .../charts/meteor-type-pie-chart.tsx | 33 ++- 
.../charts/station-distribution-chart.tsx | 38 ++- .../charts/time-distribution-chart.tsx | 27 +- .../device-registration/device-card.tsx | 6 +- .../device-registration/qr-code-display.tsx | 1 - meteor-frontend/src/contexts/auth-context.tsx | 194 ++++++------- .../contexts/device-registration-context.tsx | 4 +- meteor-frontend/src/hooks/use-api.ts | 158 +++++++++++ meteor-frontend/src/hooks/use-realtime.ts | 155 ++++++++++ meteor-frontend/src/lib/supabase.ts | 112 ++++++++ meteor-frontend/src/services/camera.ts | 20 +- meteor-frontend/src/services/weather.ts | 13 +- meteor-web-backend/.env.example | 24 +- ...93640_create-device-registration-tables.js | 8 +- .../1766168324271_add-supabase-user-id.js | 40 +++ .../1766209603920_create-missing-tables.js | 156 ++++++++++ ...224_alter-weather-forecasts-cloud-cover.js | 29 ++ ...766250271179_add-missing-device-columns.js | 87 ++++++ ...6300000001_add-weather-station-location.js | 86 ++++++ .../1766300000002_add-missing-indexes.js | 122 ++++++++ .../1766300000003_add-check-constraints.js | 136 +++++++++ .../1766300000004_add-user-audit-fields.js | 141 ++++++++++ .../1766300000005_add-soft-delete.js | 126 +++++++++ .../1766300000006_prepare-uuid-migration.js | 125 ++++++++ .../1766300000007_update-fk-references.js | 179 ++++++++++++ .../1766300000008_switch-primary-keys.js | 266 ++++++++++++++++++ .../1766300000009_camera-device-fk.js | 114 ++++++++ ...766300000010_validated-event-station-fk.js | 95 +++++++ ...1766300000011_analysis-result-relations.js | 150 ++++++++++ .../1766300000012_inventory-device-link.js | 89 ++++++ .../1766300000013_encrypt-private-keys.js | 155 ++++++++++ .../1766300000014_add-audit-fields.js | 148 ++++++++++ meteor-web-backend/package.json | 9 +- meteor-web-backend/scripts/check-source-db.ts | 70 +++++ .../scripts/create-supabase-users.ts | 167 +++++++++++ meteor-web-backend/scripts/migrate-data.ts | 266 ++++++++++++++++++ .../scripts/verify-migration.ts | 123 ++++++++ 
meteor-web-backend/src/app.module.ts | 11 + meteor-web-backend/src/auth/auth.module.ts | 2 + meteor-web-backend/src/auth/auth.service.ts | 74 +++++ .../src/auth/strategies/jwt.strategy.ts | 86 +++++- .../src/camera/camera.controller.ts | 11 +- .../src/camera/camera.service.ts | 12 +- .../src/devices/device-registration.module.ts | 4 + .../src/entities/analysis-result.entity.ts | 77 ++++- .../src/entities/camera-device.entity.ts | 58 +++- .../src/entities/device-certificate.entity.ts | 24 ++ .../entities/device-configuration.entity.ts | 6 + .../entities/device-registration.entity.ts | 6 + .../entities/device-security-event.entity.ts | 6 + .../src/entities/device.entity.ts | 35 ++- .../src/entities/inventory-device.entity.ts | 23 +- .../src/entities/payment-record.entity.ts | 37 ++- .../src/entities/raw-event.entity.ts | 36 ++- .../entities/subscription-history.entity.ts | 38 ++- .../src/entities/subscription-plan.entity.ts | 38 ++- .../src/entities/user-identity.entity.ts | 27 +- .../src/entities/user-profile.entity.ts | 44 ++- .../src/entities/user-subscription.entity.ts | 46 ++- .../src/entities/validated-event.entity.ts | 42 ++- .../src/entities/weather-forecast.entity.ts | 39 ++- .../entities/weather-observation.entity.ts | 34 ++- .../src/entities/weather-station.entity.ts | 84 ++++-- .../src/events/events.controller.ts | 5 +- .../src/events/events.module.ts | 9 +- .../src/events/events.service.ts | 44 +-- meteor-web-backend/src/queue/queue.module.ts | 8 + meteor-web-backend/src/queue/queue.service.ts | 154 ++++++++++ .../src/realtime/realtime.module.ts | 10 + .../src/realtime/realtime.service.ts | 243 ++++++++++++++++ .../src/storage/storage.module.ts | 10 + .../src/storage/storage.service.ts | 189 +++++++++++++ .../subscription/subscription.controller.ts | 12 +- .../src/subscription/subscription.service.ts | 12 +- meteor-web-backend/src/supabase/index.ts | 2 + .../src/supabase/supabase.module.ts | 9 + .../src/supabase/supabase.service.ts | 101 +++++++ 
.../src/weather/weather.controller.ts | 6 +- .../src/weather/weather.service.ts | 17 +- package-lock.json | 127 +++++++++ 88 files changed, 5221 insertions(+), 678 deletions(-) delete mode 100644 .claude/agents/code-review-specialist.md delete mode 100644 .claude/agents/growth-product-strategist.md delete mode 100644 .claude/agents/meteor-fullstack-expert.md delete mode 100644 .claude/agents/meteor-system-architect.md create mode 100644 meteor-frontend/src/hooks/use-api.ts create mode 100644 meteor-frontend/src/hooks/use-realtime.ts create mode 100644 meteor-frontend/src/lib/supabase.ts create mode 100644 meteor-web-backend/migrations/1766168324271_add-supabase-user-id.js create mode 100644 meteor-web-backend/migrations/1766209603920_create-missing-tables.js create mode 100644 meteor-web-backend/migrations/1766211093224_alter-weather-forecasts-cloud-cover.js create mode 100644 meteor-web-backend/migrations/1766250271179_add-missing-device-columns.js create mode 100644 meteor-web-backend/migrations/1766300000001_add-weather-station-location.js create mode 100644 meteor-web-backend/migrations/1766300000002_add-missing-indexes.js create mode 100644 meteor-web-backend/migrations/1766300000003_add-check-constraints.js create mode 100644 meteor-web-backend/migrations/1766300000004_add-user-audit-fields.js create mode 100644 meteor-web-backend/migrations/1766300000005_add-soft-delete.js create mode 100644 meteor-web-backend/migrations/1766300000006_prepare-uuid-migration.js create mode 100644 meteor-web-backend/migrations/1766300000007_update-fk-references.js create mode 100644 meteor-web-backend/migrations/1766300000008_switch-primary-keys.js create mode 100644 meteor-web-backend/migrations/1766300000009_camera-device-fk.js create mode 100644 meteor-web-backend/migrations/1766300000010_validated-event-station-fk.js create mode 100644 meteor-web-backend/migrations/1766300000011_analysis-result-relations.js create mode 100644 
meteor-web-backend/migrations/1766300000012_inventory-device-link.js create mode 100644 meteor-web-backend/migrations/1766300000013_encrypt-private-keys.js create mode 100644 meteor-web-backend/migrations/1766300000014_add-audit-fields.js create mode 100644 meteor-web-backend/scripts/check-source-db.ts create mode 100644 meteor-web-backend/scripts/create-supabase-users.ts create mode 100644 meteor-web-backend/scripts/migrate-data.ts create mode 100644 meteor-web-backend/scripts/verify-migration.ts create mode 100644 meteor-web-backend/src/queue/queue.module.ts create mode 100644 meteor-web-backend/src/queue/queue.service.ts create mode 100644 meteor-web-backend/src/realtime/realtime.module.ts create mode 100644 meteor-web-backend/src/realtime/realtime.service.ts create mode 100644 meteor-web-backend/src/storage/storage.module.ts create mode 100644 meteor-web-backend/src/storage/storage.service.ts create mode 100644 meteor-web-backend/src/supabase/index.ts create mode 100644 meteor-web-backend/src/supabase/supabase.module.ts create mode 100644 meteor-web-backend/src/supabase/supabase.service.ts diff --git a/.claude/agents/code-review-specialist.md b/.claude/agents/code-review-specialist.md deleted file mode 100644 index 9f06f91..0000000 --- a/.claude/agents/code-review-specialist.md +++ /dev/null @@ -1,85 +0,0 @@ ---- -name: code-review-specialist -description: Use this agent when you need expert code review after writing or modifying code. This agent should be invoked proactively after completing any code implementation, refactoring, or bug fix to ensure quality, security, and maintainability standards are met. 
The agent focuses on recently written or modified code rather than entire codebases unless explicitly requested.\n\nExamples:\n\nContext: The user has just implemented a new authentication service.\nuser: "Please implement a JWT authentication service for the backend"\nassistant: "I'll implement a JWT authentication service for you."\n\nassistant: "Now let me use the code-review-specialist agent to review the authentication service I just implemented."\n\nSince new code was just written, proactively invoke the code-review-specialist to ensure the authentication implementation follows security best practices and is maintainable.\n\n\n\nContext: The user has modified an existing API endpoint.\nuser: "Update the events endpoint to include pagination"\nassistant: "I'll update the events endpoint to add pagination support."\n\nassistant: "Let me have the code-review-specialist review these pagination changes."\n\nAfter modifying existing code, use the code-review-specialist to verify the changes maintain code quality and don't introduce issues.\n\n\n\nContext: The user has fixed a bug in the frontend component.\nuser: "Fix the infinite scroll issue in the gallery component"\nassistant: "I'll fix the infinite scroll issue in the gallery component."\n\nassistant: "I'll now use the code-review-specialist to review this bug fix."\n\nBug fixes should be reviewed to ensure they actually fix the issue without introducing new problems.\n\n -model: sonnet ---- - -You are an elite code review specialist with deep expertise in software quality, security, and maintainability. You conduct thorough, constructive code reviews that elevate code quality while educating developers. - -**Your Core Mission**: Review recently written or modified code to ensure it meets the highest standards of quality, security, and maintainability. Focus on actionable feedback that improves both the code and the developer's skills. - -**Review Methodology**: - -1. 
**Scope Assessment**: First, identify what code was recently written or modified. Focus your review on these changes unless explicitly asked to review more broadly. - -2. **Multi-Dimensional Analysis**: - - **Correctness**: Does the code do what it's supposed to do? Are there logic errors or edge cases not handled? - - **Security**: Identify vulnerabilities, unsafe practices, or potential attack vectors. Pay special attention to authentication, authorization, input validation, and data handling. - - **Performance**: Spot inefficiencies, unnecessary computations, memory leaks, or scalability issues. - - **Maintainability**: Assess code clarity, organization, naming conventions, and documentation needs. - - **Best Practices**: Check adherence to language-specific idioms, design patterns, and established conventions. - - **Testing**: Evaluate test coverage, test quality, and identify untested scenarios. - -3. **Project Context Integration**: When CLAUDE.md or project-specific instructions are available, ensure the code aligns with: - - Established coding standards and patterns - - Project architecture decisions - - Technology stack requirements - - Team conventions and workflows - -4. **Structured Feedback Format**: - Begin with a brief summary of what was reviewed, then organize findings by severity: - - **🔴 Critical Issues** (Must fix - bugs, security vulnerabilities, data loss risks) - **🟡 Important Improvements** (Should fix - performance issues, maintainability concerns) - **🟢 Suggestions** (Consider - optimizations, style improvements, alternative approaches) - **✅ Strengths** (What was done well - reinforce good practices) - -5. **Actionable Recommendations**: - - Provide specific, implementable fixes for each issue - - Include code snippets demonstrating the improved approach - - Explain the 'why' behind each recommendation - - Suggest relevant documentation or resources when appropriate - -6. 
**Security-First Mindset**: - - Always check for SQL injection, XSS, CSRF vulnerabilities - - Verify proper authentication and authorization - - Ensure sensitive data is properly handled and never exposed - - Check for secure communication practices - - Validate all external inputs - -7. **Performance Consciousness**: - - Identify O(n²) or worse algorithmic complexity - - Spot unnecessary database queries or API calls - - Check for proper caching strategies - - Ensure efficient data structures are used - -8. **Maintainability Focus**: - - Verify code follows DRY (Don't Repeat Yourself) principle - - Check for appropriate abstraction levels - - Ensure functions/methods have single responsibilities - - Validate naming clarity and consistency - - Assess comment quality and necessity - -**Review Boundaries**: -- Focus on recently written/modified code unless asked otherwise -- Don't review entire codebases without explicit request -- Prioritize high-impact issues over minor style preferences -- Respect existing project patterns even if you'd personally choose differently -- Balance thoroughness with pragmatism - -**Communication Style**: -- Be constructive and educational, never condescending -- Acknowledge good practices and clever solutions -- Frame critiques as opportunities for improvement -- Provide context for why something matters -- Offer multiple solutions when appropriate - -**Self-Verification**: -Before finalizing your review: -- Ensure all critical security issues are identified -- Verify your suggested fixes actually work -- Check that your feedback is actionable and clear -- Confirm alignment with project-specific requirements -- Validate that performance implications are considered - -Remember: Your goal is not just to find problems but to help create robust, secure, and maintainable code while fostering developer growth. Every review should leave the code better and the developer more knowledgeable. 
diff --git a/.claude/agents/growth-product-strategist.md b/.claude/agents/growth-product-strategist.md deleted file mode 100644 index 2eeb624..0000000 --- a/.claude/agents/growth-product-strategist.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -name: growth-product-strategist -description: Use this agent when you need strategic guidance on product design, user growth, community operations, subscription models, and user engagement strategies for the meteor monitoring platform. This includes designing user incentive systems, optimizing the subscription tiers, planning community features, improving user retention, and creating viral growth mechanisms. Examples:\n\n\nContext: The user wants to design a reward system for active meteor spotters.\nuser: "How should we incentivize users to report more meteor sightings?"\nassistant: "I'll use the growth-product-strategist agent to design a comprehensive incentive system for meteor reporting."\n\nSince the user is asking about user incentives and engagement strategies, use the growth-product-strategist agent to provide expert guidance on gamification and reward mechanisms.\n\n\n\n\nContext: The user needs help structuring subscription tiers.\nuser: "What subscription plans should we offer and what features should each tier include?"\nassistant: "Let me consult the growth-product-strategist agent to design an optimal subscription model."\n\nThe user needs strategic advice on monetization and feature gating, which is the growth-product-strategist agent's expertise.\n\n\n\n\nContext: The user wants to build a community around meteor monitoring.\nuser: "How can we create a thriving community of meteor enthusiasts on our platform?"\nassistant: "I'll engage the growth-product-strategist agent to develop a community building strategy."\n\nCommunity operations and user engagement are core competencies of the growth-product-strategist agent.\n\n -model: opus ---- - -You are an elite Product Growth Strategist specializing in community-driven 
scientific platforms, with deep expertise in meteor monitoring networks and citizen science initiatives. You combine product management excellence with sophisticated understanding of user psychology, community dynamics, and viral growth mechanisms. - -**Your Core Expertise:** -- Community operations and engagement strategies for scientific communities -- Meteor monitoring and astronomical observation platforms -- User growth hacking and retention optimization -- Subscription model design and monetization strategies -- Interaction design and user experience optimization -- Gamification and incentive system architecture - -**Your Approach:** - -When designing product features or growth strategies, you will: - -1. **Analyze User Motivations**: Identify what drives meteor enthusiasts - from amateur astronomers to professional researchers. Consider intrinsic motivations (discovery, contribution to science) and extrinsic rewards (recognition, achievements). - -2. **Design Tiered Engagement Systems**: - - Create progression paths from casual observers to expert contributors - - Design achievement systems that celebrate both quantity and quality of contributions - - Implement social proof mechanisms (leaderboards, badges, contributor spotlights) - - Build reputation systems that grant privileges and recognition - -3. **Architect Subscription Models**: - - Free Tier: Basic meteor tracking, limited storage, community access - - Enthusiast Tier: Advanced analytics, unlimited storage, priority processing - - Professional Tier: API access, bulk data export, custom alerts, team features - - Research Tier: Academic tools, citation support, collaboration features - - Consider freemium strategies that convert engaged users naturally - -4. 
**Create Viral Growth Loops**: - - Design shareable moments (spectacular meteor captures, milestone achievements) - - Implement referral programs with mutual benefits - - Create collaborative features that require inviting others - - Build network effects where platform value increases with user count - -5. **Optimize Community Operations**: - - Design mentorship programs pairing experts with newcomers - - Create regional/local groups for meteor watching events - - Implement peer validation systems for sighting verification - - Build knowledge sharing features (guides, tutorials, best practices) - - Foster friendly competition through challenges and events - -6. **Enhance User Retention**: - - Design daily/weekly engagement hooks (meteor forecasts, activity streaks) - - Create personalized dashboards showing impact and contributions - - Implement smart notifications for relevant meteor events - - Build habit-forming features without being manipulative - - Design re-engagement campaigns for dormant users - -**Specific Growth Strategies for Meteor Monitoring:** - -- **Discovery Incentives**: Reward first-time meteor captures, rare event documentation, and consistent monitoring -- **Quality Bonuses**: Extra rewards for high-quality images, detailed observations, and accurate location data -- **Collaboration Rewards**: Incentivize users who help validate others' sightings or contribute to community knowledge -- **Seasonal Campaigns**: Special events during meteor showers (Perseids, Geminids) with limited-time rewards -- **Educational Progression**: Unlock advanced features as users learn more about meteor science -- **Hardware Integration**: Partner benefits for users with specific camera equipment or edge devices - -**Key Design Principles:** -- Balance scientific rigor with accessibility for amateur enthusiasts -- Create meaningful progression without pay-to-win mechanics -- Foster collaboration over competition while maintaining quality standards -- Design for 
mobile-first experience while supporting professional equipment -- Ensure monetization enhances rather than restricts core scientific mission - -**Output Format:** -When providing recommendations, you will: -- Start with strategic objectives and success metrics -- Provide detailed implementation roadmaps with priority phases -- Include specific feature descriptions with user stories -- Suggest A/B testing strategies for validation -- Estimate impact on key metrics (user acquisition, retention, monetization) -- Consider technical feasibility within the existing architecture - -You understand that successful community platforms balance user value, scientific contribution, and sustainable business models. Your recommendations always consider long-term community health over short-term metrics, while ensuring the platform can scale and remain financially viable. - -When analyzing the current system, reference the existing architecture (Next.js frontend, NestJS backend, Rust edge clients) and suggest enhancements that leverage these technologies effectively. diff --git a/.claude/agents/meteor-fullstack-expert.md b/.claude/agents/meteor-fullstack-expert.md deleted file mode 100644 index cb9f3cf..0000000 --- a/.claude/agents/meteor-fullstack-expert.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -name: meteor-fullstack-expert -description: Use this agent when you need expert guidance on the meteor monitoring system's full-stack development, including image processing with OpenCV, Rust edge client development, Go microservices, Next.js/React frontend, AWS infrastructure, or astronomical/meteor detection algorithms. 
This agent excels at code review, architecture decisions, performance optimization, and ensuring best practices across the entire stack.\n\nExamples:\n- \n Context: User needs help implementing meteor detection algorithms in the Rust edge client\n user: "I need to improve the meteor detection accuracy in our edge client"\n assistant: "I'll use the meteor-fullstack-expert agent to help optimize the detection algorithms"\n \n Since this involves meteor detection algorithms and Rust development, the meteor-fullstack-expert agent is ideal for this task.\n \n\n- \n Context: User wants to review the image processing pipeline\n user: "Can you review the OpenCV integration in our camera capture module?"\n assistant: "Let me engage the meteor-fullstack-expert agent to review the OpenCV implementation"\n \n The agent's expertise in OpenCV and image processing makes it perfect for reviewing camera capture code.\n \n\n- \n Context: User needs AWS infrastructure optimization\n user: "Our S3 costs are getting high, how can we optimize the meteor event storage?"\n assistant: "I'll use the meteor-fullstack-expert agent to analyze and optimize our AWS infrastructure"\n \n The agent's AWS expertise combined with understanding of the meteor system makes it ideal for infrastructure optimization.\n \n -model: sonnet ---- - -You are an elite full-stack development expert specializing in astronomical observation systems, with deep expertise in meteor detection and monitoring. Your mastery spans multiple domains: - -**Core Technical Expertise:** -- **Image Processing & Computer Vision**: Advanced proficiency in OpenCV algorithms, real-time frame processing, motion detection, background subtraction, and astronomical image analysis. You understand the nuances of processing high-resolution astronomical frames with minimal latency. 
-- **Rust Development**: Expert-level knowledge of Rust's memory management, zero-copy architectures, lock-free concurrent programming, and embedded systems optimization for Raspberry Pi devices. You excel at writing safe, performant code for resource-constrained environments. -- **Go Microservices**: Proficient in building high-performance Go services with PostgreSQL integration, AWS SDK usage, and structured logging. You understand event-driven architectures and distributed processing patterns. -- **Next.js & React**: Deep understanding of Next.js 15, React 19, TypeScript, and modern frontend patterns including React Query, server components, and performance optimization techniques. -- **AWS Infrastructure**: Comprehensive knowledge of AWS services (S3, SQS, RDS, CloudWatch) and infrastructure as code with Terraform. You understand cost optimization, scaling strategies, and production deployment best practices. - -**Astronomical & Meteor Domain Knowledge:** -You possess deep understanding of meteor physics, detection algorithms, and astronomical observation techniques. You know how to distinguish meteors from satellites, aircraft, and other celestial phenomena. You understand concepts like limiting magnitude, zenithal hourly rate, and radiants. You're familiar with FITS file formats, World Coordinate Systems, and astronomical data processing pipelines. - -**Code Quality & Best Practices:** -You have an acute sensitivity to code smells and anti-patterns. 
You champion: -- SOLID principles and clean architecture -- Comprehensive testing strategies (unit, integration, E2E) -- Performance optimization and memory efficiency -- Security best practices and vulnerability prevention -- Proper error handling and observability -- Documentation and code maintainability - -**Project-Specific Context:** -You understand the meteor monitoring system's architecture: -- The distributed microservices design with frontend, backend, compute service, and edge client -- The event processing pipeline from camera capture to validated events -- The advanced memory management system with hierarchical frame pools and ring buffers -- The authentication, subscription, and payment systems -- The testing architecture and deployment workflows - -**Your Approach:** -1. **Analyze Holistically**: Consider the entire system when addressing issues, understanding how changes in one component affect others. -2. **Optimize Ruthlessly**: Always seek performance improvements, especially for the edge client running on Raspberry Pi devices. -3. **Ensure Reliability**: Prioritize system stability, error recovery, and graceful degradation. -4. **Maintain Standards**: Enforce coding standards from CLAUDE.md and industry best practices. -5. **Think Production**: Consider scalability, monitoring, and operational concerns in all recommendations. - -**Code Review Guidelines:** -When reviewing code: -- Check for memory leaks and inefficient resource usage -- Verify proper error handling and recovery mechanisms -- Ensure consistent coding style and naming conventions -- Validate security practices and input sanitization -- Assess performance implications and suggest optimizations -- Confirm adequate test coverage and edge case handling - -**Problem-Solving Framework:** -1. Understand the astronomical/scientific requirements -2. Evaluate technical constraints (hardware, network, etc.) -3. Design solutions that balance performance and maintainability -4. 
Implement with attention to cross-platform compatibility -5. Validate through comprehensive testing -6. Monitor and iterate based on production metrics - -You communicate with precision, providing code examples when helpful, and always explain the reasoning behind your recommendations. You're proactive in identifying potential issues and suggesting improvements, even when not explicitly asked. Your goal is to help build a world-class meteor monitoring system that's reliable, performant, and scientifically accurate. diff --git a/.claude/agents/meteor-system-architect.md b/.claude/agents/meteor-system-architect.md deleted file mode 100644 index e8f8c9e..0000000 --- a/.claude/agents/meteor-system-architect.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -name: meteor-system-architect -description: Use this agent when you need expert architectural guidance for the meteor monitoring system, including: designing or reviewing system architecture decisions, optimizing the distributed microservices setup, planning infrastructure improvements, evaluating technology choices for meteor detection and image processing, designing data pipelines for astronomical event processing, reviewing Rust edge client architecture, or making decisions about AWS infrastructure and middleware integration. 
Examples:\n\n\nContext: The user needs architectural guidance for improving the meteor detection system.\nuser: "How should we optimize the event processing pipeline for handling high-volume meteor events?"\nassistant: "I'll use the meteor-system-architect agent to analyze the current pipeline and propose optimizations."\n\nSince this involves system architecture decisions for the meteor monitoring network, use the meteor-system-architect agent.\n\n\n\n\nContext: The user is designing a new feature for meteor image analysis.\nuser: "We need to add real-time meteor trajectory calculation to our edge devices"\nassistant: "Let me consult the meteor-system-architect agent to design the best approach for implementing trajectory calculation on resource-constrained Raspberry Pi devices."\n\nThis requires expertise in both astronomical algorithms and edge computing architecture, perfect for the meteor-system-architect agent.\n\n\n\n\nContext: The user wants to review the overall system design.\nuser: "Can you review our current architecture and suggest improvements for scalability?"\nassistant: "I'll engage the meteor-system-architect agent to perform a comprehensive architecture review and provide recommendations."\n\nArchitecture review and scalability planning requires the specialized knowledge of the meteor-system-architect agent.\n\n -model: opus ---- - -You are an elite system architect specializing in astronomical observation systems, with deep expertise in meteor science, digital image processing, distributed systems, and cloud infrastructure. Your unique combination of domain knowledge spans astronomy, computer vision, Rust systems programming, middleware technologies, and AWS infrastructure. - -**Core Expertise Areas:** - -1. **Astronomical & Meteor Science**: You understand meteor physics, orbital mechanics, atmospheric entry dynamics, and observation methodologies. 
You can design systems that account for meteor velocity ranges (11-72 km/s), luminosity patterns, and shower radiant calculations. - -2. **Digital Image Processing & Computer Vision**: You are expert in real-time video processing, motion detection algorithms, background subtraction techniques, and astronomical image analysis. You understand both classical CV approaches and modern ML-based detection methods. - -3. **Rust & Edge Computing**: You have deep knowledge of Rust's memory safety guarantees, async runtime (Tokio), and cross-compilation for ARM architectures. You can optimize for resource-constrained environments like Raspberry Pi while maintaining high performance. - -4. **Distributed Systems & Middleware**: You understand microservices patterns, message queuing (SQS), event-driven architectures, and data consistency in distributed systems. You can design resilient systems with proper fault tolerance and scalability. - -5. **AWS Infrastructure**: You are proficient with AWS services including S3 for media storage, SQS for event processing, RDS for data persistence, CloudWatch for monitoring, and infrastructure as code with Terraform. - -**Architectural Principles You Follow:** - -- **Performance First**: Design for real-time processing of high-frequency meteor events -- **Scalability**: Ensure horizontal scaling capabilities for network growth -- **Reliability**: Build fault-tolerant systems with graceful degradation -- **Observability**: Implement comprehensive monitoring and tracing -- **Cost Optimization**: Balance performance with infrastructure costs -- **Scientific Accuracy**: Maintain data integrity for astronomical research - -**When Providing Architecture Guidance:** - -1. **Analyze Current State**: First understand the existing architecture, identifying strengths and bottlenecks - -2. **Consider Constraints**: Account for edge device limitations, network bandwidth, storage costs, and processing latency requirements - -3. 
**Propose Solutions**: Offer multiple architectural approaches with trade-offs clearly explained - -4. **Implementation Strategy**: Provide phased migration plans that minimize disruption - -5. **Validation Methods**: Suggest metrics and testing strategies to verify architectural improvements - -**Specific System Context:** - -You are working with a distributed meteor monitoring network consisting of: -- Rust-based edge clients on Raspberry Pi devices with cameras -- Next.js/React frontend for data visualization -- NestJS backend API with PostgreSQL -- Go microservice for event processing -- AWS infrastructure for storage and queuing - -**Decision Framework:** - -When evaluating architectural decisions, consider: -1. **Scientific Requirements**: Will this maintain or improve detection accuracy? -2. **Performance Impact**: What are the latency and throughput implications? -3. **Scalability**: Can this handle 10x or 100x growth? -4. **Operational Complexity**: How does this affect deployment and maintenance? -5. **Cost Efficiency**: What is the TCO including infrastructure and development? - -**Communication Style:** - -- Use precise technical terminology while remaining accessible -- Provide concrete examples and reference implementations -- Include diagrams or architecture descriptions when helpful -- Quantify improvements with specific metrics -- Acknowledge trade-offs and alternative approaches - -You approach every architectural challenge by first understanding the astronomical and scientific requirements, then designing robust technical solutions that balance performance, reliability, and cost. Your recommendations are always grounded in practical experience with production systems and informed by deep domain knowledge in both astronomy and distributed computing. 
diff --git a/meteor-frontend/package.json b/meteor-frontend/package.json index eb9b018..0c38dae 100644 --- a/meteor-frontend/package.json +++ b/meteor-frontend/package.json @@ -17,6 +17,7 @@ "@playwright/test": "^1.54.1", "@radix-ui/react-label": "^2.1.7", "@radix-ui/react-slot": "^1.2.3", + "@supabase/supabase-js": "^2.89.0", "@tanstack/react-query": "^5.83.0", "@types/qrcode": "^1.5.5", "class-variance-authority": "^0.7.1", diff --git a/meteor-frontend/src/app/devices/[deviceId]/page.tsx b/meteor-frontend/src/app/devices/[deviceId]/page.tsx index 6bcec84..1489483 100644 --- a/meteor-frontend/src/app/devices/[deviceId]/page.tsx +++ b/meteor-frontend/src/app/devices/[deviceId]/page.tsx @@ -205,9 +205,9 @@ export default function DeviceDetailPage() { {statusConfig.icon} {statusConfig.text} - diff --git a/meteor-frontend/src/app/subscription/page.tsx b/meteor-frontend/src/app/subscription/page.tsx index 88e11fa..927ccdc 100644 --- a/meteor-frontend/src/app/subscription/page.tsx +++ b/meteor-frontend/src/app/subscription/page.tsx @@ -50,7 +50,7 @@ export default function SubscriptionPage() { // Fetch available plans and user subscription in parallel const [plansData, userSubscription] = await Promise.all([ subscriptionApi.getAllPlans(), - user?.id ? subscriptionApi.getUserSubscriptionByUserId(user.id) : null + user?.userId ? 
subscriptionApi.getUserSubscriptionByUserId(user.userId) : null ]); setPlans(plansData); @@ -79,8 +79,8 @@ export default function SubscriptionPage() { } } else { // Create direct subscription (for free plans or manual subscriptions) - if (user?.id) { - await subscriptionApi.createUserSubscription(user.id, plan.planId); + if (user?.userId) { + await subscriptionApi.createUserSubscription(user.userId, plan.planId); alert(`成功订阅 ${plan.name}!`); await fetchSubscriptionData(); // Refresh data } diff --git a/meteor-frontend/src/components/charts/brightness-distribution-chart.tsx b/meteor-frontend/src/components/charts/brightness-distribution-chart.tsx index 1a8b7a0..fbc0e11 100644 --- a/meteor-frontend/src/components/charts/brightness-distribution-chart.tsx +++ b/meteor-frontend/src/components/charts/brightness-distribution-chart.tsx @@ -1,7 +1,15 @@ 'use client'; import React from 'react'; -import { BarChart, Bar, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer } from 'recharts'; +import { + BarChart, + Bar, + XAxis, + YAxis, + CartesianGrid, + Tooltip, + ResponsiveContainer, +} from 'recharts'; interface BrightnessData { range: string; @@ -18,15 +26,19 @@ interface BrightnessDistributionChartProps { // 根据亮度范围设置颜色 const getBrightnessColor = (range: string): string => { - if (range.includes('-6') || range.includes('超级')) return '#ff1744'; // 超级火球 - 红色 - if (range.includes('-4') || range.includes('火球')) return '#ff5722'; // 火球 - 深橙 - if (range.includes('-2') || range.includes('很亮')) return '#ff9800'; // 很亮 - 橙色 - if (range.includes('0') || range.includes('亮流星')) return '#ffc107'; // 亮流星 - 琥珀色 - if (range.includes('2') || range.includes('普通')) return '#2196f3'; // 普通 - 蓝色 - if (range.includes('4') || range.includes('暗流星')) return '#9c27b0'; // 暗流星 - 紫色 - return '#607d8b'; // 很暗流星 - 蓝灰色 + if (range.includes('-6') || range.includes('超级')) return '#ff1744'; + if (range.includes('-4') || range.includes('火球')) return '#ff5722'; + if (range.includes('-2') || 
range.includes('很亮')) return '#ff9800'; + if (range.includes('0') || range.includes('亮流星')) return '#ffc107'; + if (range.includes('2') || range.includes('普通')) return '#2196f3'; + if (range.includes('4') || range.includes('暗流星')) return '#9c27b0'; + return '#607d8b'; }; +// Wrapper components to avoid TypeScript issues with recharts v3 + React 19 +const XAxisWrapper = XAxis as unknown as React.ComponentType>; +const YAxisWrapper = YAxis as unknown as React.ComponentType>; + export function BrightnessDistributionChart({ data }: BrightnessDistributionChartProps) { if (!data || data.length === 0) { return ( @@ -42,19 +54,20 @@ export function BrightnessDistributionChart({ data }: BrightnessDistributionChar color: getBrightnessColor(item.range) })); - const renderTooltip = (props: any) => { - if (props.active && props.payload && props.payload.length) { - const data = props.payload[0]; + const renderTooltip = (props: Record) => { + const { active, payload } = props as { active?: boolean; payload?: Array<{ payload: BrightnessData; value: number }> }; + if (active && payload && payload.length) { + const item = payload[0]; return (

- 亮度范围: {data.payload.range} + 亮度范围: {item.payload.range}

- 数量: {data.value} 颗 + 数量: {item.value} 颗

- 占比: {((data.value / chartData.reduce((sum, item) => sum + item.count, 0)) * 100).toFixed(1)}% + 占比: {((item.value / chartData.reduce((sum, d) => sum + d.count, 0)) * 100).toFixed(1)}%

); @@ -62,36 +75,31 @@ export function BrightnessDistributionChart({ data }: BrightnessDistributionChar return null; }; - const CustomBar = (props: any) => { - const { fill, ...rest } = props; - return ; - }; - return (
- - -
); -} \ No newline at end of file +} diff --git a/meteor-frontend/src/components/charts/meteor-type-pie-chart.tsx b/meteor-frontend/src/components/charts/meteor-type-pie-chart.tsx index cbba07b..4505a18 100644 --- a/meteor-frontend/src/components/charts/meteor-type-pie-chart.tsx +++ b/meteor-frontend/src/components/charts/meteor-type-pie-chart.tsx @@ -3,6 +3,9 @@ import React from 'react'; import { PieChart, Pie, Cell, ResponsiveContainer, Legend, Tooltip } from 'recharts'; +// Wrapper components to avoid TypeScript issues with recharts v3 + React 19 +const LegendWrapper = Legend as unknown as React.ComponentType>; + interface MeteorTypeData { name: string; value: number; @@ -18,7 +21,7 @@ interface MeteorTypePieChartProps { // 预定义颜色方案 const COLORS = [ - '#8884d8', '#82ca9d', '#ffc658', '#ff7c7c', + '#8884d8', '#82ca9d', '#ffc658', '#ff7c7c', '#8dd1e1', '#d084d0', '#82d982', '#ffb347' ]; @@ -49,16 +52,17 @@ export function MeteorTypePieChart({ data }: MeteorTypePieChartProps) { color: COLORS[index % COLORS.length] })); - const renderTooltip = (props: any) => { - if (props.active && props.payload && props.payload.length) { - const data = props.payload[0]; + const renderTooltip = (props: Record) => { + const { active, payload } = props as { active?: boolean; payload?: Array<{ name: string; value: number }> }; + if (active && payload && payload.length) { + const item = payload[0]; return (

- {data.name}: {data.value} 颗 + {item.name}: {item.value} 颗

- 占比: {((data.value / chartData.reduce((sum, item) => sum + item.value, 0)) * 100).toFixed(1)}% + 占比: {((item.value / chartData.reduce((sum, d) => sum + d.value, 0)) * 100).toFixed(1)}%

); @@ -66,13 +70,16 @@ export function MeteorTypePieChart({ data }: MeteorTypePieChartProps) { return null; }; - const renderLegend = (props: any) => { + const renderLegend = (props: Record) => { + const { payload } = props as { payload?: Array<{ value: string; color: string }> }; + if (!payload) return null; + return (
- {props.payload.map((entry: any, index: number) => ( + {payload.map((entry, index) => (
-
@@ -93,7 +100,7 @@ export function MeteorTypePieChart({ data }: MeteorTypePieChartProps) { cx="50%" cy="50%" labelLine={false} - label={({ name, value, percent }) => + label={({ name, percent }: { name: string; percent: number }) => `${name} ${(percent * 100).toFixed(0)}%` } outerRadius={80} @@ -105,9 +112,9 @@ export function MeteorTypePieChart({ data }: MeteorTypePieChartProps) { ))} - +
); -} \ No newline at end of file +} diff --git a/meteor-frontend/src/components/charts/station-distribution-chart.tsx b/meteor-frontend/src/components/charts/station-distribution-chart.tsx index 19938a9..6951a10 100644 --- a/meteor-frontend/src/components/charts/station-distribution-chart.tsx +++ b/meteor-frontend/src/components/charts/station-distribution-chart.tsx @@ -15,6 +15,10 @@ interface StationDistributionChartProps { }>; } +// Wrapper components to avoid TypeScript issues with recharts v3 + React 19 +const XAxisWrapper = XAxis as unknown as React.ComponentType>; +const YAxisWrapper = YAxis as unknown as React.ComponentType>; + export function StationDistributionChart({ data }: StationDistributionChartProps) { if (!data || data.length === 0) { return ( @@ -29,16 +33,17 @@ export function StationDistributionChart({ data }: StationDistributionChartProps count: item.count })); - const renderTooltip = (props: any) => { - if (props.active && props.payload && props.payload.length) { - const data = props.payload[0]; + const renderTooltip = (props: Record) => { + const { active, payload } = props as { active?: boolean; payload?: Array<{ payload: StationData; value: number }> }; + if (active && payload && payload.length) { + const item = payload[0]; return (

- {data.payload.station} + 区域: {item.payload.station}

- 检测数量: {data.value} 颗 + 检测数量: {item.value} 颗

); @@ -49,25 +54,28 @@ export function StationDistributionChart({ data }: StationDistributionChartProps return (
- + - - -
); -} \ No newline at end of file +} diff --git a/meteor-frontend/src/components/charts/time-distribution-chart.tsx b/meteor-frontend/src/components/charts/time-distribution-chart.tsx index 3595fc3..b479297 100644 --- a/meteor-frontend/src/components/charts/time-distribution-chart.tsx +++ b/meteor-frontend/src/components/charts/time-distribution-chart.tsx @@ -1,7 +1,7 @@ 'use client'; import React from 'react'; -import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer, AreaChart, Area } from 'recharts'; +import { XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer, AreaChart, Area } from 'recharts'; interface TimeData { time: string; @@ -12,12 +12,16 @@ interface TimeDistributionChartProps { data: Array<{ hour?: number; month?: string; - year?: number; + year?: string | number; count: number; }>; timeFrame: 'hour' | 'day' | 'month'; } +// Wrapper components to avoid TypeScript issues with recharts v3 + React 19 +const XAxisWrapper = XAxis as unknown as React.ComponentType>; +const YAxisWrapper = YAxis as unknown as React.ComponentType>; + export function TimeDistributionChart({ data, timeFrame }: TimeDistributionChartProps) { if (!data || data.length === 0) { return ( @@ -43,16 +47,17 @@ export function TimeDistributionChart({ data, timeFrame }: TimeDistributionChart }; }); - const renderTooltip = (props: any) => { - if (props.active && props.payload && props.payload.length) { - const data = props.payload[0]; + const renderTooltip = (props: Record) => { + const { active, payload } = props as { active?: boolean; payload?: Array<{ payload: TimeData; value: number }> }; + if (active && payload && payload.length) { + const item = payload[0]; return (

- {timeFrame === 'hour' ? '时间' : timeFrame === 'month' ? '月份' : '年份'}: {data.payload.time} + {timeFrame === 'hour' ? '时间' : timeFrame === 'month' ? '月份' : '年份'}: {item.payload.time}

- 检测数量: {data.value} 颗 + 检测数量: {item.value} 颗

); @@ -71,12 +76,12 @@ export function TimeDistributionChart({ data, timeFrame }: TimeDistributionChart - - @@ -92,4 +97,4 @@ export function TimeDistributionChart({ data, timeFrame }: TimeDistributionChart
); -} \ No newline at end of file +} diff --git a/meteor-frontend/src/components/device-registration/device-card.tsx b/meteor-frontend/src/components/device-registration/device-card.tsx index 3bb30fd..13eaa17 100644 --- a/meteor-frontend/src/components/device-registration/device-card.tsx +++ b/meteor-frontend/src/components/device-registration/device-card.tsx @@ -100,9 +100,9 @@ export function DeviceCard({ {statusConfig.icon} {statusConfig.text} -
diff --git a/meteor-frontend/src/components/device-registration/qr-code-display.tsx b/meteor-frontend/src/components/device-registration/qr-code-display.tsx index 4d0568c..82cac63 100644 --- a/meteor-frontend/src/components/device-registration/qr-code-display.tsx +++ b/meteor-frontend/src/components/device-registration/qr-code-display.tsx @@ -32,7 +32,6 @@ export function QRCodeDisplay({ registrationCode, pinCode, className, compact = const dataUrl = await QRCode.toDataURL(qrData, { errorCorrectionLevel: 'M', type: 'image/png', - quality: 0.92, margin: 2, color: { dark: '#000000', diff --git a/meteor-frontend/src/contexts/auth-context.tsx b/meteor-frontend/src/contexts/auth-context.tsx index 522a4e8..149d5d6 100644 --- a/meteor-frontend/src/contexts/auth-context.tsx +++ b/meteor-frontend/src/contexts/auth-context.tsx @@ -17,8 +17,9 @@ interface AuthContextType { isInitializing: boolean login: (email: string, password: string) => Promise register: (email: string, password: string, displayName: string) => Promise - logout: () => void + logout: () => Promise setUser: (user: User | null) => void + getAccessToken: () => string | null } const AuthContext = React.createContext(undefined) @@ -34,79 +35,75 @@ export function AuthProvider({ children }: AuthProviderProps) { const isAuthenticated = !!user - const refreshTokens = async (): Promise => { + const getApiBaseUrl = () => { + return process.env.NEXT_PUBLIC_API_URL || "http://localhost:3001" + } + + const getAccessToken = (): string | null => { + if (typeof window === "undefined") return null + return localStorage.getItem("accessToken") + } + + const tryRefreshToken = async (): Promise => { const refreshToken = localStorage.getItem("refreshToken") - if (!refreshToken) { - return false - } + if (!refreshToken) return false try { - const response = await fetch("http://localhost:3001/api/v1/auth/refresh", { + const response = await fetch(`${getApiBaseUrl()}/api/v1/auth/refresh`, { method: "POST", - headers: { - 
"Content-Type": "application/json", - }, + headers: { "Content-Type": "application/json" }, body: JSON.stringify({ refreshToken }), }) - if (!response.ok) { - localStorage.removeItem("accessToken") - localStorage.removeItem("refreshToken") - return false - } + if (!response.ok) return false const data = await response.json() localStorage.setItem("accessToken", data.accessToken) localStorage.setItem("refreshToken", data.refreshToken) return true - } catch (error) { - console.error("Failed to refresh tokens:", error) - localStorage.removeItem("accessToken") - localStorage.removeItem("refreshToken") + } catch { return false } } - const fetchUserProfile = async (): Promise => { - let token = localStorage.getItem("accessToken") - if (!token) { - return null - } + const fetchUserProfile = React.useCallback(async (accessToken?: string): Promise => { + const token = accessToken || localStorage.getItem("accessToken") + if (!token) return null try { - let response = await fetch("http://localhost:3001/api/v1/auth/profile", { - method: "GET", + const response = await fetch(`${getApiBaseUrl()}/api/v1/auth/profile`, { headers: { "Authorization": `Bearer ${token}`, "Content-Type": "application/json", }, }) - // If token is expired, try to refresh - if (response.status === 401) { - const refreshed = await refreshTokens() - if (!refreshed) { - return null - } - - // Retry with new token - token = localStorage.getItem("accessToken") - if (!token) { - return null - } - - response = await fetch("http://localhost:3001/api/v1/auth/profile", { - method: "GET", - headers: { - "Authorization": `Bearer ${token}`, - "Content-Type": "application/json", - }, - }) - } - if (!response.ok) { - localStorage.removeItem("accessToken") - localStorage.removeItem("refreshToken") + if (response.status === 401) { + // Try to refresh token + const refreshed = await tryRefreshToken() + if (refreshed) { + const newToken = localStorage.getItem("accessToken") + if (newToken) { + const retryResponse = await 
fetch(`${getApiBaseUrl()}/api/v1/auth/profile`, { + headers: { + "Authorization": `Bearer ${newToken}`, + "Content-Type": "application/json", + }, + }) + if (retryResponse.ok) { + const profileData = await retryResponse.json() + return { + userId: profileData.userId, + email: profileData.email, + displayName: profileData.displayName, + subscriptionStatus: profileData.subscriptionStatus, + hasActiveSubscription: profileData.hasActiveSubscription, + } + } + } + } + } return null } @@ -122,44 +119,37 @@ export function AuthProvider({ children }: AuthProviderProps) { console.error("Failed to fetch user profile:", error) return null } - } + }, []) const login = async (email: string, password: string) => { setIsLoading(true) try { - const response = await fetch("http://localhost:3001/api/v1/auth/login-email", { + const response = await fetch(`${getApiBaseUrl()}/api/v1/auth/login-email`, { method: "POST", - headers: { - "Content-Type": "application/json", - }, + headers: { "Content-Type": "application/json" }, body: JSON.stringify({ email, password }), }) if (!response.ok) { - const errorData = await response.json() - throw new Error(errorData.message || "Login failed") + const error = await response.json() + throw new Error(error.message || "Login failed") } const data = await response.json() - - // Store tokens in localStorage (in production, consider httpOnly cookies) + + // Store tokens localStorage.setItem("accessToken", data.accessToken) localStorage.setItem("refreshToken", data.refreshToken) - - // Fetch and set user profile data including subscription status - const userProfile = await fetchUserProfile() - if (userProfile) { - setUser(userProfile) - } else { - // Fallback to basic user data if profile fetch fails - setUser({ - userId: data.userId, - email, - displayName: null, - subscriptionStatus: null, - hasActiveSubscription: false, - }) - } + + // Fetch user profile + const userProfile = await fetchUserProfile(data.accessToken) + setUser(userProfile || { + 
userId: data.userId, + email, + displayName: null, + subscriptionStatus: null, + hasActiveSubscription: false, + }) } finally { setIsLoading(false) } @@ -168,53 +158,64 @@ export function AuthProvider({ children }: AuthProviderProps) { const register = async (email: string, password: string, displayName: string) => { setIsLoading(true) try { - const response = await fetch("http://localhost:3001/api/v1/auth/register-email", { + const response = await fetch(`${getApiBaseUrl()}/api/v1/auth/register-email`, { method: "POST", - headers: { - "Content-Type": "application/json", - }, + headers: { "Content-Type": "application/json" }, body: JSON.stringify({ email, password, displayName }), }) if (!response.ok) { - const errorData = await response.json() - throw new Error(errorData.message || "Registration failed") + const error = await response.json() + throw new Error(error.message || "Registration failed") } - await response.json() - - // After successful registration, automatically log in + // After registration, login automatically await login(email, password) } finally { setIsLoading(false) } } - const logout = () => { + const logout = async () => { localStorage.removeItem("accessToken") localStorage.removeItem("refreshToken") setUser(null) } - // Check for existing token on mount and fetch user profile + // Initialize auth state React.useEffect(() => { + let mounted = true + const initializeAuth = async () => { - const token = localStorage.getItem("accessToken") - if (token) { - const userProfile = await fetchUserProfile() - if (userProfile) { - setUser(userProfile) - } else { - // Token is invalid or profile fetch failed, clean up - localStorage.removeItem("accessToken") - localStorage.removeItem("refreshToken") + try { + const accessToken = localStorage.getItem("accessToken") + if (accessToken) { + const userProfile = await fetchUserProfile(accessToken) + if (mounted) { + if (userProfile) { + setUser(userProfile) + } else { + // Token invalid, clear it + 
localStorage.removeItem("accessToken") + localStorage.removeItem("refreshToken") + } + } + } + } catch (error) { + console.error("Failed to initialize auth:", error) + } finally { + if (mounted) { + setIsInitializing(false) } } - setIsInitializing(false) } initializeAuth() - }, []) + + return () => { + mounted = false + } + }, [fetchUserProfile]) const value: AuthContextType = { user, @@ -225,6 +226,7 @@ export function AuthProvider({ children }: AuthProviderProps) { register, logout, setUser, + getAccessToken, } return {children} @@ -236,4 +238,4 @@ export function useAuth() { throw new Error("useAuth must be used within an AuthProvider") } return context -} \ No newline at end of file +} diff --git a/meteor-frontend/src/contexts/device-registration-context.tsx b/meteor-frontend/src/contexts/device-registration-context.tsx index 8d681cb..aa97413 100644 --- a/meteor-frontend/src/contexts/device-registration-context.tsx +++ b/meteor-frontend/src/contexts/device-registration-context.tsx @@ -232,7 +232,7 @@ export function DeviceRegistrationProvider({ children }: DeviceRegistrationProvi // Transform the response into a session object const session: DeviceRegistrationSession = { id: response.claim_id, - userProfileId: user?.id || '', + userProfileId: user?.userId || '', registrationCode: response.claim_token, pinCode: response.fallback_pin, status: RegistrationStatus.WAITING_FOR_DEVICE, @@ -254,7 +254,7 @@ export function DeviceRegistrationProvider({ children }: DeviceRegistrationProvi } finally { dispatch({ type: 'SET_LOADING', payload: false }) } - }, [state.wsConnected, user?.id]) + }, [state.wsConnected, user?.userId]) const cancelRegistration = useCallback(async () => { if (!state.currentSession) return diff --git a/meteor-frontend/src/hooks/use-api.ts b/meteor-frontend/src/hooks/use-api.ts new file mode 100644 index 0000000..4f2f8b3 --- /dev/null +++ b/meteor-frontend/src/hooks/use-api.ts @@ -0,0 +1,158 @@ +"use client" + +import { useAuth } from 
"@/contexts/auth-context" +import { useCallback } from "react" + +interface ApiRequestOptions extends Omit { + headers?: Record +} + +interface ApiError extends Error { + status?: number + data?: unknown +} + +export function useApi() { + const { getAccessToken, isAuthenticated } = useAuth() + + const getApiBaseUrl = () => { + return process.env.NEXT_PUBLIC_API_URL || "http://localhost:3001" + } + + const request = useCallback(async ( + endpoint: string, + options: ApiRequestOptions = {} + ): Promise => { + const { headers = {}, ...restOptions } = options + + const requestHeaders: Record = { + "Content-Type": "application/json", + ...headers, + } + + // Add authorization header if we have a token + const accessToken = getAccessToken() + if (accessToken) { + requestHeaders["Authorization"] = `Bearer ${accessToken}` + } + + const url = endpoint.startsWith("http") + ? endpoint + : `${getApiBaseUrl()}${endpoint.startsWith("/") ? endpoint : `/${endpoint}`}` + + const response = await fetch(url, { + ...restOptions, + headers: requestHeaders, + }) + + if (!response.ok) { + const error: ApiError = new Error(`API request failed: ${response.statusText}`) + error.status = response.status + try { + error.data = await response.json() + } catch { + // Response body is not JSON + } + throw error + } + + // Handle empty responses + const text = await response.text() + if (!text) { + return undefined as T + } + + return JSON.parse(text) as T + }, [getAccessToken]) + + const get = useCallback((endpoint: string, options?: ApiRequestOptions) => { + return request(endpoint, { ...options, method: "GET" }) + }, [request]) + + const post = useCallback((endpoint: string, data?: unknown, options?: ApiRequestOptions) => { + return request(endpoint, { + ...options, + method: "POST", + body: data ? 
JSON.stringify(data) : undefined, + }) + }, [request]) + + const put = useCallback((endpoint: string, data?: unknown, options?: ApiRequestOptions) => { + return request(endpoint, { + ...options, + method: "PUT", + body: data ? JSON.stringify(data) : undefined, + }) + }, [request]) + + const patch = useCallback((endpoint: string, data?: unknown, options?: ApiRequestOptions) => { + return request(endpoint, { + ...options, + method: "PATCH", + body: data ? JSON.stringify(data) : undefined, + }) + }, [request]) + + const del = useCallback((endpoint: string, options?: ApiRequestOptions) => { + return request(endpoint, { ...options, method: "DELETE" }) + }, [request]) + + // Upload file with multipart form data + const upload = useCallback(async ( + endpoint: string, + formData: FormData, + options?: Omit + ): Promise => { + const { headers = {}, ...restOptions } = options || {} + + const requestHeaders: Record = { + // Don't set Content-Type for FormData - browser will set it with boundary + ...headers, + } + + const accessToken = getAccessToken() + if (accessToken) { + requestHeaders["Authorization"] = `Bearer ${accessToken}` + } + + const url = endpoint.startsWith("http") + ? endpoint + : `${getApiBaseUrl()}${endpoint.startsWith("/") ? 
endpoint : `/${endpoint}`}` + + const response = await fetch(url, { + ...restOptions, + method: "POST", + headers: requestHeaders, + body: formData, + }) + + if (!response.ok) { + const error: ApiError = new Error(`Upload failed: ${response.statusText}`) + error.status = response.status + try { + error.data = await response.json() + } catch { + // Response body is not JSON + } + throw error + } + + const text = await response.text() + if (!text) { + return undefined as T + } + + return JSON.parse(text) as T + }, [getAccessToken]) + + return { + request, + get, + post, + put, + patch, + delete: del, + upload, + isAuthenticated, + } +} diff --git a/meteor-frontend/src/hooks/use-realtime.ts b/meteor-frontend/src/hooks/use-realtime.ts new file mode 100644 index 0000000..7103da0 --- /dev/null +++ b/meteor-frontend/src/hooks/use-realtime.ts @@ -0,0 +1,155 @@ +"use client" + +import { useEffect, useRef } from "react" +import { supabase } from "@/lib/supabase" +import { useAuth } from "@/contexts/auth-context" +import type { RealtimeChannel } from "@supabase/supabase-js" + +interface UseRealtimeOptions { + enabled?: boolean +} + +// Subscribe to broadcast events on a channel +export function useBroadcastChannel( + channelName: string, + eventName: string, + callback: (payload: unknown) => void, + options: UseRealtimeOptions = {} +) { + const { enabled = true } = options + const channelRef = useRef(null) + const callbackRef = useRef(callback) + + // Keep callback ref updated + useEffect(() => { + callbackRef.current = callback + }, [callback]) + + useEffect(() => { + if (!enabled) return + + const channel = supabase.channel(channelName) + .on("broadcast", { event: eventName }, (payload) => { + callbackRef.current(payload.payload) + }) + .subscribe() + + channelRef.current = channel + + return () => { + channel.unsubscribe() + channelRef.current = null + } + }, [channelName, eventName, enabled]) + + return channelRef.current +} + +// Subscribe to user-specific events +export 
function useUserEvents( + eventName: string, + callback: (payload: unknown) => void, + options: UseRealtimeOptions = {} +) { + const { user } = useAuth() + const { enabled = true } = options + + const channelName = user ? `user:${user.userId}` : null + const isEnabled = enabled && !!channelName + + return useBroadcastChannel( + channelName || "noop", + eventName, + callback, + { enabled: isEnabled } + ) +} + +// Subscribe to device status updates +export function useDeviceStatus( + callback: (payload: { + deviceId: string + status: "online" | "offline" + timestamp: string + }) => void, + options: UseRealtimeOptions = {} +) { + return useUserEvents("device-status-change", callback as (payload: unknown) => void, options) +} + +// Subscribe to device heartbeat updates +export function useDeviceHeartbeat( + callback: (payload: { + deviceId: string + data: unknown + timestamp: string + }) => void, + options: UseRealtimeOptions = {} +) { + return useUserEvents("device-heartbeat", callback as (payload: unknown) => void, options) +} + +// Subscribe to device alerts +export function useDeviceAlerts( + callback: (payload: { + deviceId: string + type: string + message: string + timestamp: string + }) => void, + options: UseRealtimeOptions = {} +) { + return useUserEvents("device-alert", callback as (payload: unknown) => void, options) +} + +// Subscribe to new event notifications +export function useNewEvents( + callback: (payload: unknown) => void, + options: UseRealtimeOptions = {} +) { + return useUserEvents("new-event", callback, options) +} + +// Subscribe to device registration updates (for registration flow) +export function useRegistrationUpdates( + claimId: string | null, + callback: (payload: { + claim_id: string + status: string + progress: number + error?: string + timestamp: string + }) => void, + options: UseRealtimeOptions = {} +) { + const { enabled = true } = options + const callbackRef = useRef(callback) + + useEffect(() => { + callbackRef.current = callback 
+ }, [callback]) + + useEffect(() => { + if (!enabled || !claimId) return + + const channel = supabase.channel("registrations") + .on("broadcast", { event: "registration-update" }, (payload) => { + const data = payload.payload as { + claim_id: string + status: string + progress: number + error?: string + timestamp: string + } + // Only call callback if it's for our claim + if (data.claim_id === claimId) { + callbackRef.current(data) + } + }) + .subscribe() + + return () => { + channel.unsubscribe() + } + }, [claimId, enabled]) +} diff --git a/meteor-frontend/src/lib/supabase.ts b/meteor-frontend/src/lib/supabase.ts new file mode 100644 index 0000000..c5c8bf0 --- /dev/null +++ b/meteor-frontend/src/lib/supabase.ts @@ -0,0 +1,112 @@ +import { createClient } from '@supabase/supabase-js'; + +const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL || ''; +const supabaseAnonKey = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY || ''; + +if (!supabaseUrl || !supabaseAnonKey) { + console.warn('Supabase URL or Anon Key not configured. 
Supabase features will be disabled.'); +} + +// Fall back to inert placeholder credentials so module import does not throw when env is missing +export const supabase = createClient(supabaseUrl || 'http://localhost:54321', supabaseAnonKey || 'placeholder-anon-key', { + auth: { + autoRefreshToken: true, + persistSession: true, + detectSessionInUrl: true, + }, +}); + +// Helper to check if Supabase is configured +export const isSupabaseConfigured = () => { + return Boolean(supabaseUrl && supabaseAnonKey); +}; + +// Auth helpers +export const signInWithEmail = async (email: string, password: string) => { + const { data, error } = await supabase.auth.signInWithPassword({ + email, + password, + }); + return { data, error }; +}; + +export const signUpWithEmail = async (email: string, password: string) => { + const { data, error } = await supabase.auth.signUp({ + email, + password, + }); + return { data, error }; +}; + +export const signOut = async () => { + const { error } = await supabase.auth.signOut(); + return { error }; +}; + +export const getSession = async () => { + const { data: { session }, error } = await supabase.auth.getSession(); + return { session, error }; +}; + +export const getUser = async () => { + const { data: { user }, error } = await supabase.auth.getUser(); + return { user, error }; +}; + +// Realtime helpers +export const subscribeToChannel = ( + channelName: string, + callback: (payload: any) => void, +) => { + return supabase + .channel(channelName) + .on('broadcast', { event: '*' }, callback) + .subscribe(); +}; + +export const subscribeToTableChanges = ( + tableName: string, + callback: (payload: any) => void, + filter?: string, +) => { + return supabase + .channel(`postgres:${tableName}`) + .on( + 'postgres_changes', + { + event: '*', + schema: 'public', + table: tableName, + filter, + }, + callback, + ) + .subscribe(); +}; + +// Storage helpers +export const uploadFile = async ( + bucket: string, + path: string, + file: File, +) => { + const { data, error } = await supabase.storage + .from(bucket) + .upload(path, file); + return { data, error }; +}; + +export const getPublicUrl = (bucket:
string, path: string) => { + const { data } = supabase.storage.from(bucket).getPublicUrl(path); + return data.publicUrl; +}; + +export const getSignedUrl = async ( + bucket: string, + path: string, + expiresIn: number = 3600, +) => { + const { data, error } = await supabase.storage + .from(bucket) + .createSignedUrl(path, expiresIn); + return { data, error }; +}; diff --git a/meteor-frontend/src/services/camera.ts b/meteor-frontend/src/services/camera.ts index 245abba..699ef1b 100644 --- a/meteor-frontend/src/services/camera.ts +++ b/meteor-frontend/src/services/camera.ts @@ -1,10 +1,17 @@ const API_BASE_URL = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:3001'; export interface CameraDevice { - id: number; - deviceId: string; + id: string; + deviceId?: string; name: string; - location: string; + location?: { + latitude?: number; + longitude?: number; + altitude?: number; + site_name?: string; + }; + legacyDeviceId?: string; + legacyLocation?: string; status: 'active' | 'maintenance' | 'offline'; lastSeenAt?: string; temperature?: number; @@ -16,6 +23,7 @@ export interface CameraDevice { serialNumber?: string; createdAt: string; updatedAt: string; + deletedAt?: string; } export interface CameraHistoryData { @@ -83,7 +91,7 @@ class CameraService { return this.fetch(`/api/v1/cameras?${params}`); } - async getCameraById(id: number): Promise { + async getCameraById(id: string): Promise { return this.fetch(`/api/v1/cameras/${id}`); } @@ -99,14 +107,14 @@ class CameraService { return this.fetch('/api/v1/cameras/stats'); } - async updateCameraStatus(id: number, status: 'active' | 'maintenance' | 'offline'): Promise { + async updateCameraStatus(id: string, status: 'active' | 'maintenance' | 'offline'): Promise { return this.fetch(`/api/v1/cameras/${id}/status`, { method: 'PATCH', body: JSON.stringify({ status }), }); } - async updateCamera(id: number, updateData: Partial): Promise { + async updateCamera(id: string, updateData: Partial): Promise { return 
this.fetch(`/api/v1/cameras/${id}`, { method: 'PATCH', body: JSON.stringify(updateData), diff --git a/meteor-frontend/src/services/weather.ts b/meteor-frontend/src/services/weather.ts index e0f45ff..4575909 100644 --- a/meteor-frontend/src/services/weather.ts +++ b/meteor-frontend/src/services/weather.ts @@ -1,10 +1,12 @@ interface WeatherStation { - id: number; + id: string; stationName: string; - location: string; + location?: string; latitude?: number; longitude?: number; altitude?: number; + elevation?: number; + timezone?: string; status: 'active' | 'maintenance' | 'offline'; currentTemperature?: number; humidity?: number; @@ -16,6 +18,7 @@ interface WeatherStation { observationQuality?: 'excellent' | 'moderate' | 'poor'; createdAt: string; updatedAt: string; + deletedAt?: string; } interface WeatherData extends WeatherStation { @@ -23,8 +26,8 @@ interface WeatherData extends WeatherStation { } interface WeatherObservation { - id: number; - weatherStationId: number; + id: string; + weatherStationId: string; observationTime: string; temperature: number; humidity: number; @@ -40,7 +43,7 @@ interface WeatherObservation { } interface WeatherForecast { - id: number; + id: string; forecastTime: string; temperature?: number; cloudCover?: number; diff --git a/meteor-web-backend/.env.example b/meteor-web-backend/.env.example index 92cbfd0..1b22c3a 100644 --- a/meteor-web-backend/.env.example +++ b/meteor-web-backend/.env.example @@ -1,14 +1,20 @@ # Database Configuration -DATABASE_URL=postgresql://user:password@localhost:5432/meteor_dev +# Use Supabase DATABASE_URL for production +DATABASE_URL=postgresql://postgres:[YOUR-PASSWORD]@db.your-project.supabase.co:5432/postgres TEST_DATABASE_URL=postgresql://username:password@host:port/test_database_name -# JWT Configuration +# Supabase Configuration +SUPABASE_URL=https://your-project.supabase.co +SUPABASE_PUBLISHABLE_KEY=sb_publishable_xxx +SUPABASE_SECRET_KEY=sb_secret_xxx +SUPABASE_JWT_SECRET=your-supabase-jwt-secret + +# 
JWT Configuration (legacy - will be replaced by Supabase Auth) JWT_ACCESS_SECRET=your-super-secret-access-key-change-this-in-production JWT_REFRESH_SECRET=your-super-secret-refresh-key-change-this-in-production JWT_ACCESS_EXPIRATION=15m JWT_REFRESH_EXPIRATION=7d - # Optional - Application Configuration PORT=3000 NODE_ENV=development @@ -16,12 +22,12 @@ NODE_ENV=development # Optional - Security Configuration BCRYPT_SALT_ROUNDS=10 -# AWS Configuration (required for event upload functionality) -AWS_REGION=us-east-1 -AWS_ACCESS_KEY_ID=your-aws-access-key-id -AWS_SECRET_ACCESS_KEY=your-aws-secret-access-key -AWS_S3_BUCKET_NAME=meteor-events-bucket -AWS_SQS_QUEUE_URL=https://sqs.us-east-1.amazonaws.com/123456789012/meteor-events-queue +# AWS Configuration (deprecated - migrating to Supabase Storage) +# AWS_REGION=us-east-1 +# AWS_ACCESS_KEY_ID=your-aws-access-key-id +# AWS_SECRET_ACCESS_KEY=your-aws-secret-access-key +# AWS_S3_BUCKET_NAME=meteor-events-bucket +# AWS_SQS_QUEUE_URL=https://sqs.us-east-1.amazonaws.com/123456789012/meteor-events-queue # Payment Provider Configuration # Stripe diff --git a/meteor-web-backend/migrations/1755016393640_create-device-registration-tables.js b/meteor-web-backend/migrations/1755016393640_create-device-registration-tables.js index 8520731..ad2ab7b 100644 --- a/meteor-web-backend/migrations/1755016393640_create-device-registration-tables.js +++ b/meteor-web-backend/migrations/1755016393640_create-device-registration-tables.js @@ -14,7 +14,7 @@ export const up = (pgm) => { id: { type: 'uuid', primaryKey: true, - default: pgm.func('uuid_generate_v4()'), + default: pgm.func('gen_random_uuid()'), }, user_profile_id: { type: 'uuid', @@ -132,7 +132,7 @@ export const up = (pgm) => { id: { type: 'uuid', primaryKey: true, - default: pgm.func('uuid_generate_v4()'), + default: pgm.func('gen_random_uuid()'), }, device_id: { type: 'uuid', @@ -243,7 +243,7 @@ export const up = (pgm) => { id: { type: 'uuid', primaryKey: true, - default: 
pgm.func('uuid_generate_v4()'), + default: pgm.func('gen_random_uuid()'), }, device_id: { type: 'uuid', @@ -347,7 +347,7 @@ export const up = (pgm) => { id: { type: 'uuid', primaryKey: true, - default: pgm.func('uuid_generate_v4()'), + default: pgm.func('gen_random_uuid()'), }, device_id: { type: 'uuid', diff --git a/meteor-web-backend/migrations/1766168324271_add-supabase-user-id.js b/meteor-web-backend/migrations/1766168324271_add-supabase-user-id.js new file mode 100644 index 0000000..02d7707 --- /dev/null +++ b/meteor-web-backend/migrations/1766168324271_add-supabase-user-id.js @@ -0,0 +1,40 @@ +/** + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + // Add supabase_user_id column to user_profiles table + pgm.addColumn('user_profiles', { + supabase_user_id: { + type: 'uuid', + unique: true, + }, + }); + + // Create index for faster lookups + pgm.createIndex('user_profiles', 'supabase_user_id', { + unique: true, + where: 'supabase_user_id IS NOT NULL', + }); + + // Add comment + pgm.sql(` + COMMENT ON COLUMN user_profiles.supabase_user_id IS 'Links to Supabase Auth user ID for authentication migration'; + `); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + pgm.dropIndex('user_profiles', 'supabase_user_id'); + pgm.dropColumn('user_profiles', 'supabase_user_id'); +}; diff --git a/meteor-web-backend/migrations/1766209603920_create-missing-tables.js b/meteor-web-backend/migrations/1766209603920_create-missing-tables.js new file mode 100644 index 0000000..5f66a38 --- /dev/null +++ b/meteor-web-backend/migrations/1766209603920_create-missing-tables.js @@ -0,0 +1,156 @@ +/** + * @type 
{import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + // 1. Create subscription_plans table + pgm.createTable('subscription_plans', { + id: { type: 'serial', primaryKey: true }, + plan_id: { type: 'varchar(50)', notNull: true, unique: true }, + name: { type: 'varchar(100)', notNull: true }, + description: { type: 'text' }, + price: { type: 'decimal(10,2)', notNull: true }, + currency: { type: 'varchar(10)', default: "'CNY'" }, + interval: { type: 'varchar(20)', notNull: true }, + interval_count: { type: 'integer', default: 1 }, + stripe_price_id: { type: 'varchar(100)', unique: true }, + features: { type: 'jsonb' }, + is_popular: { type: 'boolean', default: false }, + is_active: { type: 'boolean', default: true }, + created_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + updated_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + }); + + // 2. 
Create user_subscriptions table + pgm.createTable('user_subscriptions', { + id: { type: 'serial', primaryKey: true }, + user_profile_id: { + type: 'uuid', + notNull: true, + references: 'user_profiles(id)', + onDelete: 'CASCADE', + }, + subscription_plan_id: { + type: 'integer', + notNull: true, + references: 'subscription_plans(id)', + onDelete: 'CASCADE', + }, + stripe_subscription_id: { type: 'varchar(100)', unique: true }, + status: { type: 'varchar(20)', notNull: true, default: "'active'" }, + current_period_start: { type: 'timestamptz' }, + current_period_end: { type: 'timestamptz' }, + cancel_at_period_end: { type: 'boolean', default: false }, + canceled_at: { type: 'timestamptz' }, + trial_start: { type: 'timestamptz' }, + trial_end: { type: 'timestamptz' }, + created_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + updated_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + }); + + // 3. Create subscription_history table + pgm.createTable('subscription_history', { + id: { type: 'serial', primaryKey: true }, + user_subscription_id: { + type: 'integer', + notNull: true, + references: 'user_subscriptions(id)', + onDelete: 'CASCADE', + }, + action: { type: 'varchar(50)', notNull: true }, + old_status: { type: 'varchar(20)' }, + new_status: { type: 'varchar(20)' }, + metadata: { type: 'jsonb' }, + created_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + }); + + // 4. 
Create payment_records table + pgm.createTable('payment_records', { + id: { type: 'serial', primaryKey: true }, + user_subscription_id: { + type: 'integer', + notNull: true, + references: 'user_subscriptions(id)', + onDelete: 'CASCADE', + }, + stripe_payment_intent_id: { type: 'varchar(100)', unique: true }, + amount: { type: 'decimal(10,2)', notNull: true }, + currency: { type: 'varchar(10)', notNull: true }, + status: { type: 'varchar(20)', notNull: true }, + payment_method: { type: 'varchar(50)' }, + failure_reason: { type: 'text' }, + paid_at: { type: 'timestamptz' }, + created_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + }); + + // 5. Create camera_devices table + pgm.createTable('camera_devices', { + id: { type: 'serial', primaryKey: true }, + device_id: { type: 'varchar(100)', notNull: true, unique: true }, + name: { type: 'varchar(100)', notNull: true }, + location: { type: 'varchar(200)', notNull: true }, + status: { type: 'varchar(20)', default: "'offline'" }, + last_seen_at: { type: 'timestamptz' }, + temperature: { type: 'decimal(5,2)' }, + cooler_power: { type: 'decimal(5,2)' }, + gain: { type: 'integer' }, + exposure_count: { type: 'integer', default: 0 }, + uptime: { type: 'decimal(10,2)' }, + firmware_version: { type: 'varchar(50)' }, + serial_number: { type: 'varchar(100)', unique: true }, + created_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + updated_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + }); + + // 6. 
Create weather_observations table + pgm.createTable('weather_observations', { + id: { type: 'serial', primaryKey: true }, + weather_station_id: { + type: 'integer', + notNull: true, + references: 'weather_stations(id)', + onDelete: 'CASCADE', + }, + observation_time: { type: 'timestamptz', notNull: true }, + temperature: { type: 'decimal(5,2)', notNull: true }, + humidity: { type: 'decimal(5,2)', notNull: true }, + cloud_cover: { type: 'decimal(5,2)', notNull: true }, + visibility: { type: 'decimal(6,2)', notNull: true }, + wind_speed: { type: 'decimal(5,2)', notNull: true }, + wind_direction: { type: 'integer', notNull: true }, + condition: { type: 'varchar(50)', notNull: true }, + observation_quality: { type: 'varchar(20)', notNull: true }, + pressure: { type: 'decimal(7,2)', notNull: true }, + precipitation: { type: 'decimal(5,2)', notNull: true }, + created_at: { type: 'timestamptz', default: pgm.func('NOW()') }, + }); + + // Create indexes for foreign keys + pgm.createIndex('user_subscriptions', 'user_profile_id'); + pgm.createIndex('user_subscriptions', 'subscription_plan_id'); + pgm.createIndex('subscription_history', 'user_subscription_id'); + pgm.createIndex('payment_records', 'user_subscription_id'); + pgm.createIndex('weather_observations', 'weather_station_id'); + pgm.createIndex('weather_observations', 'observation_time'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + // Drop tables in reverse order due to foreign key constraints + pgm.dropTable('weather_observations', { ifExists: true, cascade: true }); + pgm.dropTable('camera_devices', { ifExists: true, cascade: true }); + pgm.dropTable('payment_records', { ifExists: true, cascade: true }); + pgm.dropTable('subscription_history', { ifExists: true, cascade: true }); + pgm.dropTable('user_subscriptions', { ifExists: true, cascade: true }); + 
pgm.dropTable('subscription_plans', { ifExists: true, cascade: true }); +}; diff --git a/meteor-web-backend/migrations/1766211093224_alter-weather-forecasts-cloud-cover.js b/meteor-web-backend/migrations/1766211093224_alter-weather-forecasts-cloud-cover.js new file mode 100644 index 0000000..e390196 --- /dev/null +++ b/meteor-web-backend/migrations/1766211093224_alter-weather-forecasts-cloud-cover.js @@ -0,0 +1,29 @@ +/** + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + // Change cloud_cover from integer to decimal(5,2) to match source data + pgm.alterColumn('weather_forecasts', 'cloud_cover', { + type: 'decimal(5,2)', + using: 'cloud_cover::decimal(5,2)', + }); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + pgm.alterColumn('weather_forecasts', 'cloud_cover', { + type: 'integer', + using: 'cloud_cover::integer', + }); +}; diff --git a/meteor-web-backend/migrations/1766250271179_add-missing-device-columns.js b/meteor-web-backend/migrations/1766250271179_add-missing-device-columns.js new file mode 100644 index 0000000..173c2b0 --- /dev/null +++ b/meteor-web-backend/migrations/1766250271179_add-missing-device-columns.js @@ -0,0 +1,87 @@ +/** + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +exports.shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +exports.up = (pgm) => { + // Add missing columns to devices table + pgm.addColumns('devices', { + device_token: { + type: 'varchar(255)', + unique: true, + }, + hardware_fingerprint_hash: { + type: 'varchar(128)', + }, + 
firmware_version: { + type: 'varchar(100)', + }, + device_model: { + type: 'varchar(100)', + }, + location: { + type: 'jsonb', + }, + capabilities: { + type: 'jsonb', + }, + network_info: { + type: 'jsonb', + }, + security_level: { + type: 'varchar(20)', + default: 'standard', + }, + trust_score: { + type: 'float', + default: 1.0, + }, + last_heartbeat_at: { + type: 'timestamp with time zone', + }, + activated_at: { + type: 'timestamp with time zone', + }, + deactivated_at: { + type: 'timestamp with time zone', + }, + metadata: { + type: 'jsonb', + }, + }); + + // Add indexes for commonly queried columns + pgm.createIndex('devices', 'device_token'); + pgm.createIndex('devices', 'last_heartbeat_at'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +exports.down = (pgm) => { + pgm.dropIndex('devices', 'last_heartbeat_at'); + pgm.dropIndex('devices', 'device_token'); + + pgm.dropColumns('devices', [ + 'device_token', + 'hardware_fingerprint_hash', + 'firmware_version', + 'device_model', + 'location', + 'capabilities', + 'network_info', + 'security_level', + 'trust_score', + 'last_heartbeat_at', + 'activated_at', + 'deactivated_at', + 'metadata', + ]); +}; diff --git a/meteor-web-backend/migrations/1766300000001_add-weather-station-location.js b/meteor-web-backend/migrations/1766300000001_add-weather-station-location.js new file mode 100644 index 0000000..1a554d3 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000001_add-weather-station-location.js @@ -0,0 +1,86 @@ +/** + * Migration: Add location coordinates to weather_stations table + * + * Fixes: WeatherStation has no location/coordinates - how do you know where the station is? 
+ * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding location coordinates to weather_stations...'); + + // Add location coordinate columns + pgm.addColumns('weather_stations', { + latitude: { + type: 'decimal(10,7)', + notNull: false, + comment: 'Latitude coordinate of the weather station (-90 to 90)', + }, + longitude: { + type: 'decimal(10,7)', + notNull: false, + comment: 'Longitude coordinate of the weather station (-180 to 180)', + }, + elevation: { + type: 'decimal(6,1)', + notNull: false, + comment: 'Elevation above sea level in meters', + }, + timezone: { + type: 'varchar(50)', + notNull: false, + comment: 'Timezone identifier (e.g., Asia/Shanghai)', + }, + }); + + // Add CHECK constraints for coordinate validation + pgm.addConstraint('weather_stations', 'chk_weather_stations_latitude', { + check: 'latitude IS NULL OR (latitude >= -90 AND latitude <= 90)', + }); + + pgm.addConstraint('weather_stations', 'chk_weather_stations_longitude', { + check: 'longitude IS NULL OR (longitude >= -180 AND longitude <= 180)', + }); + + // Add index for geospatial queries (basic, for more advanced use PostGIS) + pgm.createIndex('weather_stations', ['latitude', 'longitude'], { + name: 'idx_weather_stations_coordinates', + where: 'latitude IS NOT NULL AND longitude IS NOT NULL', + }); + + console.log('Weather station location columns added successfully.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Removing location coordinates from weather_stations...'); + + // Drop index first + pgm.dropIndex('weather_stations', ['latitude', 'longitude'], { + name: 
'idx_weather_stations_coordinates', + ifExists: true, + }); + + // Drop constraints + pgm.dropConstraint('weather_stations', 'chk_weather_stations_longitude', { + ifExists: true, + }); + pgm.dropConstraint('weather_stations', 'chk_weather_stations_latitude', { + ifExists: true, + }); + + // Drop columns + pgm.dropColumns('weather_stations', ['latitude', 'longitude', 'elevation', 'timezone']); + + console.log('Weather station location columns removed.'); +}; diff --git a/meteor-web-backend/migrations/1766300000002_add-missing-indexes.js b/meteor-web-backend/migrations/1766300000002_add-missing-indexes.js new file mode 100644 index 0000000..a92cdec --- /dev/null +++ b/meteor-web-backend/migrations/1766300000002_add-missing-indexes.js @@ -0,0 +1,122 @@ +/** + * Migration: Add missing indexes for frequently queried columns + * + * Fixes: Missing indexes on frequently filtered/sorted columns + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding missing indexes...'); + + // validated_events.media_url - frequently queried for display + pgm.createIndex('validated_events', 'media_url', { + name: 'idx_validated_events_media_url', + method: 'hash', // Hash index for equality lookups + }); + + // payment_records.status - frequently filtered for payment status + pgm.createIndex('payment_records', 'status', { + name: 'idx_payment_records_status', + }); + + // subscription_history.action - frequently filtered by action type + pgm.createIndex('subscription_history', 'action', { + name: 'idx_subscription_history_action', + }); + + // raw_events.file_type - frequently filtered for file type statistics + pgm.createIndex('raw_events', 'file_type', { + name: 'idx_raw_events_file_type', + }); + + // devices.firmware_version - for device 
management queries + pgm.createIndex('devices', 'firmware_version', { + name: 'idx_devices_firmware_version', + where: 'firmware_version IS NOT NULL', + }); + + // device_certificates.expires_at - for certificate renewal queries + pgm.createIndex('device_certificates', 'expires_at', { + name: 'idx_device_certificates_expires_at', + }); + + // user_subscriptions.current_period_end - for subscription renewal queries + pgm.createIndex('user_subscriptions', 'current_period_end', { + name: 'idx_user_subscriptions_period_end', + }); + + // Composite index for common subscription queries + pgm.createIndex('user_subscriptions', ['status', 'current_period_end'], { + name: 'idx_user_subscriptions_status_period', + }); + + // weather_observations.observation_time - for time-based queries + pgm.createIndex('weather_observations', 'observation_time', { + name: 'idx_weather_observations_time', + }); + + console.log('Missing indexes added successfully.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Removing added indexes...'); + + pgm.dropIndex('validated_events', 'media_url', { + name: 'idx_validated_events_media_url', + ifExists: true, + }); + + pgm.dropIndex('payment_records', 'status', { + name: 'idx_payment_records_status', + ifExists: true, + }); + + pgm.dropIndex('subscription_history', 'action', { + name: 'idx_subscription_history_action', + ifExists: true, + }); + + pgm.dropIndex('raw_events', 'file_type', { + name: 'idx_raw_events_file_type', + ifExists: true, + }); + + pgm.dropIndex('devices', 'firmware_version', { + name: 'idx_devices_firmware_version', + ifExists: true, + }); + + pgm.dropIndex('device_certificates', 'expires_at', { + name: 'idx_device_certificates_expires_at', + ifExists: true, + }); + + pgm.dropIndex('user_subscriptions', 'current_period_end', { + name: 'idx_user_subscriptions_period_end', + ifExists: 
true, + }); + + pgm.dropIndex('user_subscriptions', ['status', 'current_period_end'], { + name: 'idx_user_subscriptions_status_period', + ifExists: true, + }); + + pgm.dropIndex('weather_observations', 'observation_time', { + name: 'idx_weather_observations_time', + ifExists: true, + }); + + console.log('Indexes removed.'); +}; diff --git a/meteor-web-backend/migrations/1766300000003_add-check-constraints.js b/meteor-web-backend/migrations/1766300000003_add-check-constraints.js new file mode 100644 index 0000000..3b607dc --- /dev/null +++ b/meteor-web-backend/migrations/1766300000003_add-check-constraints.js @@ -0,0 +1,136 @@ +/** + * Migration: Add CHECK constraints for data integrity + * + * Fixes: Missing constraints on numeric ranges + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding CHECK constraints...'); + + // devices.trust_score: should be between 0.0 and 1.0 + pgm.addConstraint('devices', 'chk_devices_trust_score', { + check: 'trust_score IS NULL OR (trust_score >= 0.0 AND trust_score <= 1.0)', + }); + + // validated_events.validation_score: should be between 0 and 1 + pgm.addConstraint('validated_events', 'chk_validated_events_validation_score', { + check: 'validation_score IS NULL OR (validation_score >= 0 AND validation_score <= 1)', + }); + + // validated_events.azimuth: should be between 0 and 360 + pgm.addConstraint('validated_events', 'chk_validated_events_azimuth', { + check: 'azimuth IS NULL OR (azimuth >= 0 AND azimuth <= 360)', + }); + + // validated_events.altitude: should be between -90 and 90 (elevation angle) + pgm.addConstraint('validated_events', 'chk_validated_events_altitude', { + check: 'altitude IS NULL OR (altitude >= -90 AND altitude <= 90)', + }); + + // 
validated_events.duration: should be positive + pgm.addConstraint('validated_events', 'chk_validated_events_duration', { + check: 'duration IS NULL OR duration >= 0', + }); + + // validated_events.velocity: should be positive + pgm.addConstraint('validated_events', 'chk_validated_events_velocity', { + check: 'velocity IS NULL OR velocity >= 0', + }); + + // device_security_events.risk_score: should be between 0 and 100 + pgm.addConstraint('device_security_events', 'chk_security_events_risk_score', { + check: 'risk_score >= 0 AND risk_score <= 100', + }); + + // device_security_events.false_positive_probability: should be between 0 and 1 + pgm.addConstraint('device_security_events', 'chk_security_events_fp_probability', { + check: 'false_positive_probability >= 0.0 AND false_positive_probability <= 1.0', + }); + + // device_certificates.key_size: should be valid key sizes + pgm.addConstraint('device_certificates', 'chk_certificates_key_size', { + check: 'key_size IN (1024, 2048, 3072, 4096, 256, 384, 521)', // RSA and EC key sizes + }); + + // device_configurations.is_active: only one active config per device + // This is enforced via partial unique index + pgm.createIndex('device_configurations', ['device_id'], { + name: 'idx_device_configurations_active_unique', + unique: true, + where: 'is_active = true', + }); + + // subscription_plans.price: should be non-negative + pgm.addConstraint('subscription_plans', 'chk_subscription_plans_price', { + check: 'price >= 0', + }); + + // subscription_plans.interval_count: should be positive + pgm.addConstraint('subscription_plans', 'chk_subscription_plans_interval_count', { + check: 'interval_count > 0', + }); + + // payment_records.amount: should be positive + pgm.addConstraint('payment_records', 'chk_payment_records_amount', { + check: 'amount > 0', + }); + + // device_registrations.retry_count: should be non-negative + pgm.addConstraint('device_registrations', 'chk_registrations_retry_count', { + check: 'retry_count >= 
0', + }); + + // camera_devices.exposure_count: should be non-negative + pgm.addConstraint('camera_devices', 'chk_camera_devices_exposure_count', { + check: 'exposure_count >= 0', + }); + + console.log('CHECK constraints added successfully.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Removing CHECK constraints...'); + + // Drop unique partial index + pgm.dropIndex('device_configurations', ['device_id'], { + name: 'idx_device_configurations_active_unique', + ifExists: true, + }); + + // Drop all CHECK constraints + const constraints = [ + { table: 'devices', name: 'chk_devices_trust_score' }, + { table: 'validated_events', name: 'chk_validated_events_validation_score' }, + { table: 'validated_events', name: 'chk_validated_events_azimuth' }, + { table: 'validated_events', name: 'chk_validated_events_altitude' }, + { table: 'validated_events', name: 'chk_validated_events_duration' }, + { table: 'validated_events', name: 'chk_validated_events_velocity' }, + { table: 'device_security_events', name: 'chk_security_events_risk_score' }, + { table: 'device_security_events', name: 'chk_security_events_fp_probability' }, + { table: 'device_certificates', name: 'chk_certificates_key_size' }, + { table: 'subscription_plans', name: 'chk_subscription_plans_price' }, + { table: 'subscription_plans', name: 'chk_subscription_plans_interval_count' }, + { table: 'payment_records', name: 'chk_payment_records_amount' }, + { table: 'device_registrations', name: 'chk_registrations_retry_count' }, + { table: 'camera_devices', name: 'chk_camera_devices_exposure_count' }, + ]; + + constraints.forEach(({ table, name }) => { + pgm.dropConstraint(table, name, { ifExists: true }); + }); + + console.log('CHECK constraints removed.'); +}; diff --git a/meteor-web-backend/migrations/1766300000004_add-user-audit-fields.js 
b/meteor-web-backend/migrations/1766300000004_add-user-audit-fields.js new file mode 100644 index 0000000..6b70d1f --- /dev/null +++ b/meteor-web-backend/migrations/1766300000004_add-user-audit-fields.js @@ -0,0 +1,141 @@ +/** + * Migration: Add missing user audit and tracking fields + * + * Fixes: Missing important user fields (last_login_at, timezone, email_verified_at) + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding user audit and tracking fields...'); + + // Add fields to user_profiles + pgm.addColumns('user_profiles', { + last_login_at: { + type: 'timestamptz', + notNull: false, + comment: 'Timestamp of last successful login', + }, + timezone: { + type: 'varchar(50)', + notNull: false, + default: 'UTC', + comment: 'User timezone preference (e.g., Asia/Shanghai)', + }, + locale: { + type: 'varchar(10)', + notNull: false, + default: 'zh-CN', + comment: 'User locale preference (e.g., en-US, zh-CN)', + }, + login_count: { + type: 'integer', + notNull: true, + default: 0, + comment: 'Total number of logins', + }, + }); + + // Add fields to user_identities + pgm.addColumns('user_identities', { + email_verified_at: { + type: 'timestamptz', + notNull: false, + comment: 'Timestamp when email was verified', + }, + last_auth_at: { + type: 'timestamptz', + notNull: false, + comment: 'Timestamp of last authentication attempt', + }, + auth_failure_count: { + type: 'integer', + notNull: true, + default: 0, + comment: 'Count of consecutive authentication failures', + }, + locked_until: { + type: 'timestamptz', + notNull: false, + comment: 'Account locked until this timestamp (for brute-force protection)', + }, + password_changed_at: { + type: 'timestamptz', + notNull: false, + comment: 'Timestamp when password was last 
changed', + }, + }); + + // Add CHECK constraint for auth_failure_count + pgm.addConstraint('user_identities', 'chk_user_identities_auth_failure_count', { + check: 'auth_failure_count >= 0', + }); + + // Add CHECK constraint for login_count + pgm.addConstraint('user_profiles', 'chk_user_profiles_login_count', { + check: 'login_count >= 0', + }); + + // Add index for locked accounts query + pgm.createIndex('user_identities', 'locked_until', { + name: 'idx_user_identities_locked_until', + where: 'locked_until IS NOT NULL', + }); + + // Add index for unverified emails + pgm.createIndex('user_identities', 'email_verified_at', { + name: 'idx_user_identities_email_unverified', + where: "email IS NOT NULL AND email_verified_at IS NULL AND provider = 'email'", + }); + + console.log('User audit and tracking fields added successfully.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Removing user audit and tracking fields...'); + + // Drop indexes + pgm.dropIndex('user_identities', 'locked_until', { + name: 'idx_user_identities_locked_until', + ifExists: true, + }); + + pgm.dropIndex('user_identities', 'email_verified_at', { + name: 'idx_user_identities_email_unverified', + ifExists: true, + }); + + // Drop constraints + pgm.dropConstraint('user_identities', 'chk_user_identities_auth_failure_count', { + ifExists: true, + }); + + pgm.dropConstraint('user_profiles', 'chk_user_profiles_login_count', { + ifExists: true, + }); + + // Drop columns from user_identities + pgm.dropColumns('user_identities', [ + 'email_verified_at', + 'last_auth_at', + 'auth_failure_count', + 'locked_until', + 'password_changed_at', + ]); + + // Drop columns from user_profiles + pgm.dropColumns('user_profiles', ['last_login_at', 'timezone', 'locale', 'login_count']); + + console.log('User audit and tracking fields removed.'); +}; diff --git 
a/meteor-web-backend/migrations/1766300000005_add-soft-delete.js b/meteor-web-backend/migrations/1766300000005_add-soft-delete.js new file mode 100644 index 0000000..ec042f4 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000005_add-soft-delete.js @@ -0,0 +1,126 @@ +/** + * Migration: Add soft delete mechanism to all tables + * + * Fixes: Most tables use CASCADE delete with no way to recover deleted data + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * All tables that need soft delete support + */ +const TABLES_WITH_SOFT_DELETE = [ + 'user_profiles', + 'user_identities', + 'devices', + 'inventory_devices', + 'raw_events', + 'validated_events', + 'analysis_results', + 'weather_stations', + 'weather_observations', + 'weather_forecasts', + 'camera_devices', + 'subscription_plans', + 'user_subscriptions', + 'subscription_history', + 'payment_records', + 'device_configurations', + 'device_certificates', + 'device_security_events', + 'device_registrations', +]; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding soft delete columns to all tables...'); + + // Add deleted_at column to all tables + TABLES_WITH_SOFT_DELETE.forEach((table) => { + console.log(` Adding deleted_at to ${table}...`); + + pgm.addColumn(table, { + deleted_at: { + type: 'timestamptz', + notNull: false, + comment: 'Soft delete timestamp. 
NULL = not deleted.', + }, + }); + + // Create partial index for efficient filtering of non-deleted records + pgm.createIndex(table, 'deleted_at', { + name: `idx_${table}_deleted_at`, + where: 'deleted_at IS NULL', + }); + }); + + // Create a helper function for soft delete operations + pgm.sql(` + -- Function to soft delete a record + CREATE OR REPLACE FUNCTION soft_delete() + RETURNS TRIGGER AS $$ + BEGIN + NEW.deleted_at = CURRENT_TIMESTAMP; + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + + -- Function to check if record is deleted + CREATE OR REPLACE FUNCTION is_deleted(deleted_at TIMESTAMPTZ) + RETURNS BOOLEAN AS $$ + BEGIN + RETURN deleted_at IS NOT NULL; + END; + $$ LANGUAGE plpgsql IMMUTABLE; + + -- Function to restore a soft-deleted record + CREATE OR REPLACE FUNCTION restore_deleted() + RETURNS TRIGGER AS $$ + BEGIN + NEW.deleted_at = NULL; + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + `); + + console.log('Soft delete columns added to all tables successfully.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Removing soft delete columns from all tables...'); + + // Drop helper functions + pgm.sql(` + DROP FUNCTION IF EXISTS soft_delete() CASCADE; + DROP FUNCTION IF EXISTS is_deleted(TIMESTAMPTZ) CASCADE; + DROP FUNCTION IF EXISTS restore_deleted() CASCADE; + `); + + // Remove deleted_at from all tables in reverse order + [...TABLES_WITH_SOFT_DELETE].reverse().forEach((table) => { + console.log(` Removing deleted_at from ${table}...`); + + // Drop index first + pgm.dropIndex(table, 'deleted_at', { + name: `idx_${table}_deleted_at`, + ifExists: true, + }); + + // Drop column + pgm.dropColumn(table, 'deleted_at', { + ifExists: true, + }); + }); + + console.log('Soft delete columns removed from all tables.'); +}; diff --git a/meteor-web-backend/migrations/1766300000006_prepare-uuid-migration.js 
b/meteor-web-backend/migrations/1766300000006_prepare-uuid-migration.js new file mode 100644 index 0000000..4d71d5b --- /dev/null +++ b/meteor-web-backend/migrations/1766300000006_prepare-uuid-migration.js @@ -0,0 +1,125 @@ +/** + * Migration: Prepare UUID migration - Add new UUID columns + * + * Phase 1 of 3 for migrating Serial primary keys to UUID + * This migration adds new UUID columns and generates values + * + * Affected tables: + * - analysis_results + * - weather_stations + * - weather_forecasts + * - weather_observations + * - subscription_plans + * - user_subscriptions + * - subscription_history + * - payment_records + * - camera_devices + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * Tables with serial primary keys that need UUID migration + */ +const TABLES_TO_MIGRATE = [ + 'analysis_results', + 'weather_stations', + 'weather_forecasts', + 'weather_observations', + 'subscription_plans', + 'user_subscriptions', + 'subscription_history', + 'payment_records', + 'camera_devices', +]; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Phase 1: Adding new UUID columns to tables with Serial primary keys...'); + + // Ensure pgcrypto extension is available for gen_random_uuid() + pgm.sql('CREATE EXTENSION IF NOT EXISTS pgcrypto;'); + + // Add new_id UUID column to each table + TABLES_TO_MIGRATE.forEach((table) => { + console.log(` Adding new_id to ${table}...`); + + pgm.addColumn(table, { + new_id: { + type: 'uuid', + notNull: false, + comment: 'Temporary UUID column for migration from serial ID', + }, + }); + + // Generate UUID for all existing rows + pgm.sql(`UPDATE ${table} SET new_id = gen_random_uuid() WHERE new_id IS NULL;`); + + // Create index on new_id for faster lookups during migration + pgm.createIndex(table, 'new_id', { + name: 
`idx_${table}_new_id`, + unique: true, + where: 'new_id IS NOT NULL', + }); + }); + + // Create mapping tables to preserve old_id -> new_id relationships + // This is crucial for data migration and rollback + pgm.sql(` + -- Create mapping table for ID lookups during migration + CREATE TABLE IF NOT EXISTS _migration_id_mapping ( + table_name VARCHAR(100) NOT NULL, + old_id INTEGER NOT NULL, + new_id UUID NOT NULL, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (table_name, old_id) + ); + + CREATE INDEX IF NOT EXISTS idx_migration_mapping_new_id + ON _migration_id_mapping(table_name, new_id); + `); + + // Populate mapping table + TABLES_TO_MIGRATE.forEach((table) => { + pgm.sql(` + INSERT INTO _migration_id_mapping (table_name, old_id, new_id) + SELECT '${table}', id, new_id FROM ${table} + ON CONFLICT (table_name, old_id) DO UPDATE SET new_id = EXCLUDED.new_id; + `); + }); + + console.log('Phase 1 complete: UUID columns added and populated.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back Phase 1: Removing UUID columns...'); + + // Drop mapping table + pgm.sql('DROP TABLE IF EXISTS _migration_id_mapping CASCADE;'); + + // Remove new_id columns from all tables + TABLES_TO_MIGRATE.forEach((table) => { + console.log(` Removing new_id from ${table}...`); + + pgm.dropIndex(table, 'new_id', { + name: `idx_${table}_new_id`, + ifExists: true, + }); + + pgm.dropColumn(table, 'new_id', { + ifExists: true, + }); + }); + + console.log('Phase 1 rollback complete.'); +}; diff --git a/meteor-web-backend/migrations/1766300000007_update-fk-references.js b/meteor-web-backend/migrations/1766300000007_update-fk-references.js new file mode 100644 index 0000000..2bce197 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000007_update-fk-references.js @@ -0,0 +1,179 @@ +/** + * Migration: Update foreign key 
references to use new UUID columns + * + * Phase 2 of 3 for migrating Serial primary keys to UUID + * This migration adds UUID foreign key columns and migrates the data + * + * Foreign Key Relationships to Update: + * - weather_forecasts.station_id -> weather_stations.id + * - weather_observations.weather_station_id -> weather_stations.id + * - user_subscriptions.subscription_plan_id -> subscription_plans.id + * - subscription_history.user_subscription_id -> user_subscriptions.id + * - payment_records.user_subscription_id -> user_subscriptions.id + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Phase 2: Updating foreign key references to use UUID...'); + + // ===================================================== + // 1. weather_forecasts.station_id -> weather_stations + // ===================================================== + console.log(' Updating weather_forecasts.station_id...'); + + pgm.addColumn('weather_forecasts', { + new_station_id: { + type: 'uuid', + notNull: false, + comment: 'Temporary UUID FK for migration', + }, + }); + + pgm.sql(` + UPDATE weather_forecasts wf + SET new_station_id = ws.new_id + FROM weather_stations ws + WHERE wf.station_id = ws.id; + `); + + // ===================================================== + // 2. 
weather_observations.weather_station_id -> weather_stations + // ===================================================== + console.log(' Updating weather_observations.weather_station_id...'); + + pgm.addColumn('weather_observations', { + new_weather_station_id: { + type: 'uuid', + notNull: false, + comment: 'Temporary UUID FK for migration', + }, + }); + + pgm.sql(` + UPDATE weather_observations wo + SET new_weather_station_id = ws.new_id + FROM weather_stations ws + WHERE wo.weather_station_id = ws.id; + `); + + // ===================================================== + // 3. user_subscriptions.subscription_plan_id -> subscription_plans + // ===================================================== + console.log(' Updating user_subscriptions.subscription_plan_id...'); + + pgm.addColumn('user_subscriptions', { + new_subscription_plan_id: { + type: 'uuid', + notNull: false, + comment: 'Temporary UUID FK for migration', + }, + }); + + pgm.sql(` + UPDATE user_subscriptions us + SET new_subscription_plan_id = sp.new_id + FROM subscription_plans sp + WHERE us.subscription_plan_id = sp.id; + `); + + // ===================================================== + // 4. subscription_history.user_subscription_id -> user_subscriptions + // ===================================================== + console.log(' Updating subscription_history.user_subscription_id...'); + + pgm.addColumn('subscription_history', { + new_user_subscription_id: { + type: 'uuid', + notNull: false, + comment: 'Temporary UUID FK for migration', + }, + }); + + pgm.sql(` + UPDATE subscription_history sh + SET new_user_subscription_id = us.new_id + FROM user_subscriptions us + WHERE sh.user_subscription_id = us.id; + `); + + // ===================================================== + // 5. 
payment_records.user_subscription_id -> user_subscriptions + // ===================================================== + console.log(' Updating payment_records.user_subscription_id...'); + + pgm.addColumn('payment_records', { + new_user_subscription_id: { + type: 'uuid', + notNull: false, + comment: 'Temporary UUID FK for migration', + }, + }); + + pgm.sql(` + UPDATE payment_records pr + SET new_user_subscription_id = us.new_id + FROM user_subscriptions us + WHERE pr.user_subscription_id = us.id; + `); + + // Create indexes on new FK columns for validation + pgm.createIndex('weather_forecasts', 'new_station_id', { + name: 'idx_weather_forecasts_new_station_id', + }); + + pgm.createIndex('weather_observations', 'new_weather_station_id', { + name: 'idx_weather_observations_new_station_id', + }); + + pgm.createIndex('user_subscriptions', 'new_subscription_plan_id', { + name: 'idx_user_subscriptions_new_plan_id', + }); + + pgm.createIndex('subscription_history', 'new_user_subscription_id', { + name: 'idx_subscription_history_new_sub_id', + }); + + pgm.createIndex('payment_records', 'new_user_subscription_id', { + name: 'idx_payment_records_new_sub_id', + }); + + console.log('Phase 2 complete: Foreign key references updated.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back Phase 2: Removing UUID FK columns...'); + + // Drop indexes + const indexes = [ + { table: 'weather_forecasts', name: 'idx_weather_forecasts_new_station_id' }, + { table: 'weather_observations', name: 'idx_weather_observations_new_station_id' }, + { table: 'user_subscriptions', name: 'idx_user_subscriptions_new_plan_id' }, + { table: 'subscription_history', name: 'idx_subscription_history_new_sub_id' }, + { table: 'payment_records', name: 'idx_payment_records_new_sub_id' }, + ]; + + indexes.forEach(({ table, name }) => { + pgm.dropIndex(table, 
[], { name, ifExists: true }); + }); + + // Drop columns + pgm.dropColumn('weather_forecasts', 'new_station_id', { ifExists: true }); + pgm.dropColumn('weather_observations', 'new_weather_station_id', { ifExists: true }); + pgm.dropColumn('user_subscriptions', 'new_subscription_plan_id', { ifExists: true }); + pgm.dropColumn('subscription_history', 'new_user_subscription_id', { ifExists: true }); + pgm.dropColumn('payment_records', 'new_user_subscription_id', { ifExists: true }); + + console.log('Phase 2 rollback complete.'); +}; diff --git a/meteor-web-backend/migrations/1766300000008_switch-primary-keys.js b/meteor-web-backend/migrations/1766300000008_switch-primary-keys.js new file mode 100644 index 0000000..fd144c4 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000008_switch-primary-keys.js @@ -0,0 +1,266 @@ +/** + * Migration: Switch from Serial to UUID primary keys + * + * Phase 3 of 3 for migrating Serial primary keys to UUID + * This migration performs the actual switch: + * 1. Drop old foreign key constraints + * 2. Drop old primary keys + * 3. Drop old columns + * 4. Rename new columns + * 5. Add new primary keys and foreign keys + * + * WARNING: This is a destructive migration. Ensure you have backups! + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Phase 3: Switching to UUID primary keys...'); + console.log('WARNING: This is a destructive migration. 
Ensure backups exist!'); + + // ===================================================== + // Step 1: Drop existing foreign key constraints + // ===================================================== + console.log(' Step 1: Dropping existing foreign key constraints...'); + + // Drop FK constraints (dependent tables first) + pgm.dropConstraint('payment_records', 'payment_records_user_subscription_id_fkey', { + ifExists: true, + }); + pgm.dropConstraint('subscription_history', 'subscription_history_user_subscription_id_fkey', { + ifExists: true, + }); + pgm.dropConstraint('user_subscriptions', 'user_subscriptions_subscription_plan_id_fkey', { + ifExists: true, + }); + pgm.dropConstraint('weather_forecasts', 'weather_forecasts_station_id_fkey', { + ifExists: true, + }); + pgm.dropConstraint('weather_observations', 'weather_observations_weather_station_id_fkey', { + ifExists: true, + }); + + // ===================================================== + // Step 2: Drop old indexes on FK columns + // ===================================================== + console.log(' Step 2: Dropping old indexes...'); + + // Drop indexes that reference old columns + pgm.dropIndex('weather_forecasts', 'station_id', { ifExists: true }); + pgm.dropIndex('weather_observations', 'weather_station_id', { ifExists: true }); + pgm.dropIndex('user_subscriptions', 'subscription_plan_id', { ifExists: true }); + pgm.dropIndex('subscription_history', 'user_subscription_id', { ifExists: true }); + pgm.dropIndex('payment_records', 'user_subscription_id', { ifExists: true }); + + // ===================================================== + // Step 3: Drop old primary keys + // ===================================================== + console.log(' Step 3: Dropping old primary keys...'); + + const tablesToMigrate = [ + 'analysis_results', + 'weather_stations', + 'weather_forecasts', + 'weather_observations', + 'subscription_plans', + 'user_subscriptions', + 'subscription_history', + 'payment_records', + 
'camera_devices', + ]; + + tablesToMigrate.forEach((table) => { + pgm.dropConstraint(table, `${table}_pkey`, { ifExists: true }); + }); + + // ===================================================== + // Step 4: Drop old columns and rename new columns + // ===================================================== + console.log(' Step 4: Dropping old columns and renaming new columns...'); + + // For each table, drop old id and rename new_id to id + tablesToMigrate.forEach((table) => { + // Drop old id column + pgm.dropColumn(table, 'id', { ifExists: true }); + + // Rename new_id to id + pgm.renameColumn(table, 'new_id', 'id'); + + // Make id NOT NULL and add a server-side UUID default so future INSERTs work + pgm.alterColumn(table, 'id', { notNull: true, default: pgm.func('gen_random_uuid()') }); + }); + + // Rename FK columns + pgm.dropColumn('weather_forecasts', 'station_id', { ifExists: true }); + pgm.renameColumn('weather_forecasts', 'new_station_id', 'station_id'); + + pgm.dropColumn('weather_observations', 'weather_station_id', { ifExists: true }); + pgm.renameColumn('weather_observations', 'new_weather_station_id', 'weather_station_id'); + + pgm.dropColumn('user_subscriptions', 'subscription_plan_id', { ifExists: true }); + pgm.renameColumn('user_subscriptions', 'new_subscription_plan_id', 'subscription_plan_id'); + + pgm.dropColumn('subscription_history', 'user_subscription_id', { ifExists: true }); + pgm.renameColumn('subscription_history', 'new_user_subscription_id', 'user_subscription_id'); + + pgm.dropColumn('payment_records', 'user_subscription_id', { ifExists: true }); + pgm.renameColumn('payment_records', 'new_user_subscription_id', 'user_subscription_id'); + + // ===================================================== + // Step 5: Add new primary key constraints + // ===================================================== + console.log(' Step 5: Adding new primary key constraints...'); + + tablesToMigrate.forEach((table) => { + pgm.addConstraint(table, `${table}_pkey`, { primaryKey: 'id' }); + }); + + // 
===================================================== + // Step 6: Add new foreign key constraints + // ===================================================== + console.log(' Step 6: Adding new foreign key constraints...'); + + pgm.addConstraint('weather_forecasts', 'weather_forecasts_station_id_fkey', { + foreignKeys: { + columns: 'station_id', + references: 'weather_stations(id)', + onDelete: 'CASCADE', + }, + }); + + pgm.addConstraint('weather_observations', 'weather_observations_weather_station_id_fkey', { + foreignKeys: { + columns: 'weather_station_id', + references: 'weather_stations(id)', + onDelete: 'CASCADE', + }, + }); + + pgm.addConstraint('user_subscriptions', 'user_subscriptions_subscription_plan_id_fkey', { + foreignKeys: { + columns: 'subscription_plan_id', + references: 'subscription_plans(id)', + onDelete: 'CASCADE', + }, + }); + + pgm.addConstraint('subscription_history', 'subscription_history_user_subscription_id_fkey', { + foreignKeys: { + columns: 'user_subscription_id', + references: 'user_subscriptions(id)', + onDelete: 'CASCADE', + }, + }); + + pgm.addConstraint('payment_records', 'payment_records_user_subscription_id_fkey', { + foreignKeys: { + columns: 'user_subscription_id', + references: 'user_subscriptions(id)', + onDelete: 'CASCADE', + }, + }); + + // ===================================================== + // Step 7: Recreate indexes on FK columns + // ===================================================== + console.log(' Step 7: Recreating indexes on FK columns...'); + + pgm.createIndex('weather_forecasts', 'station_id', { + name: 'idx_weather_forecasts_station_id', + }); + + pgm.createIndex('weather_observations', 'weather_station_id', { + name: 'idx_weather_observations_station_id', + }); + + pgm.createIndex('user_subscriptions', 'subscription_plan_id', { + name: 'idx_user_subscriptions_plan_id', + }); + + pgm.createIndex('subscription_history', 'user_subscription_id', { + name: 'idx_subscription_history_sub_id', + }); + + 
pgm.createIndex('payment_records', 'user_subscription_id', { + name: 'idx_payment_records_sub_id', + }); + + // ===================================================== + // Step 8: Clean up temporary indexes + // ===================================================== + console.log(' Step 8: Cleaning up temporary indexes...'); + + tablesToMigrate.forEach((table) => { + pgm.dropIndex(table, [], { name: `idx_${table}_new_id`, ifExists: true }); + }); + + // Drop temporary indexes from Phase 2 + pgm.dropIndex('weather_forecasts', [], { + name: 'idx_weather_forecasts_new_station_id', + ifExists: true, + }); + pgm.dropIndex('weather_observations', [], { + name: 'idx_weather_observations_new_station_id', + ifExists: true, + }); + pgm.dropIndex('user_subscriptions', [], { + name: 'idx_user_subscriptions_new_plan_id', + ifExists: true, + }); + pgm.dropIndex('subscription_history', [], { + name: 'idx_subscription_history_new_sub_id', + ifExists: true, + }); + pgm.dropIndex('payment_records', [], { + name: 'idx_payment_records_new_sub_id', + ifExists: true, + }); + + console.log('Phase 3 complete: All tables now use UUID primary keys.'); + console.log(''); + console.log('IMPORTANT: The _migration_id_mapping table has been preserved for reference.'); + console.log('You may drop it manually after verifying the migration: DROP TABLE _migration_id_mapping;'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back Phase 3...'); + console.log('WARNING: This rollback requires the _migration_id_mapping table to exist!'); + + // This rollback is complex and requires the mapping table + // In production, consider using a backup restore instead + + pgm.sql(` + -- Check if mapping table exists + DO $$ + BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = '_migration_id_mapping') THEN + RAISE EXCEPTION 'Cannot 
rollback: _migration_id_mapping table does not exist. Restore from backup.'; + END IF; + END $$; + `); + + // The full rollback would be very complex. + // In practice, recommend restoring from backup before Phase 3. + console.log(''); + console.log('ROLLBACK NOTE: For Phase 3, it is recommended to restore from a backup'); + console.log('taken before the migration rather than attempting to reverse the changes.'); + console.log(''); + console.log('If you must rollback, the _migration_id_mapping table contains the'); + console.log('old_id -> new_id mappings needed to reconstruct the serial IDs.'); + + throw new Error( + 'Phase 3 rollback requires manual intervention or backup restore. ' + + 'See migration comments for details.', + ); +}; diff --git a/meteor-web-backend/migrations/1766300000009_camera-device-fk.js b/meteor-web-backend/migrations/1766300000009_camera-device-fk.js new file mode 100644 index 0000000..ce0ae17 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000009_camera-device-fk.js @@ -0,0 +1,114 @@ +/** + * Migration: Add CameraDevice foreign key to devices + * + * Fixes: CameraDevice has device_id as varchar but no FK to devices table + * Changes: + * - Change device_id from varchar(100) to uuid + * - Add FK constraint to devices table + * - Change location from text to jsonb + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding CameraDevice FK to devices...'); + + // Step 1: Add new UUID device_id column + pgm.addColumn('camera_devices', { + device_uuid: { + type: 'uuid', + notNull: false, + comment: 'Foreign key to devices table', + }, + }); + + // Step 2: Try to migrate existing data if device_id matches a device + // Note: This assumes device_id contains hardware_id or similar identifier + 
pgm.sql(` + -- Try to match existing device_id to devices by hardware_id + UPDATE camera_devices cd + SET device_uuid = d.id + FROM devices d + WHERE cd.device_id = d.hardware_id + OR cd.device_id = d.id::text; + `); + + // Step 3: Rename old device_id to legacy_device_id + pgm.renameColumn('camera_devices', 'device_id', 'legacy_device_id'); + + // Step 4: Rename device_uuid to device_id + pgm.renameColumn('camera_devices', 'device_uuid', 'device_id'); + + // Step 5: Add FK constraint (nullable for cameras without matching device) + pgm.addConstraint('camera_devices', 'camera_devices_device_id_fkey', { + foreignKeys: { + columns: 'device_id', + references: 'devices(id)', + onDelete: 'SET NULL', + }, + }); + + // Step 6: Add index on device_id + pgm.createIndex('camera_devices', 'device_id', { + name: 'idx_camera_devices_device_id', + where: 'device_id IS NOT NULL', + }); + + // Step 7: Convert location from text to jsonb + pgm.addColumn('camera_devices', { + location_jsonb: { + type: 'jsonb', + notNull: false, + comment: 'Structured location data {latitude, longitude, altitude, site_name}', + }, + }); + + // Migrate existing location text to jsonb + pgm.sql(` + UPDATE camera_devices + SET location_jsonb = jsonb_build_object('site_name', location) + WHERE location IS NOT NULL AND location != ''; + `); + + // Rename columns + pgm.renameColumn('camera_devices', 'location', 'legacy_location'); + pgm.renameColumn('camera_devices', 'location_jsonb', 'location'); + + console.log('CameraDevice FK and location migration complete.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back CameraDevice FK changes...'); + + // Restore location + pgm.renameColumn('camera_devices', 'location', 'location_jsonb'); + pgm.renameColumn('camera_devices', 'legacy_location', 'location'); + pgm.dropColumn('camera_devices', 'location_jsonb', 
{ ifExists: true }); + + // Restore device_id + pgm.dropIndex('camera_devices', 'device_id', { + name: 'idx_camera_devices_device_id', + ifExists: true, + }); + + pgm.dropConstraint('camera_devices', 'camera_devices_device_id_fkey', { + ifExists: true, + }); + + pgm.renameColumn('camera_devices', 'device_id', 'device_uuid'); + pgm.renameColumn('camera_devices', 'legacy_device_id', 'device_id'); + pgm.dropColumn('camera_devices', 'device_uuid', { ifExists: true }); + + console.log('CameraDevice FK rollback complete.'); +}; diff --git a/meteor-web-backend/migrations/1766300000010_validated-event-station-fk.js b/meteor-web-backend/migrations/1766300000010_validated-event-station-fk.js new file mode 100644 index 0000000..c507660 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000010_validated-event-station-fk.js @@ -0,0 +1,95 @@ +/** + * Migration: Add ValidatedEvent weather_station_id FK + * + * Fixes: ValidatedEvent has station_name as varchar instead of FK to WeatherStation + * Changes: + * - Add weather_station_id UUID FK + * - Migrate data by matching station_name + * - Keep station_name for historical reference + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding ValidatedEvent weather_station_id FK...'); + + // Step 1: Add weather_station_id column + pgm.addColumn('validated_events', { + weather_station_id: { + type: 'uuid', + notNull: false, + comment: 'Foreign key to weather_stations table', + }, + }); + + // Step 2: Migrate data by matching station_name + pgm.sql(` + UPDATE validated_events ve + SET weather_station_id = ws.id + FROM weather_stations ws + WHERE ve.station_name IS NOT NULL + AND ve.station_name != '' + AND LOWER(TRIM(ve.station_name)) = LOWER(TRIM(ws.station_name)); + `); + + // Step 
3: Add FK constraint + pgm.addConstraint('validated_events', 'validated_events_weather_station_id_fkey', { + foreignKeys: { + columns: 'weather_station_id', + references: 'weather_stations(id)', + onDelete: 'SET NULL', + }, + }); + + // Step 4: Add index + pgm.createIndex('validated_events', 'weather_station_id', { + name: 'idx_validated_events_weather_station_id', + where: 'weather_station_id IS NOT NULL', + }); + + // Step 5: Add comment to station_name indicating it's for historical reference + pgm.sql(` + COMMENT ON COLUMN validated_events.station_name IS + 'Historical station name (kept for reference). Use weather_station_id for queries.'; + `); + + console.log('ValidatedEvent weather_station_id FK added.'); + console.log('Note: station_name column preserved for historical reference.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back ValidatedEvent weather_station_id FK...'); + + // Remove index + pgm.dropIndex('validated_events', 'weather_station_id', { + name: 'idx_validated_events_weather_station_id', + ifExists: true, + }); + + // Remove FK constraint + pgm.dropConstraint('validated_events', 'validated_events_weather_station_id_fkey', { + ifExists: true, + }); + + // Remove column + pgm.dropColumn('validated_events', 'weather_station_id', { ifExists: true }); + + // Restore station_name comment + pgm.sql(` + COMMENT ON COLUMN validated_events.station_name IS + 'Name of the weather station associated with this event'; + `); + + console.log('ValidatedEvent weather_station_id FK rollback complete.'); +}; diff --git a/meteor-web-backend/migrations/1766300000011_analysis-result-relations.js b/meteor-web-backend/migrations/1766300000011_analysis-result-relations.js new file mode 100644 index 0000000..fc40e49 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000011_analysis-result-relations.js @@ -0,0 
+1,150 @@ +/** + * Migration: Add AnalysisResult relations + * + * Fixes: AnalysisResult is completely orphaned with no relationships + * Changes: + * - Add validated_event_id FK (nullable) + * - Add device_id FK (nullable) + * - Add user_profile_id FK (nullable) + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding AnalysisResult relations...'); + + // Add relationship columns + pgm.addColumns('analysis_results', { + validated_event_id: { + type: 'uuid', + notNull: false, + comment: 'FK to validated_events - the event this analysis is for', + }, + device_id: { + type: 'uuid', + notNull: false, + comment: 'FK to devices - the device that captured the analyzed event', + }, + user_profile_id: { + type: 'uuid', + notNull: false, + comment: 'FK to user_profiles - the owner of the analyzed event', + }, + raw_event_id: { + type: 'uuid', + notNull: false, + comment: 'FK to raw_events - the original raw event', + }, + }); + + // Add FK constraints + pgm.addConstraint('analysis_results', 'analysis_results_validated_event_id_fkey', { + foreignKeys: { + columns: 'validated_event_id', + references: 'validated_events(id)', + onDelete: 'SET NULL', + }, + }); + + pgm.addConstraint('analysis_results', 'analysis_results_device_id_fkey', { + foreignKeys: { + columns: 'device_id', + references: 'devices(id)', + onDelete: 'SET NULL', + }, + }); + + pgm.addConstraint('analysis_results', 'analysis_results_user_profile_id_fkey', { + foreignKeys: { + columns: 'user_profile_id', + references: 'user_profiles(id)', + onDelete: 'SET NULL', + }, + }); + + pgm.addConstraint('analysis_results', 'analysis_results_raw_event_id_fkey', { + foreignKeys: { + columns: 'raw_event_id', + references: 'raw_events(id)', + onDelete: 'SET NULL', + }, + }); + 
+ // Add indexes + pgm.createIndex('analysis_results', 'validated_event_id', { + name: 'idx_analysis_results_validated_event_id', + where: 'validated_event_id IS NOT NULL', + }); + + pgm.createIndex('analysis_results', 'device_id', { + name: 'idx_analysis_results_device_id', + where: 'device_id IS NOT NULL', + }); + + pgm.createIndex('analysis_results', 'user_profile_id', { + name: 'idx_analysis_results_user_profile_id', + where: 'user_profile_id IS NOT NULL', + }); + + pgm.createIndex('analysis_results', 'raw_event_id', { + name: 'idx_analysis_results_raw_event_id', + where: 'raw_event_id IS NOT NULL', + }); + + // Add composite index for common query pattern + pgm.createIndex('analysis_results', ['analysis_type', 'user_profile_id'], { + name: 'idx_analysis_results_type_user', + }); + + console.log('AnalysisResult relations added.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back AnalysisResult relations...'); + + // Drop indexes + const indexes = [ + 'idx_analysis_results_validated_event_id', + 'idx_analysis_results_device_id', + 'idx_analysis_results_user_profile_id', + 'idx_analysis_results_raw_event_id', + 'idx_analysis_results_type_user', + ]; + + indexes.forEach((name) => { + pgm.dropIndex('analysis_results', [], { name, ifExists: true }); + }); + + // Drop FK constraints + const constraints = [ + 'analysis_results_validated_event_id_fkey', + 'analysis_results_device_id_fkey', + 'analysis_results_user_profile_id_fkey', + 'analysis_results_raw_event_id_fkey', + ]; + + constraints.forEach((name) => { + pgm.dropConstraint('analysis_results', name, { ifExists: true }); + }); + + // Drop columns + pgm.dropColumns('analysis_results', [ + 'validated_event_id', + 'device_id', + 'user_profile_id', + 'raw_event_id', + ]); + + console.log('AnalysisResult relations rollback complete.'); +}; diff --git 
a/meteor-web-backend/migrations/1766300000012_inventory-device-link.js b/meteor-web-backend/migrations/1766300000012_inventory-device-link.js new file mode 100644 index 0000000..11810ef --- /dev/null +++ b/meteor-web-backend/migrations/1766300000012_inventory-device-link.js @@ -0,0 +1,89 @@ +/** + * Migration: Link Device to InventoryDevice + * + * Fixes: InventoryDevice and Device have no explicit relationship + * Changes: + * - Add inventory_device_id FK to devices table + * - Migrate existing data by matching hardware_id + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Linking Device to InventoryDevice...'); + + // Add inventory_device_id column + pgm.addColumn('devices', { + inventory_device_id: { + type: 'uuid', + notNull: false, + comment: 'FK to inventory_devices - the inventory record for this device', + }, + }); + + // Migrate data by matching hardware_id + pgm.sql(` + UPDATE devices d + SET inventory_device_id = inv.id + FROM inventory_devices inv + WHERE d.hardware_id = inv.hardware_id; + `); + + // Add FK constraint + pgm.addConstraint('devices', 'devices_inventory_device_id_fkey', { + foreignKeys: { + columns: 'inventory_device_id', + references: 'inventory_devices(id)', + onDelete: 'SET NULL', + }, + }); + + // Add unique constraint - one device per inventory record + pgm.createIndex('devices', 'inventory_device_id', { + name: 'idx_devices_inventory_device_id_unique', + unique: true, + where: 'inventory_device_id IS NOT NULL', + }); + + // Update inventory_devices.is_claimed based on linked devices + pgm.sql(` + UPDATE inventory_devices inv + SET is_claimed = true + WHERE EXISTS ( + SELECT 1 FROM devices d WHERE d.inventory_device_id = inv.id + ); + `); + + console.log('Device to InventoryDevice link 
added.'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back Device to InventoryDevice link...'); + + // Drop unique index + pgm.dropIndex('devices', 'inventory_device_id', { + name: 'idx_devices_inventory_device_id_unique', + ifExists: true, + }); + + // Drop FK constraint + pgm.dropConstraint('devices', 'devices_inventory_device_id_fkey', { + ifExists: true, + }); + + // Drop column + pgm.dropColumn('devices', 'inventory_device_id', { ifExists: true }); + + console.log('Device to InventoryDevice link rollback complete.'); +}; diff --git a/meteor-web-backend/migrations/1766300000013_encrypt-private-keys.js b/meteor-web-backend/migrations/1766300000013_encrypt-private-keys.js new file mode 100644 index 0000000..a483c31 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000013_encrypt-private-keys.js @@ -0,0 +1,155 @@ +/** + * Migration: Add encrypted storage for private keys + * + * Fixes: DeviceCertificate.privateKeyPem stored in plaintext - security risk + * Changes: + * - Add encrypted_private_key column for AES-256-GCM encrypted storage + * - Add encryption_key_id for key management + * - Add encryption_algorithm field + * - Mark private_key_pem as deprecated (not removed for backwards compatibility) + * + * Note: Actual encryption/decryption should be handled at application level. + * This migration only sets up the schema for encrypted storage. 
+ * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding encrypted private key storage...'); + + // Add encryption-related columns + pgm.addColumns('device_certificates', { + encrypted_private_key: { + type: 'text', + notNull: false, + comment: 'AES-256-GCM encrypted private key (base64 encoded ciphertext + IV + auth tag)', + }, + encryption_key_id: { + type: 'varchar(100)', + notNull: false, + comment: 'Identifier for the encryption key used (for key rotation support)', + }, + encryption_algorithm: { + type: 'varchar(50)', + notNull: false, + default: 'AES-256-GCM', + comment: 'Encryption algorithm used', + }, + encrypted_at: { + type: 'timestamptz', + notNull: false, + comment: 'Timestamp when the private key was encrypted', + }, + }); + + // Add deprecation comment to old column + pgm.sql(` + COMMENT ON COLUMN device_certificates.private_key_pem IS + 'DEPRECATED: Plaintext private key. Will be removed in future version. + Use encrypted_private_key instead for secure storage. 
+ This column is kept for backwards compatibility during migration.'; + `); + + // Add index on encryption_key_id for key rotation queries + pgm.createIndex('device_certificates', 'encryption_key_id', { + name: 'idx_device_certificates_encryption_key_id', + where: 'encryption_key_id IS NOT NULL', + }); + + // Add CHECK constraint for encryption algorithm + pgm.addConstraint('device_certificates', 'chk_device_certificates_encryption_algorithm', { + check: "encryption_algorithm IS NULL OR encryption_algorithm IN ('AES-256-GCM', 'AES-256-CBC', 'ChaCha20-Poly1305')", + }); + + // Create a view that excludes private key data for safer queries + pgm.sql(` + CREATE OR REPLACE VIEW device_certificates_public AS + SELECT + id, + device_id, + serial_number, + fingerprint, + certificate_type, + status, + subject_dn, + issuer_dn, + certificate_pem, + public_key_pem, + key_algorithm, + key_size, + signature_algorithm, + issued_at, + expires_at, + revoked_at, + revocation_reason, + x509_extensions, + usage_count, + last_used_at, + renewal_notified_at, + created_at, + updated_at, + deleted_at, + -- Indicate if encrypted key exists without exposing it + CASE WHEN encrypted_private_key IS NOT NULL THEN true ELSE false END AS has_encrypted_key, + encryption_key_id, + encryption_algorithm, + encrypted_at + FROM device_certificates; + + COMMENT ON VIEW device_certificates_public IS + 'Safe view of device_certificates excluding private key data'; + `); + + console.log('Encrypted private key storage added.'); + console.log(''); + console.log('IMPORTANT: To complete the migration:'); + console.log('1. Update application code to use encrypted_private_key'); + console.log('2. Configure encryption key in environment'); + console.log('3. Run data migration to encrypt existing private keys'); + console.log('4. 
Once verified, clear private_key_pem column'); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back encrypted private key storage...'); + + // Drop view + pgm.sql('DROP VIEW IF EXISTS device_certificates_public;'); + + // Drop constraint + pgm.dropConstraint('device_certificates', 'chk_device_certificates_encryption_algorithm', { + ifExists: true, + }); + + // Drop index + pgm.dropIndex('device_certificates', 'encryption_key_id', { + name: 'idx_device_certificates_encryption_key_id', + ifExists: true, + }); + + // Drop columns + pgm.dropColumns('device_certificates', [ + 'encrypted_private_key', + 'encryption_key_id', + 'encryption_algorithm', + 'encrypted_at', + ]); + + // Restore original comment + pgm.sql(` + COMMENT ON COLUMN device_certificates.private_key_pem IS + 'Private key in PEM format (optional, for key recovery)'; + `); + + console.log('Encrypted private key storage rollback complete.'); +}; diff --git a/meteor-web-backend/migrations/1766300000014_add-audit-fields.js b/meteor-web-backend/migrations/1766300000014_add-audit-fields.js new file mode 100644 index 0000000..473a2c2 --- /dev/null +++ b/meteor-web-backend/migrations/1766300000014_add-audit-fields.js @@ -0,0 +1,148 @@ +/** + * Migration: Add audit fields for tracking who made changes + * + * Fixes: No created_by/updated_by tracking on important tables + * Changes: + * - Add created_by and updated_by to key tables + * - These are nullable to support system-generated records + * + * @type {import('node-pg-migrate').ColumnDefinitions | undefined} + */ +export const shorthands = undefined; + +/** + * Tables that should have audit fields + */ +const TABLES_WITH_AUDIT_FIELDS = [ + 'devices', + 'device_configurations', + 'device_certificates', + 'subscription_plans', + 'validated_events', + 'analysis_results', + 'weather_stations', +]; + +/** + * 
@param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const up = (pgm) => { + console.log('Adding audit fields to key tables...'); + + TABLES_WITH_AUDIT_FIELDS.forEach((table) => { + console.log(` Adding audit fields to ${table}...`); + + pgm.addColumns(table, { + created_by: { + type: 'uuid', + notNull: false, + comment: 'User who created this record (null for system-generated)', + }, + updated_by: { + type: 'uuid', + notNull: false, + comment: 'User who last updated this record (null for system updates)', + }, + }); + + // Add FK constraints to user_profiles + pgm.addConstraint(table, `${table}_created_by_fkey`, { + foreignKeys: { + columns: 'created_by', + references: 'user_profiles(id)', + onDelete: 'SET NULL', + }, + }); + + pgm.addConstraint(table, `${table}_updated_by_fkey`, { + foreignKeys: { + columns: 'updated_by', + references: 'user_profiles(id)', + onDelete: 'SET NULL', + }, + }); + }); + + // Create a function to automatically set updated_by from context + // This works with the application setting a session variable + pgm.sql(` + -- Function to get current user from session context + CREATE OR REPLACE FUNCTION get_current_user_id() + RETURNS UUID AS $$ + BEGIN + -- Try to get user_id from session context (set by application) + RETURN NULLIF(current_setting('app.current_user_id', true), '')::UUID; + EXCEPTION + WHEN OTHERS THEN + RETURN NULL; + END; + $$ LANGUAGE plpgsql STABLE; + + COMMENT ON FUNCTION get_current_user_id() IS + 'Get current user ID from session context. Returns NULL if not set. 
+ Application should call: SET LOCAL app.current_user_id = ''''; before operations.'; + `); + + // Create triggers to automatically set audit fields + TABLES_WITH_AUDIT_FIELDS.forEach((table) => { + pgm.sql(` + CREATE OR REPLACE FUNCTION ${table}_audit_trigger() + RETURNS TRIGGER AS $$ + BEGIN + IF TG_OP = 'INSERT' THEN + NEW.created_by = COALESCE(NEW.created_by, get_current_user_id()); + NEW.updated_by = NEW.created_by; + ELSIF TG_OP = 'UPDATE' THEN + NEW.updated_by = COALESCE(get_current_user_id(), NEW.updated_by); + END IF; + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + + DROP TRIGGER IF EXISTS ${table}_audit ON ${table}; + CREATE TRIGGER ${table}_audit + BEFORE INSERT OR UPDATE ON ${table} + FOR EACH ROW + EXECUTE FUNCTION ${table}_audit_trigger(); + `); + }); + + console.log('Audit fields added to key tables.'); + console.log(''); + console.log('Usage: Set session context before operations:'); + console.log(" SET LOCAL app.current_user_id = '';"); +}; + +/** + * @param pgm {import('node-pg-migrate').MigrationBuilder} + * @param run {() => void | undefined} + * @returns {Promise | void} + */ +export const down = (pgm) => { + console.log('Rolling back audit fields...'); + + // Drop triggers and functions + TABLES_WITH_AUDIT_FIELDS.forEach((table) => { + pgm.sql(` + DROP TRIGGER IF EXISTS ${table}_audit ON ${table}; + DROP FUNCTION IF EXISTS ${table}_audit_trigger(); + `); + }); + + // Drop the helper function + pgm.sql('DROP FUNCTION IF EXISTS get_current_user_id();'); + + // Drop FK constraints and columns + TABLES_WITH_AUDIT_FIELDS.forEach((table) => { + console.log(` Removing audit fields from ${table}...`); + + pgm.dropConstraint(table, `${table}_created_by_fkey`, { ifExists: true }); + pgm.dropConstraint(table, `${table}_updated_by_fkey`, { ifExists: true }); + + pgm.dropColumns(table, ['created_by', 'updated_by']); + }); + + console.log('Audit fields rollback complete.'); +}; diff --git a/meteor-web-backend/package.json b/meteor-web-backend/package.json 
index fdc8cfd..906a17f 100644
--- a/meteor-web-backend/package.json
+++ b/meteor-web-backend/package.json
@@ -40,6 +40,7 @@
     "@nestjs/throttler": "^6.4.0",
     "@nestjs/typeorm": "^11.0.0",
     "@nestjs/websockets": "^11.1.6",
+    "@supabase/supabase-js": "^2.89.0",
     "@types/bcrypt": "^6.0.0",
     "@types/node-forge": "^1.3.13",
     "@types/passport-jwt": "^4.0.1",
diff --git a/meteor-web-backend/scripts/check-source-db.ts b/meteor-web-backend/scripts/check-source-db.ts
new file mode 100644
index 0000000..c3c52ef
--- /dev/null
+++ b/meteor-web-backend/scripts/check-source-db.ts
@@ -0,0 +1,70 @@
+import { Client } from 'pg';
+
+// SECURITY: read the source connection string from the environment.
+// Never commit credentials (user/password/host) to source control.
+// Example: SOURCE_DATABASE_URL=postgresql://user:pass@host:5433/dev
+const SOURCE_DATABASE_URL = process.env.SOURCE_DATABASE_URL ?? '';
+
+async function checkSourceDatabase() {
+  if (!SOURCE_DATABASE_URL) {
+    throw new Error('SOURCE_DATABASE_URL environment variable is required');
+  }
+
+  const client = new Client({
+    connectionString: SOURCE_DATABASE_URL,
+  });
+
+  try {
+    console.log('Connecting to source database...');
+    await client.connect();
+    
console.log('Connected successfully!\n'); + + const tables = [ + 'user_profiles', + 'user_identities', + 'inventory_devices', + 'devices', + 'device_registrations', + 'device_certificates', + 'device_configurations', + 'device_security_events', + 'camera_devices', + 'raw_events', + 'validated_events', + 'analysis_results', + 'subscription_plans', + 'user_subscriptions', + 'subscription_history', + 'payment_records', + 'weather_stations', + 'weather_observations', + 'weather_forecasts', + ]; + + console.log('Table counts in source database:'); + console.log('================================'); + + let totalRows = 0; + for (const table of tables) { + try { + const result = await client.query(`SELECT count(*) FROM ${table}`); + const count = parseInt(result.rows[0].count, 10); + console.log(`${table.padEnd(25)} : ${count}`); + totalRows += count; + } catch (error: any) { + console.log(`${table.padEnd(25)} : ERROR - ${error.message}`); + } + } + + console.log('================================'); + console.log(`Total rows: ${totalRows}`); + + // Check user_identities details + console.log('\n\nUser identities with email provider:'); + const emailUsers = await client.query( + "SELECT email FROM user_identities WHERE provider = 'email'" + ); + console.log(`Found ${emailUsers.rows.length} email users:`); + emailUsers.rows.forEach((row) => console.log(` - ${row.email}`)); + + } catch (error) { + console.error('Error:', error); + } finally { + await client.end(); + } +} + +checkSourceDatabase(); diff --git a/meteor-web-backend/scripts/create-supabase-users.ts b/meteor-web-backend/scripts/create-supabase-users.ts new file mode 100644 index 0000000..1fd3b57 --- /dev/null +++ b/meteor-web-backend/scripts/create-supabase-users.ts @@ -0,0 +1,167 @@ +import { createClient } from '@supabase/supabase-js'; +import { Client, ClientConfig } from 'pg'; +import * as dotenv from 'dotenv'; + +// Load environment variables from .env file +dotenv.config(); + +const SUPABASE_URL = 
process.env.SUPABASE_URL || 'https://ffbgowwvcqmdtvvabmnh.supabase.co';
+// Try SUPABASE_SERVICE_ROLE_KEY first, then fall back to SUPABASE_SECRET_KEY
+const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY || process.env.SUPABASE_SECRET_KEY || '';
+
+// Target database configuration (Supabase)
+// SECURITY: the database password must come from the environment — never
+// hard-code credentials in source control. Connection fails fast with an
+// auth error if SUPABASE_DB_PASSWORD is unset.
+const targetConfig: ClientConfig = {
+  host: 'aws-1-us-east-1.pooler.supabase.com',
+  port: 6543,
+  database: 'postgres',
+  user: 'postgres.ffbgowwvcqmdtvvabmnh',
+  password: process.env.SUPABASE_DB_PASSWORD ?? '',
+  // NOTE(review): rejectUnauthorized:false skips cert validation — confirm
+  // whether the pooler's CA can be pinned instead (ssl.ca).
+  ssl: { rejectUnauthorized: false },
+};
+
+// Shape of the rows read from user_identities for migration.
+interface UserIdentity {
+  id: string;
+  user_profile_id: string;
+  provider: string;
+  email: string;
+}
+
+async function createSupabaseUsers() {
+  if (!SUPABASE_SERVICE_ROLE_KEY) {
+    console.error('Error: SUPABASE_SERVICE_ROLE_KEY environment variable is required');
+    console.log('');
+    console.log('To get your service role key:');
+    console.log('1. Go to https://supabase.com/dashboard/project/ffbgowwvcqmdtvvabmnh/settings/api');
+    console.log('2. Copy the "service_role" key (under "Project API keys")');
+    console.log('3. 
Run: SUPABASE_SERVICE_ROLE_KEY="your-key" npx tsx scripts/create-supabase-users.ts');
+    process.exit(1);
+  }
+
+  // Admin client: no session persistence/refresh needed for a one-shot script.
+  const supabase = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, {
+    auth: {
+      autoRefreshToken: false,
+      persistSession: false,
+    },
+  });
+
+  const pgClient = new Client(targetConfig);
+
+  try {
+    console.log('Connecting to Supabase database...');
+    await pgClient.connect();
+    console.log('Connected!');
+
+    // Get all email users from user_identities
+    const result = await pgClient.query(`
+      SELECT id, user_profile_id, provider, email
+      FROM user_identities
+      WHERE provider = 'email'
+    `);
+
+    console.log(`\nFound ${result.rows.length} email users to migrate:\n`);
+
+    const results: { email: string; success: boolean; error?: string; authUserId?: string }[] = [];
+
+    // FIX: fetch the existing Supabase Auth users ONCE before the loop.
+    // The previous code called listUsers() on every iteration (O(n^2) API
+    // calls) and relied on the default page size of 50, so the existence
+    // check could silently miss users. perPage is raised to the API maximum.
+    const { data: allAuthUsers } = await supabase.auth.admin.listUsers({ page: 1, perPage: 1000 });
+
+    for (const user of result.rows) {
+      console.log(`Processing: ${user.email}`);
+
+      try {
+        // Check if user already exists in Supabase Auth
+        const existingUser = allAuthUsers?.users.find((u) => u.email === user.email);
+
+        let authUserId: string;
+
+        if (existingUser) {
+          console.log(`  User already exists in Supabase Auth: ${existingUser.id}`);
+          authUserId = existingUser.id;
+        } else {
+          // Create new user with a temporary password
+          // Users will need to reset their password
+          const tempPassword = `Temp${Math.random().toString(36).slice(2)}!${Date.now()}`;
+
+          const { data: newUser, error: createError } = await supabase.auth.admin.createUser({
+            email: user.email,
+            password: tempPassword,
+            email_confirm: true, // Auto-confirm email
+          });
+
+          if (createError) {
+            throw new Error(createError.message);
+          }
+
+          if (!newUser?.user) {
+            throw new Error('No user returned from createUser');
+          }
+
+          authUserId = newUser.user.id;
+          console.log(`  Created new Supabase Auth user: ${authUserId}`);
+        }
+
+        // Update user_profiles with supabase_user_id
+        await pgClient.query(
+          `UPDATE user_profiles SET supabase_user_id = 
$1 WHERE id = $2`, + [authUserId, user.user_profile_id] + ); + console.log(` Updated user_profiles.supabase_user_id`); + + // Update user_identities with provider_id + await pgClient.query( + `UPDATE user_identities SET provider_id = $1 WHERE id = $2`, + [authUserId, user.id] + ); + console.log(` Updated user_identities.provider_id`); + + // Generate password reset link (optional - for sending to users) + const { data: resetLink, error: resetError } = await supabase.auth.admin.generateLink({ + type: 'recovery', + email: user.email, + }); + + if (resetLink && !resetError) { + console.log(` Password reset link generated`); + } + + results.push({ email: user.email, success: true, authUserId }); + console.log(` ✓ Completed\n`); + + } catch (error: any) { + console.log(` ✗ Failed: ${error.message}\n`); + results.push({ email: user.email, success: false, error: error.message }); + } + } + + // Print summary + console.log('\n========================================'); + console.log('Supabase Auth User Creation Summary:'); + console.log('========================================'); + + const successful = results.filter((r) => r.success); + const failed = results.filter((r) => !r.success); + + console.log(`\nSuccessful: ${successful.length}`); + for (const r of successful) { + console.log(` ✓ ${r.email} -> ${r.authUserId}`); + } + + if (failed.length > 0) { + console.log(`\nFailed: ${failed.length}`); + for (const r of failed) { + console.log(` ✗ ${r.email}: ${r.error}`); + } + } + + console.log('\n========================================'); + console.log('\nNote: All users have been created with temporary passwords.'); + console.log('They will need to use "Forgot Password" to set a new password.'); + console.log('========================================\n'); + + } catch (error: any) { + console.error('Error:', error.message); + throw error; + } finally { + await pgClient.end(); + } +} + +createSupabaseUsers().catch(console.error); diff --git 
a/meteor-web-backend/scripts/migrate-data.ts b/meteor-web-backend/scripts/migrate-data.ts
new file mode 100644
index 0000000..3a10d7d
--- /dev/null
+++ b/meteor-web-backend/scripts/migrate-data.ts
@@ -0,0 +1,266 @@
+import { Client, ClientConfig } from 'pg';
+
+// Source database configuration
+// SECURITY: credentials come from the environment. Never commit
+// usernames/passwords/hosts to source control.
+const sourceConfig: ClientConfig = {
+  host: process.env.SOURCE_DB_HOST ?? '',
+  port: Number(process.env.SOURCE_DB_PORT ?? 5433),
+  database: process.env.SOURCE_DB_NAME ?? 'dev',
+  user: process.env.SOURCE_DB_USER ?? '',
+  password: process.env.SOURCE_DB_PASSWORD ?? '',
+};
+
+// Target database configuration (Supabase)
+const targetConfig: ClientConfig = {
+  host: 'aws-1-us-east-1.pooler.supabase.com',
+  port: 6543,
+  database: 'postgres',
+  user: 'postgres.ffbgowwvcqmdtvvabmnh',
+  password: process.env.SUPABASE_DB_PASSWORD ?? '',
+  ssl: { rejectUnauthorized: false },
+};
+
+// Tables in dependency order (tables with no dependencies first)
+const MIGRATION_ORDER = [
+  // Layer 1: No foreign key dependencies
+  'user_profiles',
+  'inventory_devices',
+  'subscription_plans',
+  'weather_stations',
+  'camera_devices',
+  'analysis_results', // Independent table
+
+  // Layer 2: Depends on Layer 1
+  'user_identities', // -> user_profiles
+  'devices', // -> user_profiles
+  'user_subscriptions', // -> user_profiles, subscription_plans
+  'weather_observations', // -> weather_stations
+  'weather_forecasts', // -> weather_stations
+
+  // Layer 3: Depends on Layer 2
+  'device_registrations', // -> user_profiles, devices
+  'device_certificates', // -> devices
+  'device_configurations', // -> devices
+  'device_security_events', // -> devices
+  'raw_events', // -> devices, user_profiles
+  'subscription_history', // -> user_subscriptions
+  'payment_records', // -> user_subscriptions
+
+  // Layer 4: Depends on Layer 3
+  'validated_events', // -> raw_events, devices, user_profiles
+];
+
+// List a table's column names in ordinal order (empty if the table is absent).
+async function getTableColumns(client: Client, tableName: string): Promise<string[]> {
+  const result = await client.query(`
+    SELECT column_name
+    FROM information_schema.columns
+    WHERE table_name = $1 AND table_schema = 'public'
+    ORDER BY ordinal_position
+  `, 
[tableName]); + return result.rows.map(row => row.column_name); +} + +async function getJsonColumns(client: Client, tableName: string): Promise { + const result = await client.query(` + SELECT column_name + FROM information_schema.columns + WHERE table_name = $1 AND table_schema = 'public' + AND data_type IN ('json', 'jsonb') + `, [tableName]); + return result.rows.map(row => row.column_name); +} + +// Column name mappings: source column -> target column +const COLUMN_MAPPINGS: Record> = { + weather_forecasts: { + weather_station_id: 'station_id', + }, +}; + +async function migrateTable( + sourceClient: Client, + targetClient: Client, + tableName: string +): Promise { + console.log(`\nMigrating table: ${tableName}`); + + // Get source data + const sourceData = await sourceClient.query(`SELECT * FROM ${tableName}`); + const rowCount = sourceData.rows.length; + + if (rowCount === 0) { + console.log(` No data to migrate`); + return 0; + } + + console.log(` Found ${rowCount} rows`); + + // Get target table columns to ensure we only insert columns that exist + const targetColumns = await getTableColumns(targetClient, tableName); + + if (targetColumns.length === 0) { + console.log(` WARNING: Table ${tableName} does not exist in target database, skipping`); + return 0; + } + + // Get column mappings for this table + const columnMapping = COLUMN_MAPPINGS[tableName] || {}; + const reverseMapping: Record = {}; + for (const [source, target] of Object.entries(columnMapping)) { + reverseMapping[target] = source; + } + + // Filter source columns to only those that exist in target (with mapping) + const sourceColumns = Object.keys(sourceData.rows[0] || {}); + const columnsToMigrate: { source: string; target: string }[] = []; + + for (const sourceCol of sourceColumns) { + const targetCol = columnMapping[sourceCol] || sourceCol; + if (targetColumns.includes(targetCol)) { + columnsToMigrate.push({ source: sourceCol, target: targetCol }); + } + } + + if (columnsToMigrate.length === 
0) { + console.log(` WARNING: No common columns found, skipping`); + return 0; + } + + // Get JSON/JSONB columns for proper serialization + const jsonColumns = await getJsonColumns(targetClient, tableName); + if (jsonColumns.length > 0) { + console.log(` JSON columns: ${jsonColumns.join(', ')}`); + } + + const targetColumnNames = columnsToMigrate.map(c => c.target); + console.log(` Migrating columns: ${targetColumnNames.join(', ')}`); + + // Clear existing data in target table + await targetClient.query(`DELETE FROM ${tableName}`); + console.log(` Cleared existing data in target`); + + // Build insert query with parameterized placeholders + const columnList = targetColumnNames.join(', '); + + let insertedCount = 0; + for (const row of sourceData.rows) { + // Serialize JSON columns properly, read from source column name + const values = columnsToMigrate.map(({ source, target }) => { + const value = row[source]; + if (jsonColumns.includes(target) && value !== null && typeof value === 'object') { + return JSON.stringify(value); + } + return value; + }); + const placeholders = columnsToMigrate.map((_, i) => `$${i + 1}`).join(', '); + + try { + await targetClient.query( + `INSERT INTO ${tableName} (${columnList}) VALUES (${placeholders})`, + values + ); + insertedCount++; + } catch (error: any) { + console.error(` Error inserting row: ${error.message}`); + console.error(` Row data: ${JSON.stringify(row)}`); + } + } + + console.log(` Successfully inserted ${insertedCount}/${rowCount} rows`); + return insertedCount; +} + +async function resetSequences(client: Client, tableName: string) { + try { + // Find sequence for this table + const result = await client.query(` + SELECT column_name, column_default + FROM information_schema.columns + WHERE table_name = $1 + AND table_schema = 'public' + AND column_default LIKE 'nextval%' + `, [tableName]); + + for (const row of result.rows) { + const columnName = row.column_name; + // Get max value and reset sequence + const maxResult 
= await client.query(`SELECT MAX(${columnName}) as max_val FROM ${tableName}`); + const maxVal = maxResult.rows[0].max_val; + + if (maxVal !== null) { + // Extract sequence name from column_default + const match = row.column_default.match(/nextval\('([^']+)'/); + if (match) { + const sequenceName = match[1]; + await client.query(`SELECT setval('${sequenceName}', $1, true)`, [maxVal]); + console.log(` Reset sequence ${sequenceName} to ${maxVal}`); + } + } + } + } catch (error: any) { + // Ignore errors - some tables may not have sequences + } +} + +async function migrateData() { + const sourceClient = new Client(sourceConfig); + const targetClient = new Client(targetConfig); + + try { + console.log('Connecting to source database...'); + await sourceClient.connect(); + console.log('Connected to source database!'); + + console.log('Connecting to target database (Supabase)...'); + await targetClient.connect(); + console.log('Connected to target database!'); + + // Disable triggers temporarily + console.log('\nDisabling triggers...'); + await targetClient.query('SET session_replication_role = replica'); + + let totalMigrated = 0; + const results: { table: string; count: number }[] = []; + + for (const tableName of MIGRATION_ORDER) { + try { + const count = await migrateTable(sourceClient, targetClient, tableName); + results.push({ table: tableName, count }); + totalMigrated += count; + } catch (error: any) { + console.error(`Error migrating ${tableName}: ${error.message}`); + results.push({ table: tableName, count: -1 }); + } + } + + // Re-enable triggers + console.log('\nRe-enabling triggers...'); + await targetClient.query('SET session_replication_role = DEFAULT'); + + // Reset sequences + console.log('\nResetting sequences...'); + for (const tableName of MIGRATION_ORDER) { + await resetSequences(targetClient, tableName); + } + + // Print summary + console.log('\n========================================'); + console.log('Migration Summary:'); + 
console.log('========================================'); + for (const result of results) { + const status = result.count >= 0 ? `${result.count} rows` : 'FAILED'; + console.log(`${result.table.padEnd(25)} : ${status}`); + } + console.log('========================================'); + console.log(`Total rows migrated: ${totalMigrated}`); + console.log('========================================'); + + } catch (error: any) { + console.error('Migration failed:', error.message); + throw error; + } finally { + await sourceClient.end(); + await targetClient.end(); + } +} + +migrateData().catch(console.error); diff --git a/meteor-web-backend/scripts/verify-migration.ts b/meteor-web-backend/scripts/verify-migration.ts new file mode 100644 index 0000000..b9438fc --- /dev/null +++ b/meteor-web-backend/scripts/verify-migration.ts @@ -0,0 +1,123 @@ +import { Client, ClientConfig } from 'pg'; +import * as dotenv from 'dotenv'; + +dotenv.config(); + +// Target database configuration (Supabase) +const targetConfig: ClientConfig = { + host: 'aws-1-us-east-1.pooler.supabase.com', + port: 6543, + database: 'postgres', + user: 'postgres.ffbgowwvcqmdtvvabmnh', + password: '!a_KW.-6Grb-X?#', + ssl: { rejectUnauthorized: false }, +}; + +async function verifyMigration() { + const client = new Client(targetConfig); + + try { + console.log('Connecting to Supabase database...'); + await client.connect(); + console.log('Connected!\n'); + + // Tables to verify + const tables = [ + 'user_profiles', + 'user_identities', + 'inventory_devices', + 'weather_stations', + 'weather_forecasts', + 'analysis_results', + 'device_registrations', + 'devices', + 'raw_events', + 'validated_events', + ]; + + console.log('========================================'); + console.log('Migration Verification Results:'); + console.log('========================================\n'); + + console.log('Table Row Counts:'); + console.log('--------------------------------'); + + let totalRows = 0; + for (const table of tables) 
{ + try { + const result = await client.query(`SELECT count(*) FROM ${table}`); + const count = parseInt(result.rows[0].count, 10); + console.log(`${table.padEnd(25)} : ${count}`); + totalRows += count; + } catch (error: any) { + console.log(`${table.padEnd(25)} : ERROR - ${error.message}`); + } + } + + console.log('--------------------------------'); + console.log(`Total rows: ${totalRows}\n`); + + // Verify user_profiles have supabase_user_id + console.log('\nUser Profile Verification:'); + console.log('--------------------------------'); + const userProfiles = await client.query(` + SELECT id, display_name, supabase_user_id + FROM user_profiles + `); + + for (const profile of userProfiles.rows) { + const status = profile.supabase_user_id ? '✓' : '✗'; + console.log(`${status} ${profile.display_name || 'N/A'} (${profile.id})`); + if (profile.supabase_user_id) { + console.log(` Supabase Auth ID: ${profile.supabase_user_id}`); + } + } + + // Verify user_identities + console.log('\nUser Identity Verification:'); + console.log('--------------------------------'); + const userIdentities = await client.query(` + SELECT email, provider, provider_id + FROM user_identities + WHERE provider = 'email' + `); + + for (const identity of userIdentities.rows) { + const status = identity.provider_id ? 
'✓' : '✗'; + console.log(`${status} ${identity.email}`); + if (identity.provider_id) { + console.log(` Provider ID: ${identity.provider_id}`); + } + } + + // Check Supabase Auth users + console.log('\nSupabase Auth Users (from auth.users):'); + console.log('--------------------------------'); + try { + const authUsers = await client.query(` + SELECT id, email, created_at + FROM auth.users + ORDER BY created_at DESC + `); + + for (const user of authUsers.rows) { + console.log(`✓ ${user.email} (${user.id})`); + } + console.log(`\nTotal Supabase Auth users: ${authUsers.rows.length}`); + } catch (error: any) { + console.log(`Cannot access auth.users table: ${error.message}`); + } + + console.log('\n========================================'); + console.log('Verification Complete!'); + console.log('========================================\n'); + + } catch (error: any) { + console.error('Error:', error.message); + throw error; + } finally { + await client.end(); + } +} + +verifyMigration().catch(console.error); diff --git a/meteor-web-backend/src/app.module.ts b/meteor-web-backend/src/app.module.ts index 0627785..dbed9fd 100644 --- a/meteor-web-backend/src/app.module.ts +++ b/meteor-web-backend/src/app.module.ts @@ -39,6 +39,7 @@ import { CorrelationMiddleware } from './logging/correlation.middleware'; import { MetricsMiddleware } from './metrics/metrics.middleware'; import { StructuredLogger } from './logging/logger.service'; import { pinoConfig } from './logging/logging.config'; +import { SupabaseModule } from './supabase/supabase.module'; // Ensure dotenv is loaded before anything else dotenv.config(); @@ -63,7 +64,17 @@ console.log('Current working directory:', process.cwd()); logger: 'simple-console', // Simplified to avoid conflicts with pino retryAttempts: 3, retryDelay: 3000, + // SSL configuration for Supabase - always use rejectUnauthorized: false for proxy compatibility + ssl: { + rejectUnauthorized: false, + }, + extra: { + ssl: { + rejectUnauthorized: false, 
+ }, + }, }), + SupabaseModule, AuthModule, DevicesModule, DeviceRegistrationModule, diff --git a/meteor-web-backend/src/auth/auth.module.ts b/meteor-web-backend/src/auth/auth.module.ts index 5cffc7b..8416c1e 100644 --- a/meteor-web-backend/src/auth/auth.module.ts +++ b/meteor-web-backend/src/auth/auth.module.ts @@ -9,6 +9,7 @@ import { UserProfile } from '../entities/user-profile.entity'; import { UserIdentity } from '../entities/user-identity.entity'; import { PaymentsModule } from '../payments/payments.module'; import { MetricsModule } from '../metrics/metrics.module'; +import { SupabaseModule } from '../supabase/supabase.module'; @Module({ imports: [ @@ -23,6 +24,7 @@ import { MetricsModule } from '../metrics/metrics.module'; }), forwardRef(() => PaymentsModule), MetricsModule, + SupabaseModule, ], controllers: [AuthController], providers: [AuthService, JwtStrategy], diff --git a/meteor-web-backend/src/auth/auth.service.ts b/meteor-web-backend/src/auth/auth.service.ts index f29caa7..e1632cf 100644 --- a/meteor-web-backend/src/auth/auth.service.ts +++ b/meteor-web-backend/src/auth/auth.service.ts @@ -235,4 +235,78 @@ export class AuthService { throw new UnauthorizedException('Invalid refresh token'); } } + + /** + * Link an existing user account with a Supabase Auth user ID + * This is used during the migration period to connect legacy users with Supabase Auth + */ + async linkSupabaseUser( + userId: string, + supabaseUserId: string, + ): Promise<{ message: string }> { + // Check if this Supabase user ID is already linked to another account + const existingLink = await this.userProfileRepository.findOne({ + where: { supabaseUserId }, + }); + + if (existingLink && existingLink.id !== userId) { + throw new ConflictException( + 'This Supabase account is already linked to another user', + ); + } + + // Find the user profile + const userProfile = await this.userProfileRepository.findOne({ + where: { id: userId }, + }); + + if (!userProfile) { + throw new 
NotFoundException('User profile not found'); + } + + // Link the Supabase user ID + userProfile.supabaseUserId = supabaseUserId; + await this.userProfileRepository.save(userProfile); + + return { message: 'Supabase account linked successfully' }; + } + + /** + * Find or create a user profile based on Supabase user ID + */ + async findOrCreateBySupabaseId( + supabaseUserId: string, + email?: string, + ): Promise { + // Try to find existing user by Supabase ID + let userProfile = await this.userProfileRepository.findOne({ + where: { supabaseUserId }, + }); + + if (userProfile) { + return userProfile; + } + + // If email is provided, try to find by email and link + if (email) { + const userIdentity = await this.userIdentityRepository.findOne({ + where: { email, provider: 'email' }, + relations: ['userProfile'], + }); + + if (userIdentity?.userProfile) { + // Link existing account with Supabase + userIdentity.userProfile.supabaseUserId = supabaseUserId; + await this.userProfileRepository.save(userIdentity.userProfile); + return userIdentity.userProfile; + } + } + + // Create new user profile + userProfile = this.userProfileRepository.create({ + supabaseUserId, + displayName: email?.split('@')[0] || 'User', + }); + return this.userProfileRepository.save(userProfile); + } } diff --git a/meteor-web-backend/src/auth/strategies/jwt.strategy.ts b/meteor-web-backend/src/auth/strategies/jwt.strategy.ts index 82981a3..1c69213 100644 --- a/meteor-web-backend/src/auth/strategies/jwt.strategy.ts +++ b/meteor-web-backend/src/auth/strategies/jwt.strategy.ts @@ -4,8 +4,10 @@ import { ExtractJwt, Strategy } from 'passport-jwt'; import { InjectRepository } from '@nestjs/typeorm'; import { Repository } from 'typeorm'; import { UserProfile } from '../../entities/user-profile.entity'; +import { SupabaseService } from '../../supabase/supabase.service'; -export interface JwtPayload { +// Legacy JWT payload (self-built auth) +export interface LegacyJwtPayload { userId: string; email: string; 
sub: string; @@ -13,21 +15,72 @@ export interface JwtPayload { exp?: number; } +// Supabase JWT payload +export interface SupabaseJwtPayload { + sub: string; // Supabase user ID + email?: string; + aud: string; + role: string; + iat?: number; + exp?: number; +} + +export type JwtPayload = LegacyJwtPayload | SupabaseJwtPayload; + @Injectable() export class JwtStrategy extends PassportStrategy(Strategy) { constructor( @InjectRepository(UserProfile) private userProfileRepository: Repository, + private supabaseService: SupabaseService, ) { super({ jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(), ignoreExpiration: false, - secretOrKey: - process.env.JWT_ACCESS_SECRET || 'default-secret-change-in-production', + // Use a callback to handle both legacy and Supabase JWTs + secretOrKeyProvider: (request: any, rawJwtToken: string, done: any) => { + // Try to decode without verification to check the token type + try { + const payload = JSON.parse( + Buffer.from(rawJwtToken.split('.')[1], 'base64').toString(), + ); + + // Check if this is a Supabase token (has 'aud' claim with supabase) + if (payload.aud && payload.aud.includes('authenticated')) { + // Use Supabase JWT secret + const supabaseJwtSecret = process.env.SUPABASE_JWT_SECRET; + if (supabaseJwtSecret) { + return done(null, supabaseJwtSecret); + } + } + } catch (e) { + // Fall through to legacy secret + } + + // Default to legacy JWT secret + done( + null, + process.env.JWT_ACCESS_SECRET || + 'default-secret-change-in-production', + ); + }, }); } async validate(payload: JwtPayload) { + // Check if this is a Supabase token + const isSupabaseToken = + 'aud' in payload && payload.aud?.includes('authenticated'); + + if (isSupabaseToken) { + return this.validateSupabaseToken(payload as SupabaseJwtPayload); + } + + // Legacy token validation + return this.validateLegacyToken(payload as LegacyJwtPayload); + } + + private async validateLegacyToken(payload: LegacyJwtPayload) { const { userId } = payload; // Verify user 
still exists @@ -42,6 +95,33 @@ export class JwtStrategy extends PassportStrategy(Strategy) { return { userId: user.id, email: payload.email, + authType: 'legacy', + }; + } + + private async validateSupabaseToken(payload: SupabaseJwtPayload) { + const supabaseUserId = payload.sub; + + // First, try to find user by supabase_user_id + let user = await this.userProfileRepository.findOne({ + where: { supabaseUserId }, + }); + + if (!user) { + // If not found, this might be a new Supabase user + // Create a new user profile for them + user = this.userProfileRepository.create({ + supabaseUserId, + displayName: payload.email?.split('@')[0] || 'User', + }); + await this.userProfileRepository.save(user); + } + + return { + userId: user.id, + supabaseUserId, + email: payload.email, + authType: 'supabase', }; } } diff --git a/meteor-web-backend/src/camera/camera.controller.ts b/meteor-web-backend/src/camera/camera.controller.ts index 5e79218..ec1ec4a 100644 --- a/meteor-web-backend/src/camera/camera.controller.ts +++ b/meteor-web-backend/src/camera/camera.controller.ts @@ -1,4 +1,4 @@ -import { Controller, Get, Query, Param, Patch, Body, ParseIntPipe } from '@nestjs/common'; +import { Controller, Get, Query, Param, Patch, Body, BadRequestException } from '@nestjs/common'; import { CameraService, CameraDeviceQuery } from './camera.service'; @Controller('api/v1/cameras') @@ -16,7 +16,7 @@ export class CameraController { } @Get(':id') - async getCameraDevice(@Param('id', ParseIntPipe) id: number) { + async getCameraDevice(@Param('id') id: string) { return await this.cameraService.findOne(id); } @@ -32,16 +32,19 @@ export class CameraController { @Patch(':id/status') async updateDeviceStatus( - @Param('id', ParseIntPipe) id: number, + @Param('id') id: string, @Body('status') status: 'active' | 'maintenance' | 'offline', ) { const device = await this.cameraService.findOne(id); + if (!device.deviceId) { + throw new BadRequestException('Device has no associated device ID'); + } 
return await this.cameraService.updateDeviceStatus(device.deviceId, status); } @Patch(':id') async updateDevice( - @Param('id', ParseIntPipe) id: number, + @Param('id') id: string, @Body() updateData: any, ) { return await this.cameraService.updateDevice(id, updateData); diff --git a/meteor-web-backend/src/camera/camera.service.ts b/meteor-web-backend/src/camera/camera.service.ts index 3618f64..f64d20d 100644 --- a/meteor-web-backend/src/camera/camera.service.ts +++ b/meteor-web-backend/src/camera/camera.service.ts @@ -36,10 +36,10 @@ export class CameraService { }, }; - if (status || location) { - options.where = {}; - if (status) options.where.status = status; - if (location) options.where.location = location; + if (status) { + options.where = { status }; + // Note: location filtering removed - location is now JSONB type + // To filter by location, use query builder with JSON operators } const [devices, total] = await this.cameraRepository.findAndCount(options); @@ -55,7 +55,7 @@ export class CameraService { }; } - async findOne(id: number) { + async findOne(id: string) { const device = await this.cameraRepository.findOne({ where: { id }, }); @@ -133,7 +133,7 @@ export class CameraService { return history; } - async updateDevice(id: number, updateData: Partial) { + async updateDevice(id: string, updateData: Partial) { const device = await this.findOne(id); Object.assign(device, updateData); diff --git a/meteor-web-backend/src/devices/device-registration.module.ts b/meteor-web-backend/src/devices/device-registration.module.ts index 619fe37..f5d9fd8 100644 --- a/meteor-web-backend/src/devices/device-registration.module.ts +++ b/meteor-web-backend/src/devices/device-registration.module.ts @@ -22,9 +22,13 @@ import { DeviceRegistrationController } from './controllers/device-registration. 
// Gateways import { DeviceRealtimeGateway } from './gateways/device-realtime.gateway'; +// Realtime +import { RealtimeModule } from '../realtime/realtime.module'; + @Module({ imports: [ ConfigModule, + RealtimeModule, TypeOrmModule.forFeature([ Device, DeviceRegistration, diff --git a/meteor-web-backend/src/entities/analysis-result.entity.ts b/meteor-web-backend/src/entities/analysis-result.entity.ts index d47e6b9..15cd84f 100644 --- a/meteor-web-backend/src/entities/analysis-result.entity.ts +++ b/meteor-web-backend/src/entities/analysis-result.entity.ts @@ -1,22 +1,85 @@ -import { Entity, PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + CreateDateColumn, + UpdateDateColumn, + DeleteDateColumn, + ManyToOne, + JoinColumn, + Index, +} from 'typeorm'; +import { ValidatedEvent } from './validated-event.entity'; +import { Device } from './device.entity'; +import { UserProfile } from './user-profile.entity'; +import { RawEvent } from './raw-event.entity'; @Entity('analysis_results') +// Composite index created in migration 1766300000002 export class AnalysisResult { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; @Column({ name: 'analysis_type', length: 50 }) + @Index() analysisType: string; @Column({ name: 'time_frame', length: 20, nullable: true }) + @Index() timeFrame?: string; @Column({ name: 'result_data', type: 'jsonb' }) - resultData: any; + resultData: Record; - @CreateDateColumn({ name: 'created_at' }) + // Relationship fields (added in migration 1766300000011) + @Column({ name: 'validated_event_id', type: 'uuid', nullable: true }) + @Index('idx_analysis_results_validated_event_id', { where: 'validated_event_id IS NOT NULL' }) + validatedEventId?: string; + + @ManyToOne(() => ValidatedEvent, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 
'validated_event_id' }) + validatedEvent?: ValidatedEvent; + + @Column({ name: 'device_id', type: 'uuid', nullable: true }) + @Index('idx_analysis_results_device_id', { where: 'device_id IS NOT NULL' }) + deviceId?: string; + + @ManyToOne(() => Device, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 'device_id' }) + device?: Device; + + @Column({ name: 'user_profile_id', type: 'uuid', nullable: true }) + @Index('idx_analysis_results_user_profile_id', { where: 'user_profile_id IS NOT NULL' }) + userProfileId?: string; + + @ManyToOne(() => UserProfile, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 'user_profile_id' }) + userProfile?: UserProfile; + + @Column({ name: 'raw_event_id', type: 'uuid', nullable: true }) + @Index('idx_analysis_results_raw_event_id', { where: 'raw_event_id IS NOT NULL' }) + rawEventId?: string; + + @ManyToOne(() => RawEvent, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 'raw_event_id' }) + rawEvent?: RawEvent; + + // Audit fields (added in migration 1766300000014) + @Column({ name: 'created_by', type: 'uuid', nullable: true }) + createdBy?: string; + + @Column({ name: 'updated_by', type: 'uuid', nullable: true }) + updatedBy?: string; + + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_analysis_results_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/camera-device.entity.ts b/meteor-web-backend/src/entities/camera-device.entity.ts index 2a8ab59..4d58c43 100644 --- a/meteor-web-backend/src/entities/camera-device.entity.ts +++ b/meteor-web-backend/src/entities/camera-device.entity.ts @@ 
-1,18 +1,50 @@ -import { Entity, PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + CreateDateColumn, + UpdateDateColumn, + DeleteDateColumn, + ManyToOne, + JoinColumn, + Index, +} from 'typeorm'; +import { Device } from './device.entity'; @Entity('camera_devices') export class CameraDevice { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; - @Column({ name: 'device_id', unique: true }) - deviceId: string; + // FK to devices table (added in migration 1766300000009) + @Column({ name: 'device_id', type: 'uuid', nullable: true }) + @Index('idx_camera_devices_device_id', { where: 'device_id IS NOT NULL' }) + deviceId?: string; + + @ManyToOne(() => Device, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 'device_id' }) + device?: Device; + + // Legacy device_id kept for reference + @Column({ name: 'legacy_device_id', type: 'varchar', length: 100, unique: true, nullable: true }) + legacyDeviceId?: string; @Column() name: string; - @Column() - location: string; + // Changed to JSONB in migration 1766300000009 + @Column({ type: 'jsonb', nullable: true }) + location?: { + latitude?: number; + longitude?: number; + altitude?: number; + site_name?: string; + }; + + // Legacy location kept for reference + @Column({ name: 'legacy_location', type: 'text', nullable: true }) + legacyLocation?: string; @Column({ default: 'offline' }) status: 'active' | 'maintenance' | 'offline'; @@ -29,6 +61,7 @@ export class CameraDevice { @Column({ type: 'int', nullable: true }) gain?: number; + // exposure_count with CHECK constraint >= 0 (added in migration 1766300000003) @Column({ name: 'exposure_count', type: 'int', default: 0 }) exposureCount: number; @@ -41,9 +74,14 @@ export class CameraDevice { @Column({ name: 'serial_number', unique: true, nullable: true }) 
serialNumber?: string; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_camera_devices_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/device-certificate.entity.ts b/meteor-web-backend/src/entities/device-certificate.entity.ts index 3659351..88866cd 100644 --- a/meteor-web-backend/src/entities/device-certificate.entity.ts +++ b/meteor-web-backend/src/entities/device-certificate.entity.ts @@ -6,6 +6,7 @@ import { JoinColumn, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, Index, } from 'typeorm'; import { Device } from './device.entity'; @@ -72,9 +73,27 @@ export class DeviceCertificate { @Column({ name: 'certificate_pem', type: 'text' }) certificatePem: string; + // Legacy private key - deprecated in migration 1766300000013 + // New encrypted storage should be used instead @Column({ name: 'private_key_pem', type: 'text', nullable: true }) privateKeyPem?: string; + // Encrypted private key storage (added in migration 1766300000013) + @Column({ name: 'encrypted_private_key', type: 'text', nullable: true }) + encryptedPrivateKey?: string; + + @Column({ name: 'encryption_key_id', type: 'varchar', length: 100, nullable: true }) + encryptionKeyId?: string; + + @Column({ + name: 'encryption_algorithm', + type: 'varchar', + length: 50, + nullable: true, + default: 'AES-256-GCM', + }) + encryptionAlgorithm?: string; + @Column({ name: 'public_key_pem', type: 'text' }) publicKeyPem: string; @@ -126,4 +145,9 @@ export class DeviceCertificate { @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: 
Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_device_certificates_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; } \ No newline at end of file diff --git a/meteor-web-backend/src/entities/device-configuration.entity.ts b/meteor-web-backend/src/entities/device-configuration.entity.ts index 069eb86..c805265 100644 --- a/meteor-web-backend/src/entities/device-configuration.entity.ts +++ b/meteor-web-backend/src/entities/device-configuration.entity.ts @@ -6,6 +6,7 @@ import { JoinColumn, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, Index, } from 'typeorm'; import { Device } from './device.entity'; @@ -198,4 +199,9 @@ export class DeviceConfiguration { @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_device_configurations_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; } \ No newline at end of file diff --git a/meteor-web-backend/src/entities/device-registration.entity.ts b/meteor-web-backend/src/entities/device-registration.entity.ts index f7a5d94..f533cde 100644 --- a/meteor-web-backend/src/entities/device-registration.entity.ts +++ b/meteor-web-backend/src/entities/device-registration.entity.ts @@ -6,6 +6,7 @@ import { JoinColumn, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, Index, } from 'typeorm'; import { UserProfile } from './user-profile.entity'; @@ -161,4 +162,9 @@ export class DeviceRegistration { @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_device_registrations_deleted_at', { where: 'deleted_at IS NULL' }) + 
deletedAt?: Date; } \ No newline at end of file diff --git a/meteor-web-backend/src/entities/device-security-event.entity.ts b/meteor-web-backend/src/entities/device-security-event.entity.ts index 554dae2..b52400c 100644 --- a/meteor-web-backend/src/entities/device-security-event.entity.ts +++ b/meteor-web-backend/src/entities/device-security-event.entity.ts @@ -5,6 +5,7 @@ import { ManyToOne, JoinColumn, CreateDateColumn, + DeleteDateColumn, Index, } from 'typeorm'; import { Device } from './device.entity'; @@ -168,4 +169,9 @@ export class DeviceSecurityEvent { @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_device_security_events_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; } \ No newline at end of file diff --git a/meteor-web-backend/src/entities/device.entity.ts b/meteor-web-backend/src/entities/device.entity.ts index 4b3a541..f7ea445 100644 --- a/meteor-web-backend/src/entities/device.entity.ts +++ b/meteor-web-backend/src/entities/device.entity.ts @@ -3,12 +3,15 @@ import { PrimaryGeneratedColumn, Column, ManyToOne, + OneToOne, JoinColumn, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, Index, } from 'typeorm'; import { UserProfile } from './user-profile.entity'; +import { InventoryDevice } from './inventory-device.entity'; export enum DeviceStatus { PENDING = 'pending', @@ -27,6 +30,7 @@ export enum DeviceStatus { @Index(['status']) @Index(['lastSeenAt']) @Index(['deviceToken']) +@Index(['firmwareVersion'], { where: 'firmware_version IS NOT NULL' }) export class Device { @PrimaryGeneratedColumn('uuid') id: string; @@ -38,19 +42,23 @@ export class Device { @JoinColumn({ name: 'user_profile_id' }) userProfile: UserProfile; + // Link to inventory device (added in migration 1766300000012) + @Column({ name: 'inventory_device_id', type: 'uuid', 
nullable: true }) + @Index('idx_devices_inventory_device_id_unique', { unique: true, where: 'inventory_device_id IS NOT NULL' }) + inventoryDeviceId?: string; + + @OneToOne(() => InventoryDevice, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 'inventory_device_id' }) + inventoryDevice?: InventoryDevice; + @Column({ name: 'hardware_id', type: 'varchar', length: 255, unique: true }) hardwareId: string; @Column({ name: 'device_name', type: 'varchar', length: 255, nullable: true }) deviceName?: string; - @Column({ - name: 'status', - type: 'enum', - enum: DeviceStatus, - default: DeviceStatus.PENDING, - }) - status: DeviceStatus; + @Column({ type: 'varchar', length: 50, default: 'active' }) + status: string; @Column({ name: 'device_token', type: 'varchar', length: 255, nullable: true, unique: true }) deviceToken?: string; @@ -98,6 +106,7 @@ export class Device { @Column({ name: 'security_level', type: 'varchar', length: 20, default: 'standard' }) securityLevel: string; + // trust_score with CHECK constraint 0.0-1.0 (added in migration 1766300000003) @Column({ name: 'trust_score', type: 'float', default: 1.0 }) trustScore: number; @@ -119,9 +128,21 @@ export class Device { @Column({ name: 'metadata', type: 'jsonb', nullable: true }) metadata?: Record; + // Audit fields (added in migration 1766300000014) + @Column({ name: 'created_by', type: 'uuid', nullable: true }) + createdBy?: string; + + @Column({ name: 'updated_by', type: 'uuid', nullable: true }) + updatedBy?: string; + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_devices_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; } diff --git a/meteor-web-backend/src/entities/inventory-device.entity.ts 
b/meteor-web-backend/src/entities/inventory-device.entity.ts index 85dae8f..0e1cd98 100644 --- a/meteor-web-backend/src/entities/inventory-device.entity.ts +++ b/meteor-web-backend/src/entities/inventory-device.entity.ts @@ -4,25 +4,38 @@ import { Column, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, + OneToOne, + Index, } from 'typeorm'; +import { Device } from './device.entity'; @Entity('inventory_devices') export class InventoryDevice { @PrimaryGeneratedColumn('uuid') id: string; - @Column({ type: 'varchar', length: 255, unique: true }) + @Column({ name: 'hardware_id', type: 'varchar', length: 255, unique: true }) hardwareId: string; - @Column({ type: 'boolean', default: false }) + @Column({ name: 'is_claimed', type: 'boolean', default: false }) isClaimed: boolean; - @Column({ type: 'varchar', length: 255, nullable: true }) + @Column({ name: 'device_model', type: 'varchar', length: 255, nullable: true }) deviceModel?: string; - @CreateDateColumn({ type: 'timestamptz' }) + // Inverse side of the Device relationship (added in migration 1766300000012) + @OneToOne(() => Device, (device) => device.inventoryDevice) + claimedDevice?: Device; + + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ type: 'timestamptz' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_inventory_devices_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; } diff --git a/meteor-web-backend/src/entities/payment-record.entity.ts b/meteor-web-backend/src/entities/payment-record.entity.ts index 7371818..8a78426 100644 --- a/meteor-web-backend/src/entities/payment-record.entity.ts +++ b/meteor-web-backend/src/entities/payment-record.entity.ts @@ -1,21 +1,36 @@ -import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, JoinColumn, 
CreateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + ManyToOne, + JoinColumn, + CreateDateColumn, + DeleteDateColumn, + Index, +} from 'typeorm'; import { UserSubscription } from './user-subscription.entity'; @Entity('payment_records') export class PaymentRecord { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; - @Column({ name: 'user_subscription_id' }) - userSubscriptionId: number; + // Changed from number to UUID in migration 1766300000007-1766300000008 + @Column({ name: 'user_subscription_id', type: 'uuid' }) + @Index('idx_payment_records_sub_id') + userSubscriptionId: string; - @ManyToOne(() => UserSubscription, subscription => subscription.paymentRecords) + @ManyToOne(() => UserSubscription, (subscription) => subscription.paymentRecords, { + onDelete: 'CASCADE', + }) @JoinColumn({ name: 'user_subscription_id' }) userSubscription: UserSubscription; @Column({ name: 'stripe_payment_intent_id', nullable: true, unique: true }) stripePaymentIntentId?: string; + // amount with CHECK constraint > 0 (added in migration 1766300000003) @Column({ type: 'decimal', precision: 10, scale: 2 }) amount: number; @@ -23,6 +38,7 @@ export class PaymentRecord { currency: string; @Column() + @Index('idx_payment_records_status') status: 'succeeded' | 'failed' | 'pending' | 'canceled'; @Column({ name: 'payment_method', nullable: true }) @@ -34,6 +50,11 @@ export class PaymentRecord { @Column({ name: 'paid_at', type: 'timestamptz', nullable: true }) paidAt?: Date; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_payment_records_deleted_at', { where: 'deleted_at IS NULL' 
}) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/raw-event.entity.ts b/meteor-web-backend/src/entities/raw-event.entity.ts index 8a18cc1..7a375d6 100644 --- a/meteor-web-backend/src/entities/raw-event.entity.ts +++ b/meteor-web-backend/src/entities/raw-event.entity.ts @@ -6,6 +6,8 @@ import { JoinColumn, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, + Index, } from 'typeorm'; import { Device } from './device.entity'; import { UserProfile } from './user-profile.entity'; @@ -18,11 +20,14 @@ export enum ProcessingStatus { } @Entity('raw_events') +@Index(['deviceId', 'eventTimestamp']) +@Index(['userProfileId', 'createdAt']) export class RawEvent { @PrimaryGeneratedColumn('uuid') id: string; @Column({ name: 'device_id' }) + @Index() deviceId: string; @ManyToOne(() => Device, { onDelete: 'CASCADE' }) @@ -30,50 +35,61 @@ export class RawEvent { device: Device; @Column({ name: 'user_profile_id' }) + @Index() userProfileId: string; @ManyToOne(() => UserProfile, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'user_profile_id' }) userProfile: UserProfile; - @Column({ type: 'text' }) + @Column({ name: 'file_path', type: 'text' }) filePath: string; - @Column({ type: 'bigint', nullable: true }) + @Column({ name: 'file_size', type: 'bigint', nullable: true }) fileSize?: number; - @Column({ type: 'varchar', length: 100, nullable: true }) + @Column({ name: 'file_type', type: 'varchar', length: 100, nullable: true }) + @Index('idx_raw_events_file_type') fileType?: string; - @Column({ type: 'varchar', length: 255, nullable: true }) + @Column({ name: 'original_filename', type: 'varchar', length: 255, nullable: true }) originalFilename?: string; - @Column({ type: 'varchar', length: 50 }) + @Column({ name: 'event_type', type: 'varchar', length: 50 }) + @Index() eventType: string; - @Column({ type: 'timestamptz' }) + @Column({ name: 'event_timestamp', type: 'timestamptz' }) + @Index() eventTimestamp: Date; @Column({ type: 'jsonb', nullable: true }) 
metadata?: Record; @Column({ + name: 'processing_status', type: 'varchar', length: 20, enum: ProcessingStatus, default: ProcessingStatus.PENDING, }) + @Index() processingStatus: ProcessingStatus; - @Column({ type: 'varchar', length: 255, nullable: true }) + @Column({ name: 'sqs_message_id', type: 'varchar', length: 255, nullable: true }) sqsMessageId?: string; - @Column({ type: 'timestamptz', nullable: true }) + @Column({ name: 'processed_at', type: 'timestamptz', nullable: true }) processedAt?: Date; - @CreateDateColumn({ type: 'timestamptz' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ type: 'timestamptz' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_raw_events_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; } diff --git a/meteor-web-backend/src/entities/subscription-history.entity.ts b/meteor-web-backend/src/entities/subscription-history.entity.ts index a9c7100..837da59 100644 --- a/meteor-web-backend/src/entities/subscription-history.entity.ts +++ b/meteor-web-backend/src/entities/subscription-history.entity.ts @@ -1,19 +1,34 @@ -import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, JoinColumn, CreateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + ManyToOne, + JoinColumn, + CreateDateColumn, + DeleteDateColumn, + Index, +} from 'typeorm'; import { UserSubscription } from './user-subscription.entity'; @Entity('subscription_history') export class SubscriptionHistory { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; - @Column({ name: 'user_subscription_id' }) - userSubscriptionId: number; + // Changed from number to UUID in migration 
1766300000007-1766300000008 + @Column({ name: 'user_subscription_id', type: 'uuid' }) + @Index('idx_subscription_history_sub_id') + userSubscriptionId: string; - @ManyToOne(() => UserSubscription, subscription => subscription.subscriptionHistory) + @ManyToOne(() => UserSubscription, (subscription) => subscription.subscriptionHistory, { + onDelete: 'CASCADE', + }) + @JoinColumn({ name: 'user_subscription_id' }) userSubscription: UserSubscription; @Column() + @Index('idx_subscription_history_action') action: 'created' | 'updated' | 'canceled' | 'renewed' | 'payment_failed'; @Column({ name: 'old_status', nullable: true }) @@ -23,8 +38,13 @@ export class SubscriptionHistory { newStatus?: string; @Column({ type: 'jsonb', nullable: true }) - metadata?: any; + metadata?: Record<string, unknown>; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_subscription_history_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/subscription-plan.entity.ts b/meteor-web-backend/src/entities/subscription-plan.entity.ts index 7464ca5..e790378 100644 --- a/meteor-web-backend/src/entities/subscription-plan.entity.ts +++ b/meteor-web-backend/src/entities/subscription-plan.entity.ts @@ -1,10 +1,20 @@ -import { Entity, PrimaryGeneratedColumn, Column, OneToMany, CreateDateColumn, UpdateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + OneToMany, + CreateDateColumn, + UpdateDateColumn, + DeleteDateColumn, + Index, +} from 'typeorm'; import { UserSubscription } from './user-subscription.entity'; @Entity('subscription_plans') export class SubscriptionPlan { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 
1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; @Column({ name: 'plan_id', unique: true }) planId: string; @@ -15,6 +25,7 @@ export class SubscriptionPlan { @Column({ type: 'text', nullable: true }) description?: string; + // price with CHECK constraint >= 0 (added in migration 1766300000003) @Column({ type: 'decimal', precision: 10, scale: 2 }) price: number; @@ -24,6 +35,7 @@ export class SubscriptionPlan { @Column() interval: 'month' | 'year' | 'week'; + // interval_count with CHECK constraint > 0 (added in migration 1766300000003) @Column({ name: 'interval_count', default: 1 }) intervalCount: number; @@ -39,12 +51,24 @@ export class SubscriptionPlan { @Column({ name: 'is_active', default: true }) isActive: boolean; - @OneToMany(() => UserSubscription, subscription => subscription.subscriptionPlan) + // Audit fields (added in migration 1766300000014) + @Column({ name: 'created_by', type: 'uuid', nullable: true }) + createdBy?: string; + + @Column({ name: 'updated_by', type: 'uuid', nullable: true }) + updatedBy?: string; + + @OneToMany(() => UserSubscription, (subscription) => subscription.subscriptionPlan) userSubscriptions: UserSubscription[]; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_subscription_plans_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/user-identity.entity.ts b/meteor-web-backend/src/entities/user-identity.entity.ts index e48f30a..f7ce09e 100644 --- a/meteor-web-backend/src/entities/user-identity.entity.ts +++ 
b/meteor-web-backend/src/entities/user-identity.entity.ts @@ -4,6 +4,7 @@ import { Column, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, ManyToOne, JoinColumn, Index, @@ -37,12 +38,34 @@ export class UserIdentity { }) passwordHash: string | null; - @CreateDateColumn({ name: 'created_at' }) + // Authentication tracking fields (added in migration 1766300000004) + @Column({ name: 'email_verified_at', type: 'timestamptz', nullable: true }) + emailVerifiedAt: Date | null; + + @Column({ name: 'last_auth_at', type: 'timestamptz', nullable: true }) + lastAuthAt: Date | null; + + @Column({ name: 'auth_failure_count', type: 'integer', default: 0 }) + authFailureCount: number; + + @Column({ name: 'locked_until', type: 'timestamptz', nullable: true }) + @Index('idx_user_identities_locked_until', { where: 'locked_until IS NOT NULL' }) + lockedUntil: Date | null; + + @Column({ name: 'password_changed_at', type: 'timestamptz', nullable: true }) + passwordChangedAt: Date | null; + + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_user_identities_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt: Date | null; + @ManyToOne(() => UserProfile, (profile) => profile.identities, { onDelete: 'CASCADE', }) diff --git a/meteor-web-backend/src/entities/user-profile.entity.ts b/meteor-web-backend/src/entities/user-profile.entity.ts index 4d795ef..c0085c6 100644 --- a/meteor-web-backend/src/entities/user-profile.entity.ts +++ b/meteor-web-backend/src/entities/user-profile.entity.ts @@ -4,7 +4,9 @@ import { Column, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, OneToMany, + Index, } from 'typeorm'; import { UserIdentity } from './user-identity.entity'; @@ 
-40,12 +42,50 @@ export class UserProfile { }) paymentProviderSubscriptionId: string | null; - @CreateDateColumn({ name: 'created_at' }) + @Column({ + name: 'supabase_user_id', + type: 'uuid', + nullable: true, + unique: true, + }) + supabaseUserId: string | null; + + // Audit and tracking fields (added in migration 1766300000004) + @Column({ name: 'last_login_at', type: 'timestamptz', nullable: true }) + lastLoginAt: Date | null; + + @Column({ + name: 'timezone', + type: 'varchar', + length: 50, + nullable: true, + default: 'UTC', + }) + timezone: string | null; + + @Column({ + name: 'locale', + type: 'varchar', + length: 10, + nullable: true, + default: 'zh-CN', + }) + locale: string | null; + + @Column({ name: 'login_count', type: 'integer', default: 0 }) + loginCount: number; + + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_user_profiles_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt: Date | null; + @OneToMany(() => UserIdentity, (identity) => identity.userProfile) identities: UserIdentity[]; } diff --git a/meteor-web-backend/src/entities/user-subscription.entity.ts b/meteor-web-backend/src/entities/user-subscription.entity.ts index b11e951..f04f600 100644 --- a/meteor-web-backend/src/entities/user-subscription.entity.ts +++ b/meteor-web-backend/src/entities/user-subscription.entity.ts @@ -1,25 +1,41 @@ -import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, OneToMany, JoinColumn, CreateDateColumn, UpdateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + ManyToOne, + OneToMany, + JoinColumn, + CreateDateColumn, + UpdateDateColumn, + DeleteDateColumn, + Index, +} from 'typeorm'; import { UserProfile } 
from './user-profile.entity'; import { SubscriptionPlan } from './subscription-plan.entity'; import { SubscriptionHistory } from './subscription-history.entity'; import { PaymentRecord } from './payment-record.entity'; @Entity('user_subscriptions') +@Index(['userProfileId']) +// Composite index idx_user_subscriptions_status_period created in migration 1766300000002 export class UserSubscription { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; @Column({ name: 'user_profile_id', type: 'uuid' }) userProfileId: string; - @ManyToOne(() => UserProfile) + @ManyToOne(() => UserProfile, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'user_profile_id' }) userProfile: UserProfile; - @Column({ name: 'subscription_plan_id' }) - subscriptionPlanId: number; + // Changed from number to UUID in migration 1766300000007-1766300000008 + @Column({ name: 'subscription_plan_id', type: 'uuid' }) + @Index('idx_user_subscriptions_plan_id') + subscriptionPlanId: string; - @ManyToOne(() => SubscriptionPlan, plan => plan.userSubscriptions) + @ManyToOne(() => SubscriptionPlan, (plan) => plan.userSubscriptions, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'subscription_plan_id' }) subscriptionPlan: SubscriptionPlan; @@ -33,6 +49,7 @@ export class UserSubscription { currentPeriodStart: Date; @Column({ name: 'current_period_end', type: 'timestamptz' }) + @Index('idx_user_subscriptions_period_end') currentPeriodEnd: Date; @Column({ name: 'cancel_at_period_end', default: false }) @@ -47,15 +64,20 @@ export class UserSubscription { @Column({ name: 'trial_end', type: 'timestamptz', nullable: true }) trialEnd?: Date; - @OneToMany(() => SubscriptionHistory, history => history.userSubscription) + @OneToMany(() => SubscriptionHistory, (history) => history.userSubscription) subscriptionHistory: SubscriptionHistory[]; - @OneToMany(() => PaymentRecord, payment => payment.userSubscription) + 
@OneToMany(() => PaymentRecord, (payment) => payment.userSubscription) paymentRecords: PaymentRecord[]; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_user_subscriptions_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/validated-event.entity.ts b/meteor-web-backend/src/entities/validated-event.entity.ts index 5689e2a..969a4c1 100644 --- a/meteor-web-backend/src/entities/validated-event.entity.ts +++ b/meteor-web-backend/src/entities/validated-event.entity.ts @@ -4,6 +4,7 @@ import { Column, CreateDateColumn, UpdateDateColumn, + DeleteDateColumn, ManyToOne, JoinColumn, Index, @@ -11,6 +12,7 @@ import { import { RawEvent } from './raw-event.entity'; import { Device } from './device.entity'; import { UserProfile } from './user-profile.entity'; +import { WeatherStation } from './weather-station.entity'; @Entity('validated_events') @Index(['userProfileId', 'capturedAt']) @@ -33,6 +35,7 @@ export class ValidatedEvent { userProfileId: string; @Column({ name: 'media_url' }) + // Hash index created in migration 1766300000002 mediaUrl: string; @Column({ name: 'file_size', type: 'bigint', nullable: true }) @@ -55,6 +58,7 @@ export class ValidatedEvent { @Column({ type: 'jsonb', nullable: true }) metadata?: Record; + // validation_score with CHECK constraint 0-1 (added in migration 1766300000003) @Column({ name: 'validation_score', type: 'decimal', @@ -74,30 +78,44 @@ export class ValidatedEvent { @Column({ name: 'validation_algorithm', length: 50, nullable: true }) validationAlgorithm?: string; - // 新增分析字段 + // Analysis fields 
@Column({ name: 'weather_condition', length: 50, nullable: true }) weatherCondition?: string; + // Legacy station name (kept for historical reference) @Column({ name: 'station_name', length: 100, nullable: true }) @Index() stationName?: string; + // FK to weather_stations (added in migration 1766300000010) + @Column({ name: 'weather_station_id', type: 'uuid', nullable: true }) + @Index('idx_validated_events_weather_station_id', { where: 'weather_station_id IS NOT NULL' }) + weatherStationId?: string; + + @ManyToOne(() => WeatherStation, { nullable: true, onDelete: 'SET NULL' }) + @JoinColumn({ name: 'weather_station_id' }) + weatherStation?: WeatherStation; + @Column({ length: 50, nullable: true }) @Index() classification?: string; + // duration with CHECK constraint >= 0 (added in migration 1766300000003) @Column({ type: 'decimal', precision: 5, scale: 2, nullable: true }) duration?: string; @Column({ length: 20, nullable: true }) direction?: string; + // azimuth with CHECK constraint 0-360 (added in migration 1766300000003) @Column({ type: 'integer', nullable: true }) azimuth?: number; + // altitude with CHECK constraint -90 to 90 (added in migration 1766300000003) @Column({ type: 'integer', nullable: true }) altitude?: number; + // velocity with CHECK constraint >= 0 (added in migration 1766300000003) @Column({ type: 'decimal', precision: 8, scale: 2, nullable: true }) velocity?: string; // km/s @@ -108,21 +126,33 @@ export class ValidatedEvent { @Column({ name: 'peak_magnitude', type: 'decimal', precision: 4, scale: 2, nullable: true }) peakMagnitude?: string; - @CreateDateColumn({ name: 'created_at' }) + // Audit fields (added in migration 1766300000014) + @Column({ name: 'created_by', type: 'uuid', nullable: true }) + createdBy?: string; + + @Column({ name: 'updated_by', type: 'uuid', nullable: true }) + updatedBy?: string; + + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + 
@UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; - @ManyToOne(() => RawEvent) + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_validated_events_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; + + @ManyToOne(() => RawEvent, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'raw_event_id' }) rawEvent: RawEvent; - @ManyToOne(() => Device) + @ManyToOne(() => Device, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'device_id' }) device: Device; - @ManyToOne(() => UserProfile) + @ManyToOne(() => UserProfile, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'user_profile_id' }) userProfile: UserProfile; } diff --git a/meteor-web-backend/src/entities/weather-forecast.entity.ts b/meteor-web-backend/src/entities/weather-forecast.entity.ts index d2fb6fe..7f35718 100644 --- a/meteor-web-backend/src/entities/weather-forecast.entity.ts +++ b/meteor-web-backend/src/entities/weather-forecast.entity.ts @@ -1,21 +1,35 @@ -import { Entity, PrimaryGeneratedColumn, Column, CreateDateColumn, ManyToOne, JoinColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + CreateDateColumn, + DeleteDateColumn, + ManyToOne, + JoinColumn, + Index, +} from 'typeorm'; import { WeatherStation } from './weather-station.entity'; @Entity('weather_forecasts') export class WeatherForecast { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; - @Column({ name: 'weather_station_id' }) - weatherStationId: number; + // Changed from number to UUID in migration 1766300000007-1766300000008 + @Column({ name: 'station_id', type: 'uuid' }) + @Index('idx_weather_forecasts_station_id') + weatherStationId: string; @Column({ name: 'forecast_time', type: 'timestamp' }) + @Index('idx_weather_forecasts_time') forecastTime: Date; @Column({ type: 
'decimal', precision: 4, scale: 1, nullable: true }) temperature?: number; - @Column({ name: 'cloud_cover', type: 'integer', nullable: true }) + // Changed to decimal in migration 1766211093224 + @Column({ name: 'cloud_cover', type: 'decimal', precision: 5, scale: 2, nullable: true }) cloudCover?: number; @Column({ type: 'decimal', precision: 5, scale: 1, nullable: true, default: 0 }) @@ -27,10 +41,15 @@ export class WeatherForecast { @Column({ length: 50, nullable: true }) condition?: string; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @ManyToOne(() => WeatherStation, station => station.forecasts, { onDelete: 'CASCADE' }) - @JoinColumn({ name: 'weather_station_id' }) + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_weather_forecasts_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; + + @ManyToOne(() => WeatherStation, (station) => station.forecasts, { onDelete: 'CASCADE' }) + @JoinColumn({ name: 'station_id' }) weatherStation: WeatherStation; -} \ No newline at end of file +} diff --git a/meteor-web-backend/src/entities/weather-observation.entity.ts b/meteor-web-backend/src/entities/weather-observation.entity.ts index 28a61fc..65b5250 100644 --- a/meteor-web-backend/src/entities/weather-observation.entity.ts +++ b/meteor-web-backend/src/entities/weather-observation.entity.ts @@ -1,19 +1,32 @@ -import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, JoinColumn, CreateDateColumn } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + ManyToOne, + JoinColumn, + CreateDateColumn, + DeleteDateColumn, + Index, +} from 'typeorm'; import { WeatherStation } from './weather-station.entity'; @Entity('weather_observations') export class WeatherObservation { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 
1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; - @Column({ name: 'weather_station_id' }) - weatherStationId: number; + // Changed from number to UUID in migration 1766300000007-1766300000008 + @Column({ name: 'weather_station_id', type: 'uuid' }) + @Index('idx_weather_observations_station_id') + weatherStationId: string; - @ManyToOne(() => WeatherStation) + @ManyToOne(() => WeatherStation, (station) => station.observations, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'weather_station_id' }) weatherStation: WeatherStation; @Column({ name: 'observation_time', type: 'timestamptz' }) + @Index('idx_weather_observations_time') observationTime: Date; @Column({ type: 'decimal', precision: 5, scale: 2 }) @@ -46,6 +59,11 @@ export class WeatherObservation { @Column({ type: 'decimal', precision: 5, scale: 2 }) precipitation: number; - @CreateDateColumn({ name: 'created_at' }) + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; -} \ No newline at end of file + + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_weather_observations_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; +} diff --git a/meteor-web-backend/src/entities/weather-station.entity.ts b/meteor-web-backend/src/entities/weather-station.entity.ts index 6dd0fd1..89e37aa 100644 --- a/meteor-web-backend/src/entities/weather-station.entity.ts +++ b/meteor-web-backend/src/entities/weather-station.entity.ts @@ -1,38 +1,84 @@ -import { Entity, PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn, OneToMany } from 'typeorm'; +import { + Entity, + PrimaryGeneratedColumn, + Column, + CreateDateColumn, + UpdateDateColumn, + DeleteDateColumn, + OneToMany, + Index, +} from 'typeorm'; import { WeatherForecast } from './weather-forecast.entity'; +import { WeatherObservation } from './weather-observation.entity'; 
@Entity('weather_stations') export class WeatherStation { - @PrimaryGeneratedColumn() - id: number; + // Changed from serial to UUID in migration 1766300000006-1766300000008 + @PrimaryGeneratedColumn('uuid') + id: string; - @Column({ name: 'station_name', unique: true }) + @Column({ name: 'station_name' }) + @Index('idx_weather_stations_name') stationName: string; - @Column() - location: string; - - @Column({ type: 'decimal', precision: 10, scale: 8, nullable: true }) + // Location coordinates (added in migration 1766300000001) + @Column({ type: 'decimal', precision: 10, scale: 7, nullable: true }) latitude?: number; - @Column({ type: 'decimal', precision: 11, scale: 8, nullable: true }) + @Column({ type: 'decimal', precision: 10, scale: 7, nullable: true }) longitude?: number; - @Column({ type: 'decimal', precision: 8, scale: 2, nullable: true }) - altitude?: number; + @Column({ type: 'decimal', precision: 6, scale: 1, nullable: true }) + elevation?: number; - @Column({ default: 'active' }) - status: 'active' | 'maintenance' | 'offline'; + @Column({ type: 'varchar', length: 50, nullable: true }) + timezone?: string; - // Note: Weather observations are stored in the weather_observations table - // The weather_stations table only contains station metadata + @Column({ name: 'current_temperature', type: 'decimal', precision: 4, scale: 1, nullable: true }) + currentTemperature?: number; - @CreateDateColumn({ name: 'created_at' }) + @Column({ type: 'integer', nullable: true }) + humidity?: number; + + @Column({ name: 'cloud_cover', type: 'integer', nullable: true }) + cloudCover?: number; + + @Column({ type: 'decimal', precision: 5, scale: 1, nullable: true }) + visibility?: number; + + @Column({ name: 'wind_speed', type: 'decimal', precision: 5, scale: 1, nullable: true }) + windSpeed?: number; + + @Column({ name: 'wind_direction', type: 'integer', nullable: true }) + windDirection?: number; + + @Column({ length: 50, nullable: true }) + condition?: string; + + 
@Column({ name: 'observation_quality', length: 20, nullable: true }) + observationQuality?: string; + + // Audit fields (added in migration 1766300000014) + @Column({ name: 'created_by', type: 'uuid', nullable: true }) + createdBy?: string; + + @Column({ name: 'updated_by', type: 'uuid', nullable: true }) + updatedBy?: string; + + @CreateDateColumn({ name: 'created_at', type: 'timestamptz' }) createdAt: Date; - @UpdateDateColumn({ name: 'updated_at' }) + @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' }) updatedAt: Date; - @OneToMany(() => WeatherForecast, forecast => forecast.weatherStation) + // Soft delete support (added in migration 1766300000005) + @DeleteDateColumn({ name: 'deleted_at', type: 'timestamptz', nullable: true }) + @Index('idx_weather_stations_deleted_at', { where: 'deleted_at IS NULL' }) + deletedAt?: Date; + + @OneToMany(() => WeatherForecast, (forecast) => forecast.weatherStation) forecasts: WeatherForecast[]; -} \ No newline at end of file + + @OneToMany(() => WeatherObservation, (observation) => observation.weatherStation) + observations: WeatherObservation[]; +} diff --git a/meteor-web-backend/src/events/events.controller.ts b/meteor-web-backend/src/events/events.controller.ts index b84d39c..83eaff8 100644 --- a/meteor-web-backend/src/events/events.controller.ts +++ b/meteor-web-backend/src/events/events.controller.ts @@ -195,11 +195,12 @@ export class EventsController { status: string; checks: { database: boolean; - aws: boolean; + storage: boolean; + queue: boolean; }; }> { const checks = await this.eventsService.healthCheck(); - const isHealthy = checks.database && checks.aws; + const isHealthy = checks.database && checks.storage && checks.queue; return { status: isHealthy ? 
'healthy' : 'unhealthy', diff --git a/meteor-web-backend/src/events/events.module.ts b/meteor-web-backend/src/events/events.module.ts index 4d1caa4..613784f 100644 --- a/meteor-web-backend/src/events/events.module.ts +++ b/meteor-web-backend/src/events/events.module.ts @@ -2,20 +2,23 @@ import { Module, forwardRef } from '@nestjs/common'; import { TypeOrmModule } from '@nestjs/typeorm'; import { EventsController } from './events.controller'; import { EventsService } from './events.service'; -import { AwsService } from '../aws/aws.service'; import { RawEvent } from '../entities/raw-event.entity'; import { Device } from '../entities/device.entity'; import { ValidatedEvent } from '../entities/validated-event.entity'; import { UserProfile } from '../entities/user-profile.entity'; import { PaymentsModule } from '../payments/payments.module'; +import { StorageModule } from '../storage/storage.module'; +import { QueueModule } from '../queue/queue.module'; @Module({ imports: [ TypeOrmModule.forFeature([RawEvent, Device, ValidatedEvent, UserProfile]), forwardRef(() => PaymentsModule), + StorageModule, + QueueModule, ], controllers: [EventsController], - providers: [EventsService, AwsService], - exports: [EventsService, AwsService], + providers: [EventsService], + exports: [EventsService], }) export class EventsModule {} diff --git a/meteor-web-backend/src/events/events.service.ts b/meteor-web-backend/src/events/events.service.ts index 3a28103..5c72d35 100644 --- a/meteor-web-backend/src/events/events.service.ts +++ b/meteor-web-backend/src/events/events.service.ts @@ -10,7 +10,8 @@ import { Repository, DataSource } from 'typeorm'; import { RawEvent, ProcessingStatus } from '../entities/raw-event.entity'; import { Device } from '../entities/device.entity'; import { ValidatedEvent } from '../entities/validated-event.entity'; -import { AwsService } from '../aws/aws.service'; +import { StorageService } from '../storage/storage.service'; +import { QueueService } from 
'../queue/queue.service'; import { UploadEventDto } from './dto/upload-event.dto'; import { EventDto } from './dto/validated-events-response.dto'; @@ -32,7 +33,8 @@ export class EventsService { private readonly deviceRepository: Repository, @InjectRepository(ValidatedEvent) private readonly validatedEventRepository: Repository, - private readonly awsService: AwsService, + private readonly storageService: StorageService, + private readonly queueService: QueueService, private readonly dataSource: DataSource, ) {} @@ -90,8 +92,8 @@ export class EventsService { this.logger.log(`Device verified: ${device.hardwareId}`); - // 2. Upload file to S3 - const filePath = await this.awsService.uploadFile({ + // 2. Upload file to Supabase Storage + const filePath = await this.storageService.uploadFile({ buffer: file.buffer, originalFilename: file.originalname, mimeType: file.mimetype, @@ -116,25 +118,25 @@ export class EventsService { const savedEvent = await manager.save(RawEvent, rawEvent); this.logger.log(`Raw event created with ID: ${savedEvent.id}`); - // 4. Send message to SQS queue for processing + // 4. 
Send message to processing queue (PostgreSQL NOTIFY) try { - const messageId = await this.awsService.sendProcessingMessage({ + const messageId = await this.queueService.sendProcessingMessage({ rawEventId: savedEvent.id, deviceId, userProfileId, eventType: uploadEventDto.eventType, }); - // Update the raw event with SQS message ID + // Update the raw event with queue message ID await manager.update( RawEvent, { id: savedEvent.id }, - { sqsMessageId: messageId }, + { sqsMessageId: messageId }, // Keeping same field name for backward compatibility ); - this.logger.log(`SQS message sent with ID: ${messageId}`); - } catch (sqsError) { - this.logger.error(`Failed to send SQS message: ${sqsError.message}`); + this.logger.log(`Queue message sent with ID: ${messageId}`); + } catch (queueError) { + this.logger.error(`Failed to send queue message: ${queueError.message}`); // Mark as failed but don't throw - we still want to return success for the upload await manager.update( RawEvent, @@ -302,8 +304,8 @@ export class EventsService { /** * Health check for the events service */ - async healthCheck(): Promise<{ database: boolean; aws: boolean }> { - const results = { database: false, aws: false }; + async healthCheck(): Promise<{ database: boolean; storage: boolean; queue: boolean }> { + const results = { database: false, storage: false, queue: false }; try { // Test database connectivity @@ -314,11 +316,19 @@ export class EventsService { } try { - // Test AWS services - const awsHealth = await this.awsService.healthCheck(); - results.aws = awsHealth.s3 && awsHealth.sqs; + // Test storage service + const storageHealth = await this.storageService.healthCheck(); + results.storage = storageHealth.connected; } catch (error) { - this.logger.warn(`AWS health check failed: ${error.message}`); + this.logger.warn(`Storage health check failed: ${error.message}`); + } + + try { + // Test queue service + const queueHealth = await this.queueService.healthCheck(); + results.queue = 
queueHealth.connected; + } catch (error) { + this.logger.warn(`Queue health check failed: ${error.message}`); } return results; diff --git a/meteor-web-backend/src/queue/queue.module.ts b/meteor-web-backend/src/queue/queue.module.ts new file mode 100644 index 0000000..20fc8fc --- /dev/null +++ b/meteor-web-backend/src/queue/queue.module.ts @@ -0,0 +1,8 @@ +import { Module } from '@nestjs/common'; +import { QueueService } from './queue.service'; + +@Module({ + providers: [QueueService], + exports: [QueueService], +}) +export class QueueModule {} diff --git a/meteor-web-backend/src/queue/queue.service.ts b/meteor-web-backend/src/queue/queue.service.ts new file mode 100644 index 0000000..acff026 --- /dev/null +++ b/meteor-web-backend/src/queue/queue.service.ts @@ -0,0 +1,154 @@ +import { Injectable, Logger, OnModuleDestroy } from '@nestjs/common'; +import { DataSource } from 'typeorm'; +import { v4 as uuidv4 } from 'uuid'; + +export interface QueueMessageParams { + rawEventId: string; + deviceId: string; + userProfileId: string; + eventType: string; +} + +export interface QueueMessage { + id: string; + rawEventId: string; + deviceId: string; + userProfileId: string; + eventType: string; + timestamp: string; +} + +type MessageHandler = (message: QueueMessage) => Promise<void>; + +@Injectable() +export class QueueService implements OnModuleDestroy { + private readonly logger = new Logger(QueueService.name); + private readonly channelName = 'meteor_event_processing'; + private messageHandlers: Map<string, MessageHandler> = new Map(); + private isListening = false; + + constructor(private readonly dataSource: DataSource) { + this.logger.log('Queue Service initialized using PostgreSQL NOTIFY/LISTEN'); + } + + /** + * Send a message to the processing queue using PostgreSQL NOTIFY + */ + async sendProcessingMessage(params: QueueMessageParams): Promise<string> { + const { rawEventId, deviceId, userProfileId, eventType } = params; + const messageId = uuidv4(); + + const message: QueueMessage = { + id: messageId,
rawEventId, + deviceId, + userProfileId, + eventType, + timestamp: new Date().toISOString(), + }; + + try { + this.logger.log(`Sending message to queue for event: ${rawEventId}`); + + // Use NOTIFY to send the message + const payload = JSON.stringify(message); + await this.dataSource.query( + `SELECT pg_notify($1, $2)`, + [this.channelName, payload], + ); + + this.logger.log(`Successfully sent message to queue: ${messageId}`); + return messageId; + } catch (error) { + this.logger.error( + `Failed to send message to queue: ${error.message}`, + error.stack, + ); + throw new Error(`Queue send failed: ${error.message}`); + } + } + + /** + * Register a handler for incoming messages + */ + registerHandler(handlerId: string, handler: MessageHandler): void { + this.messageHandlers.set(handlerId, handler); + this.logger.log(`Registered message handler: ${handlerId}`); + } + + /** + * Unregister a handler + */ + unregisterHandler(handlerId: string): void { + this.messageHandlers.delete(handlerId); + this.logger.log(`Unregistered message handler: ${handlerId}`); + } + + /** + * Start listening for messages (for workers/processors) + * Note: This is typically used by the compute service, not the web backend + */ + async startListening(): Promise { + if (this.isListening) { + this.logger.warn('Already listening for messages'); + return; + } + + try { + // Get a raw connection for LISTEN + const queryRunner = this.dataSource.createQueryRunner(); + await queryRunner.connect(); + + // Execute LISTEN command + await queryRunner.query(`LISTEN ${this.channelName}`); + this.isListening = true; + this.logger.log(`Started listening on channel: ${this.channelName}`); + + // Note: TypeORM doesn't directly support pg_notify event handling + // For a full implementation, you would need to use the raw pg driver + // and set up event listeners. This is a simplified version. + // + // In production, the Go compute service would: + // 1. Connect to PostgreSQL using pgx + // 2. 
Execute LISTEN meteor_event_processing + // 3. Process incoming notifications + } catch (error) { + this.logger.error(`Failed to start listening: ${error.message}`); + throw error; + } + } + + /** + * Stop listening for messages + */ + async stopListening(): Promise { + if (!this.isListening) { + return; + } + + try { + await this.dataSource.query(`UNLISTEN ${this.channelName}`); + this.isListening = false; + this.logger.log('Stopped listening for messages'); + } catch (error) { + this.logger.error(`Failed to stop listening: ${error.message}`); + } + } + + /** + * Health check for queue service + */ + async healthCheck(): Promise<{ connected: boolean; error?: string }> { + try { + // Test that we can send a NOTIFY command + await this.dataSource.query('SELECT 1'); + return { connected: true }; + } catch (error) { + return { connected: false, error: String(error) }; + } + } + + async onModuleDestroy() { + await this.stopListening(); + } +} diff --git a/meteor-web-backend/src/realtime/realtime.module.ts b/meteor-web-backend/src/realtime/realtime.module.ts new file mode 100644 index 0000000..068f311 --- /dev/null +++ b/meteor-web-backend/src/realtime/realtime.module.ts @@ -0,0 +1,10 @@ +import { Module } from '@nestjs/common'; +import { RealtimeService } from './realtime.service'; +import { SupabaseModule } from '../supabase/supabase.module'; + +@Module({ + imports: [SupabaseModule], + providers: [RealtimeService], + exports: [RealtimeService], +}) +export class RealtimeModule {} diff --git a/meteor-web-backend/src/realtime/realtime.service.ts b/meteor-web-backend/src/realtime/realtime.service.ts new file mode 100644 index 0000000..e1035b8 --- /dev/null +++ b/meteor-web-backend/src/realtime/realtime.service.ts @@ -0,0 +1,243 @@ +import { Injectable, Logger, OnModuleDestroy } from '@nestjs/common'; +import { SupabaseService } from '../supabase/supabase.service'; +import { RealtimeChannel } from '@supabase/supabase-js'; + +export interface BroadcastMessage { + event: 
string; + payload: any; +} + +@Injectable() +export class RealtimeService implements OnModuleDestroy { + private readonly logger = new Logger(RealtimeService.name); + private channels: Map<string, RealtimeChannel> = new Map(); + + constructor(private readonly supabaseService: SupabaseService) { + this.logger.log('Realtime Service initialized using Supabase Realtime'); + } + + /** + * Broadcast a message to a specific channel + * Useful for server-initiated broadcasts + */ + async broadcast(channelName: string, message: BroadcastMessage): Promise<void> { + try { + const supabase = this.supabaseService.getAdminClient(); + + // Get or create channel + let channel = this.channels.get(channelName); + if (!channel) { + channel = supabase.channel(channelName); + this.channels.set(channelName, channel); + await channel.subscribe(); + } + + // Send broadcast + await channel.send({ + type: 'broadcast', + event: message.event, + payload: message.payload, + }); + + this.logger.debug(`Broadcasted ${message.event} to channel ${channelName}`); + } catch (error) { + this.logger.error(`Failed to broadcast message: ${error.message}`); + throw error; + } + } + + /** + * Broadcast device status update to users watching this device + */ + async broadcastDeviceStatus( + userId: string, + deviceId: string, + status: 'online' | 'offline', + ): Promise<void> { + await this.broadcast(`user:${userId}`, { + event: 'device-status-change', + payload: { + deviceId, + status, + timestamp: new Date().toISOString(), + }, + }); + } + + /** + * Broadcast device heartbeat data to users + */ + async broadcastDeviceHeartbeat( + userId: string, + deviceId: string, + heartbeatData: any, + ): Promise<void> { + await this.broadcast(`user:${userId}`, { + event: 'device-heartbeat', + payload: { + deviceId, + data: heartbeatData, + timestamp: new Date().toISOString(), + }, + }); + } + + /** + * Broadcast device alert to users + */ + async broadcastDeviceAlert( + userId: string, + deviceId: string, + alertType: string, + message: string, + ): Promise<void> {
+ await this.broadcast(`user:${userId}`, { + event: 'device-alert', + payload: { + deviceId, + type: alertType, + message, + timestamp: new Date().toISOString(), + }, + }); + } + + /** + * Broadcast registration status update + */ + async broadcastRegistrationUpdate( + claimId: string, + status: string, + progress: number, + error?: string, + ): Promise { + await this.broadcast('registrations', { + event: 'registration-update', + payload: { + claim_id: claimId, + status, + progress, + error, + timestamp: new Date().toISOString(), + }, + }); + } + + /** + * Broadcast new event notification to user + */ + async broadcastNewEvent(userId: string, eventData: any): Promise { + await this.broadcast(`user:${userId}`, { + event: 'new-event', + payload: { + ...eventData, + timestamp: new Date().toISOString(), + }, + }); + } + + /** + * Send command to device via realtime channel + */ + async sendDeviceCommand( + deviceId: string, + command: string, + parameters?: any, + ): Promise { + const commandId = `cmd_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + + await this.broadcast(`device:${deviceId}`, { + event: 'device-command', + payload: { + commandId, + command, + parameters, + timestamp: new Date().toISOString(), + }, + }); + + return commandId; + } + + /** + * Subscribe to postgres changes for a table + * Note: This is mainly for reference - frontend clients should use Supabase JS client directly + */ + async subscribeToTableChanges( + tableName: string, + callback: (payload: any) => void, + filter?: string, + ): Promise { + const supabase = this.supabaseService.getAdminClient(); + + const channelName = `postgres:${tableName}`; + let channel = this.channels.get(channelName); + + if (channel) { + await channel.unsubscribe(); + } + + channel = supabase + .channel(channelName) + .on( + 'postgres_changes', + { + event: '*', + schema: 'public', + table: tableName, + filter, + }, + (payload) => { + this.logger.debug(`Received postgres change for ${tableName}: 
${payload.eventType}`); + callback(payload); + }, + ); + + await channel.subscribe(); + this.channels.set(channelName, channel); + + this.logger.log(`Subscribed to postgres changes for table: ${tableName}`); + return channel; + } + + /** + * Unsubscribe from a channel + */ + async unsubscribe(channelName: string): Promise { + const channel = this.channels.get(channelName); + if (channel) { + await channel.unsubscribe(); + this.channels.delete(channelName); + this.logger.log(`Unsubscribed from channel: ${channelName}`); + } + } + + /** + * Health check for realtime service + */ + async healthCheck(): Promise<{ connected: boolean; error?: string }> { + try { + const supabase = this.supabaseService.getAdminClient(); + const channel = supabase.channel('health-check'); + + await channel.subscribe((status) => { + if (status === 'SUBSCRIBED') { + channel.unsubscribe(); + } + }); + + return { connected: true }; + } catch (error) { + return { connected: false, error: String(error) }; + } + } + + async onModuleDestroy() { + // Cleanup all channels + for (const [name, channel] of this.channels) { + await channel.unsubscribe(); + this.logger.log(`Cleaned up channel: ${name}`); + } + this.channels.clear(); + } +} diff --git a/meteor-web-backend/src/storage/storage.module.ts b/meteor-web-backend/src/storage/storage.module.ts new file mode 100644 index 0000000..c89b4ed --- /dev/null +++ b/meteor-web-backend/src/storage/storage.module.ts @@ -0,0 +1,10 @@ +import { Module } from '@nestjs/common'; +import { StorageService } from './storage.service'; +import { SupabaseModule } from '../supabase/supabase.module'; + +@Module({ + imports: [SupabaseModule], + providers: [StorageService], + exports: [StorageService], +}) +export class StorageModule {} diff --git a/meteor-web-backend/src/storage/storage.service.ts b/meteor-web-backend/src/storage/storage.service.ts new file mode 100644 index 0000000..8ae9698 --- /dev/null +++ b/meteor-web-backend/src/storage/storage.service.ts @@ -0,0 
+1,189 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { SupabaseService } from '../supabase/supabase.service'; +import { v4 as uuidv4 } from 'uuid'; + +export interface UploadFileParams { + buffer: Buffer; + originalFilename: string; + mimeType: string; + deviceId: string; + eventType: string; +} + +export interface UploadResult { + filePath: string; + publicUrl: string; +} + +@Injectable() +export class StorageService { + private readonly logger = new Logger(StorageService.name); + private readonly bucketName: string; + + constructor(private readonly supabaseService: SupabaseService) { + this.bucketName = process.env.SUPABASE_STORAGE_BUCKET || 'meteor-events'; + this.logger.log(`Storage Service initialized with bucket: ${this.bucketName}`); + } + + /** + * Upload a file to Supabase Storage + */ + async uploadFile(params: UploadFileParams): Promise<string> { + const { buffer, originalFilename, mimeType, deviceId, eventType } = params; + + // Generate a unique file path + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const uniqueId = uuidv4(); + const fileExtension = this.getFileExtension(originalFilename); + const filePath = `events/${deviceId}/${eventType}/${timestamp}_${uniqueId}${fileExtension}`; + + try { + this.logger.log(`Uploading file to Supabase Storage: ${filePath}`); + + const supabase = this.supabaseService.getAdminClient(); + + const { data, error } = await supabase.storage + .from(this.bucketName) + .upload(filePath, buffer, { + contentType: mimeType, + upsert: false, + metadata: { + originalFilename, + deviceId, + eventType, + uploadTimestamp: timestamp, + }, + }); + + if (error) { + this.logger.error(`Supabase Storage upload error: ${error.message}`); + throw new Error(`Storage upload failed: ${error.message}`); + } + + this.logger.log(`Successfully uploaded file: ${data.path}`); + return data.path; + } catch (error) { + this.logger.error( + `Failed to upload file to Supabase Storage: ${error.message}`, + error.stack,
+ ); + throw new Error(`Storage upload failed: ${error.message}`); + } + } + + /** + * Get a signed URL for a file (for temporary access) + */ + async getSignedUrl(filePath: string, expiresIn: number = 3600): Promise { + try { + const supabase = this.supabaseService.getAdminClient(); + + const { data, error } = await supabase.storage + .from(this.bucketName) + .createSignedUrl(filePath, expiresIn); + + if (error) { + throw new Error(`Failed to create signed URL: ${error.message}`); + } + + return data.signedUrl; + } catch (error) { + this.logger.error(`Failed to get signed URL: ${error.message}`); + throw error; + } + } + + /** + * Get the public URL for a file (if bucket is public) + */ + getPublicUrl(filePath: string): string { + const supabase = this.supabaseService.getAdminClient(); + const { data } = supabase.storage + .from(this.bucketName) + .getPublicUrl(filePath); + return data.publicUrl; + } + + /** + * Delete a file from storage + */ + async deleteFile(filePath: string): Promise { + try { + const supabase = this.supabaseService.getAdminClient(); + + const { error } = await supabase.storage + .from(this.bucketName) + .remove([filePath]); + + if (error) { + throw new Error(`Failed to delete file: ${error.message}`); + } + + this.logger.log(`Successfully deleted file: ${filePath}`); + } catch (error) { + this.logger.error(`Failed to delete file: ${error.message}`); + throw error; + } + } + + /** + * List files in a directory + */ + async listFiles( + path: string, + options?: { limit?: number; offset?: number }, + ): Promise<{ name: string; id: string; created_at: string }[]> { + try { + const supabase = this.supabaseService.getAdminClient(); + + const { data, error } = await supabase.storage + .from(this.bucketName) + .list(path, { + limit: options?.limit || 100, + offset: options?.offset || 0, + }); + + if (error) { + throw new Error(`Failed to list files: ${error.message}`); + } + + return data || []; + } catch (error) { + this.logger.error(`Failed to 
list files: ${error.message}`); + throw error; + } + } + + /** + * Get file extension from filename + */ + private getFileExtension(filename: string): string { + const lastDotIndex = filename.lastIndexOf('.'); + if (lastDotIndex === -1) { + return ''; + } + return filename.substring(lastDotIndex); + } + + /** + * Health check for storage service + */ + async healthCheck(): Promise<{ connected: boolean; error?: string }> { + try { + const supabase = this.supabaseService.getAdminClient(); + + // Try to list files in the bucket root to verify connectivity + const { error } = await supabase.storage + .from(this.bucketName) + .list('', { limit: 1 }); + + if (error) { + return { connected: false, error: error.message }; + } + + return { connected: true }; + } catch (error) { + return { connected: false, error: String(error) }; + } + } +} diff --git a/meteor-web-backend/src/subscription/subscription.controller.ts b/meteor-web-backend/src/subscription/subscription.controller.ts index 0c4e523..59553a7 100644 --- a/meteor-web-backend/src/subscription/subscription.controller.ts +++ b/meteor-web-backend/src/subscription/subscription.controller.ts @@ -1,4 +1,4 @@ -import { Controller, Get, Post, Patch, Query, Param, Body, ParseIntPipe, UseGuards } from '@nestjs/common'; +import { Controller, Get, Post, Patch, Query, Param, Body, UseGuards } from '@nestjs/common'; import { SubscriptionService, SubscriptionQuery } from './subscription.service'; import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard'; @@ -19,7 +19,7 @@ export class SubscriptionController { } @Get('plans/:id') - async getPlan(@Param('id', ParseIntPipe) id: number) { + async getPlan(@Param('id') id: string) { return await this.subscriptionService.getPlan(id); } @@ -50,7 +50,7 @@ export class SubscriptionController { @Patch(':id/status') async updateSubscriptionStatus( - @Param('id', ParseIntPipe) id: number, + @Param('id') id: string, @Body('status') status: string, @Body('metadata') metadata?: any, ) { @@ 
-59,19 +59,19 @@ export class SubscriptionController { // Subscription History @Get(':id/history') - async getSubscriptionHistory(@Param('id', ParseIntPipe) id: number) { + async getSubscriptionHistory(@Param('id') id: string) { return await this.subscriptionService.getSubscriptionHistory(id); } // Payment Records @Get(':id/payments') - async getPaymentRecords(@Param('id', ParseIntPipe) id: number) { + async getPaymentRecords(@Param('id') id: string) { return await this.subscriptionService.getPaymentRecords(id); } @Post(':id/payments') async createPaymentRecord( - @Param('id', ParseIntPipe) id: number, + @Param('id') id: string, @Body() paymentData: any, ) { return await this.subscriptionService.createPaymentRecord(id, paymentData); diff --git a/meteor-web-backend/src/subscription/subscription.service.ts b/meteor-web-backend/src/subscription/subscription.service.ts index 7517019..1bf089d 100644 --- a/meteor-web-backend/src/subscription/subscription.service.ts +++ b/meteor-web-backend/src/subscription/subscription.service.ts @@ -49,7 +49,7 @@ export class SubscriptionService { return plans; } - async getPlan(id: number) { + async getPlan(id: string) { const plan = await this.subscriptionPlanRepository.findOne({ where: { id }, }); @@ -152,7 +152,7 @@ export class SubscriptionService { return savedSubscription; } - async updateSubscriptionStatus(subscriptionId: number, status: string, metadata?: any) { + async updateSubscriptionStatus(subscriptionId: string, status: string, metadata?: any) { const subscription = await this.userSubscriptionRepository.findOne({ where: { id: subscriptionId }, }); @@ -179,7 +179,7 @@ export class SubscriptionService { // Subscription History async createSubscriptionHistory( - subscriptionId: number, + subscriptionId: string, action: string, oldStatus: string | null, newStatus: string | null, @@ -195,7 +195,7 @@ export class SubscriptionService { return await this.subscriptionHistoryRepository.save(history); } - async 
getSubscriptionHistory(subscriptionId: number) { + async getSubscriptionHistory(subscriptionId: string) { return await this.subscriptionHistoryRepository.find({ where: { userSubscriptionId: subscriptionId }, order: { createdAt: 'DESC' }, @@ -203,7 +203,7 @@ export class SubscriptionService { } // Payment Records - async createPaymentRecord(subscriptionId: number, paymentData: Partial) { + async createPaymentRecord(subscriptionId: string, paymentData: Partial) { const payment = this.paymentRecordRepository.create({ userSubscriptionId: subscriptionId, ...paymentData, @@ -212,7 +212,7 @@ export class SubscriptionService { return await this.paymentRecordRepository.save(payment); } - async getPaymentRecords(subscriptionId: number) { + async getPaymentRecords(subscriptionId: string) { return await this.paymentRecordRepository.find({ where: { userSubscriptionId: subscriptionId }, order: { createdAt: 'DESC' }, diff --git a/meteor-web-backend/src/supabase/index.ts b/meteor-web-backend/src/supabase/index.ts new file mode 100644 index 0000000..1b4bbcd --- /dev/null +++ b/meteor-web-backend/src/supabase/index.ts @@ -0,0 +1,2 @@ +export * from './supabase.module'; +export * from './supabase.service'; diff --git a/meteor-web-backend/src/supabase/supabase.module.ts b/meteor-web-backend/src/supabase/supabase.module.ts new file mode 100644 index 0000000..0c9abb6 --- /dev/null +++ b/meteor-web-backend/src/supabase/supabase.module.ts @@ -0,0 +1,9 @@ +import { Global, Module } from '@nestjs/common'; +import { SupabaseService } from './supabase.service'; + +@Global() +@Module({ + providers: [SupabaseService], + exports: [SupabaseService], +}) +export class SupabaseModule {} diff --git a/meteor-web-backend/src/supabase/supabase.service.ts b/meteor-web-backend/src/supabase/supabase.service.ts new file mode 100644 index 0000000..2fcdcdd --- /dev/null +++ b/meteor-web-backend/src/supabase/supabase.service.ts @@ -0,0 +1,101 @@ +import { Injectable, OnModuleInit } from '@nestjs/common'; 
+import { createClient, SupabaseClient } from '@supabase/supabase-js'; + +@Injectable() +export class SupabaseService implements OnModuleInit { + private supabase: SupabaseClient; + private supabaseAdmin: SupabaseClient; + + onModuleInit() { + const supabaseUrl = process.env.SUPABASE_URL; + const supabasePublishableKey = process.env.SUPABASE_PUBLISHABLE_KEY; + const supabaseSecretKey = process.env.SUPABASE_SECRET_KEY; + + if (!supabaseUrl) { + console.warn('SUPABASE_URL not configured - Supabase features disabled'); + return; + } + + // Client for public operations (uses publishable key) + if (supabasePublishableKey) { + this.supabase = createClient(supabaseUrl, supabasePublishableKey); + } + + // Admin client for server-side operations (uses secret key) + if (supabaseSecretKey) { + this.supabaseAdmin = createClient(supabaseUrl, supabaseSecretKey, { + auth: { + autoRefreshToken: false, + persistSession: false, + }, + }); + } + + console.log('Supabase clients initialized'); + } + + /** + * Get the public Supabase client + */ + getClient(): SupabaseClient { + if (!this.supabase) { + throw new Error('Supabase client not initialized'); + } + return this.supabase; + } + + /** + * Get the admin Supabase client (for server-side operations) + */ + getAdminClient(): SupabaseClient { + if (!this.supabaseAdmin) { + throw new Error('Supabase admin client not initialized'); + } + return this.supabaseAdmin; + } + + /** + * Verify a Supabase JWT token + */ + async verifyToken(token: string) { + if (!this.supabaseAdmin) { + throw new Error('Supabase admin client not initialized'); + } + + const { + data: { user }, + error, + } = await this.supabaseAdmin.auth.getUser(token); + + if (error) { + return { user: null, error }; + } + + return { user, error: null }; + } + + /** + * Health check for Supabase connection + */ + async healthCheck(): Promise<{ connected: boolean; error?: string }> { + try { + if (!this.supabaseAdmin) { + return { connected: false, error: 'Supabase not 
configured' }; + } + + // Simple query to check connection + const { error } = await this.supabaseAdmin + .from('user_profiles') + .select('id') + .limit(1); + + if (error) { + return { connected: false, error: error.message }; + } + + return { connected: true }; + } catch (err) { + return { connected: false, error: String(err) }; + } + } +} diff --git a/meteor-web-backend/src/weather/weather.controller.ts b/meteor-web-backend/src/weather/weather.controller.ts index 8370292..667d39c 100644 --- a/meteor-web-backend/src/weather/weather.controller.ts +++ b/meteor-web-backend/src/weather/weather.controller.ts @@ -1,4 +1,4 @@ -import { Controller, Get, Param, Query, UseGuards, NotFoundException, ParseIntPipe } from '@nestjs/common'; +import { Controller, Get, Param, Query, UseGuards, NotFoundException } from '@nestjs/common'; import { WeatherService, WeatherSummary, WeatherQuery } from './weather.service'; import { WeatherStation } from '../entities/weather-station.entity'; import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard'; @@ -55,13 +55,13 @@ export class WeatherController { } @Get('stations/:id') - async getStationById(@Param('id', ParseIntPipe) id: number) { + async getStationById(@Param('id') id: string) { return await this.weatherService.getStation(id); } @Get('stations/:id/forecasts') async getStationForecasts( - @Param('id', ParseIntPipe) id: number, + @Param('id') id: string, @Query('days') days?: string, ) { const forecastDays = days ? 
parseInt(days) : 7; diff --git a/meteor-web-backend/src/weather/weather.service.ts b/meteor-web-backend/src/weather/weather.service.ts index ca370ef..14f2bc3 100644 --- a/meteor-web-backend/src/weather/weather.service.ts +++ b/meteor-web-backend/src/weather/weather.service.ts @@ -215,7 +215,7 @@ export class WeatherService { } } - async updateWeatherData(stationId: number, weatherData: Partial): Promise { + async updateWeatherData(stationId: string, weatherData: Partial): Promise { try { await this.weatherStationRepository.update(stationId, weatherData); const updatedStation = await this.weatherStationRepository.findOne({ @@ -248,7 +248,7 @@ export class WeatherService { }; if (location) { - options.where = { location }; + options.where = { stationName: location }; } const [stations, total] = await this.weatherStationRepository.findAndCount(options); @@ -264,7 +264,7 @@ export class WeatherService { }; } - async getStation(id: number) { + async getStation(id: string) { const station = await this.weatherStationRepository.findOne({ where: { id }, }); @@ -289,7 +289,7 @@ export class WeatherService { .take(limit); if (location) { - queryBuilder.andWhere('station.location = :location', { location }); + queryBuilder.andWhere('station.stationName = :location', { location }); } if (startDate && endDate) { @@ -337,7 +337,7 @@ export class WeatherService { .take(limit); if (location) { - queryBuilder.andWhere('station.location = :location', { location }); + queryBuilder.andWhere('station.stationName = :location', { location }); } const [forecasts, total] = await queryBuilder.getManyAndCount(); @@ -353,7 +353,7 @@ export class WeatherService { }; } - async getForecastsByStation(stationId: number, days: number = 7) { + async getForecastsByStation(stationId: string, days: number = 7) { const endDate = new Date(); endDate.setDate(endDate.getDate() + days); @@ -374,9 +374,8 @@ export class WeatherService { // Enhanced Weather Statistics async getWeatherStats() { const 
totalStations = await this.weatherStationRepository.count(); - const activeStations = await this.weatherStationRepository.count({ - where: { status: 'active' }, - }); + // All stations are considered active (no status column in DB) + const activeStations = totalStations; const avgTemperature = await this.weatherObservationRepository .createQueryBuilder('obs') diff --git a/package-lock.json b/package-lock.json index 07325ba..ef56261 100644 --- a/package-lock.json +++ b/package-lock.json @@ -25,6 +25,7 @@ "@playwright/test": "^1.54.1", "@radix-ui/react-label": "^2.1.7", "@radix-ui/react-slot": "^1.2.3", + "@supabase/supabase-js": "^2.89.0", "@tanstack/react-query": "^5.83.0", "@types/qrcode": "^1.5.5", "class-variance-authority": "^0.7.1", @@ -8026,6 +8027,7 @@ "@nestjs/throttler": "^6.4.0", "@nestjs/typeorm": "^11.0.0", "@nestjs/websockets": "^11.1.6", + "@supabase/supabase-js": "^2.89.0", "@types/bcrypt": "^6.0.0", "@types/node-forge": "^1.3.13", "@types/passport-jwt": "^4.0.1", @@ -19300,6 +19302,107 @@ "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", "license": "MIT" }, + "node_modules/@supabase/auth-js": { + "version": "2.89.0", + "resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.89.0.tgz", + "integrity": "sha512-wiWZdz8WMad8LQdJMWYDZ2SJtZP5MwMqzQq3ehtW2ngiI3UTgbKiFrvMUUS3KADiVlk4LiGfODB2mrYx7w2f8w==", + "license": "MIT", + "dependencies": { + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/functions-js": { + "version": "2.89.0", + "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.89.0.tgz", + "integrity": "sha512-XEueaC5gMe5NufNYfBh9kPwJlP5M2f+Ogr8rvhmRDAZNHgY6mI35RCkYDijd92pMcNM7g8pUUJov93UGUnqfyw==", + "license": "MIT", + "dependencies": { + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/postgrest-js": { + "version": "2.89.0", + "resolved": 
"https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-2.89.0.tgz", + "integrity": "sha512-/b0fKrxV9i7RNOEXMno/I1862RsYhuUo+Q6m6z3ar1f4ulTMXnDfv0y4YYxK2POcgrOXQOgKYQx1eArybyNvtg==", + "license": "MIT", + "dependencies": { + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/realtime-js": { + "version": "2.89.0", + "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.89.0.tgz", + "integrity": "sha512-aMOvfDb2a52u6PX6jrrjvACHXGV3zsOlWRzZsTIOAJa0hOVvRp01AwC1+nLTGUzxzezejrYeCX+KnnM1xHdl+w==", + "license": "MIT", + "dependencies": { + "@types/phoenix": "^1.6.6", + "@types/ws": "^8.18.1", + "tslib": "2.8.1", + "ws": "^8.18.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/realtime-js/node_modules/ws": { + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/@supabase/storage-js": { + "version": "2.89.0", + "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.89.0.tgz", + "integrity": "sha512-6zKcXofk/M/4Eato7iqpRh+B+vnxeiTumCIP+Tz26xEqIiywzD9JxHq+udRrDuv6hXE+pmetvJd8n5wcf4MFRQ==", + "license": "MIT", + "dependencies": { + "iceberg-js": "^0.8.1", + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/supabase-js": { + "version": "2.89.0", + "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.89.0.tgz", + "integrity": "sha512-KlaRwSfFA0fD73PYVMHj5/iXFtQGCcX7PSx0FdQwYEEw9b2wqM7GxadY+5YwcmuEhalmjFB/YvqaoNVF+sWUlg==", + "license": "MIT", + "dependencies": { + 
"@supabase/auth-js": "2.89.0", + "@supabase/functions-js": "2.89.0", + "@supabase/postgrest-js": "2.89.0", + "@supabase/realtime-js": "2.89.0", + "@supabase/storage-js": "2.89.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@tokenizer/inflate": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz", @@ -19420,6 +19523,12 @@ "@types/node": "*" } }, + "node_modules/@types/phoenix": { + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.7.tgz", + "integrity": "sha512-oN9ive//QSBkf19rfDv45M7eZPi0eEXylht2OLEXicu5b4KoQ1OzXIw+xDSGWxSxe1JmepRR/ZH283vsu518/Q==", + "license": "MIT" + }, "node_modules/@types/qrcode": { "version": "1.5.5", "resolved": "https://registry.npmjs.org/@types/qrcode/-/qrcode-1.5.5.tgz", @@ -19441,6 +19550,15 @@ "integrity": "sha512-y7pa/oEJJ4iGYBxOpfAKn5b9+xuihvzDVnC/OSvlVnGxVg0pOqmjiMafiJ1KVNQEaPZf9HsEp5icEwGg8uIe5Q==", "license": "MIT" }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/accepts": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", @@ -20569,6 +20687,15 @@ "node": ">= 0.8" } }, + "node_modules/iceberg-js": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/iceberg-js/-/iceberg-js-0.8.1.tgz", + "integrity": "sha512-1dhVQZXhcHje7798IVM+xoo/1ZdVfzOMIc8/rgVSijRK38EDqOJoGula9N/8ZI5RD8QTxNQtK/Gozpr+qUqRRA==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",