From 1a1afce7b3ade8e8a4ba2d51f4666a9a780ff774 Mon Sep 17 00:00:00 2001 From: dev-guy Date: Sun, 6 Jul 2025 19:37:29 -0700 Subject: [PATCH 1/2] Misc --- README.md | 253 +++++++++++++----- config/dev.exs | 4 +- lib/geo/application.ex | 13 +- lib/geo/geography/country/cache.ex | 45 ++-- lib/geo/geography/country/cache/server.ex | 185 +++++++------ lib/mix/tasks/{geo.restart.ex => restart.ex} | 69 +++++ lib/mix/tasks/{geo.seed.ex => seed.ex} | 0 lib/mix/tasks/{geo.stop.ex => stop.ex} | 0 log_trimmer_1751842644438.exs | 42 +++ log_trimmer_1751843245279.exs | 42 +++ log_trimmer_1751843394757.exs | 42 +++ log_trimmer_1751843473592.exs | 42 +++ log_trimmer_1751854827289.exs | 42 +++ log_trimmer_1751854856848.exs | 42 +++ log_trimmer_1751854959655.exs | 42 +++ log_trimmer_1751855026237.exs | 42 +++ log_trimmer_1751855102686.exs | 42 +++ log_trimmer_1751855180331.exs | 42 +++ log_trimmer_1751855277743.exs | 42 +++ log_trimmer_1751855689696.exs | 42 +++ .../cache/server_lazy_loading_test.exs | 97 +++++++ 21 files changed, 982 insertions(+), 188 deletions(-) rename lib/mix/tasks/{geo.restart.ex => restart.ex} (51%) rename lib/mix/tasks/{geo.seed.ex => seed.ex} (100%) rename lib/mix/tasks/{geo.stop.ex => stop.ex} (100%) create mode 100644 log_trimmer_1751842644438.exs create mode 100644 log_trimmer_1751843245279.exs create mode 100644 log_trimmer_1751843394757.exs create mode 100644 log_trimmer_1751843473592.exs create mode 100644 log_trimmer_1751854827289.exs create mode 100644 log_trimmer_1751854856848.exs create mode 100644 log_trimmer_1751854959655.exs create mode 100644 log_trimmer_1751855026237.exs create mode 100644 log_trimmer_1751855102686.exs create mode 100644 log_trimmer_1751855180331.exs create mode 100644 log_trimmer_1751855277743.exs create mode 100644 log_trimmer_1751855689696.exs create mode 100644 test/geo/geography/country/cache/server_lazy_loading_test.exs diff --git a/README.md b/README.md index e1e283f..c492b7f 100644 --- a/README.md +++ b/README.md @@ -21,14 +21,16 @@ Geo is an Elixir/Phoenix application built with the Ash Framework that provides This project might be useful if you're curious about the Ash Framework (version 3.5+) and you're looking for slightly more advanced Ash usage like: -Resources: +**Resources:** - Implementing manual reads and generic actions - Defining reusable attributes via macros - Implementing `slug` attributes that are computed via a 'change' when not provided (a change is the primary mechanism for customizing what happens during create, update, and destroy actions) - Seeding data via bulk upsert +- Using manual read actions for cache-backed queries +- Implementing search functionality with dedicated search result resources -LiveView: +**LiveView:** - A Phoenix component built from Mishka Chelekom's `<.combobox>` ![CleanShot 2025-06-16 at 11 03 53@2x](https://github.com/user-attachments/assets/0d672734-855c-49dc-828e-a539d29f078b) @@ -37,16 +39,24 @@ LiveView: - Not very fun fact: It has complex state and took well over 90% of the development effort. Ash was the easy part, by far! 
- A LiveView component that orchestrates Phoenix components and Ash resources +**Caching & Performance:** + +- Poolboy-based worker pool for high-performance caching +- On-demand worker creation with overflow capacity +- Intelligent search prioritization and result grouping +- Automatic cache refresh with graceful error handling + ## Custom Mix Tasks -- `seed`: Upserts seed data -- `start`: Starts the server in a foreground process -- `restart`: Starts/restarts server in background process, sends STDOUT and STDERR to geo.log -- `stop`: Stops server running in background process +- `geo.seed`: Upserts seed data +- `geo.start`: Starts the server in a foreground process +- `geo.restart`: Starts/restarts server in background process, sends STDOUT and STDERR to geo.log +- `geo.stop`: Stops server running in background process ## Running on Fly.io These tips are only suitable for a hobby project. Real projects should use an alternative Postgres solution. + ### Preparation 1. Go to fly.io and create an account @@ -122,37 +132,44 @@ After updating secrets or code, run `mix deploy` - **Domain Layer**: `Geo.Geography` - Core business logic and operations - **Resource Layer**: `Geo.Geography.Country` - Data models and validations with modular attributes +- **Search Layer**: `Geo.Geography.Country.SearchResult` - Dedicated search result resource for structured search responses - **Web Layer**: Phoenix LiveView components for interactive UI -- **Caching Layer**: High-performance country lookup and search caching with GenServer +- **Caching Layer**: High-performance country lookup and search caching with Poolboy-managed GenServer workers - **Attribute Layer**: Reusable attribute modules (`Geo.Resources.Attributes.*`) for DRY resource definitions - **Change Layer**: Custom change modules for automatic data transformations ### Key Features - **Country Management**: Full CRUD operations for country data (ISO codes, names, flags, slugs) -- **Intelligent Search**: Multi-criteria search with prioritized results (ISO codes, names) -- **High-Performance Caching**: Fast searches via `Geo.Geography.Country.Cache` with automatic refresh every 10 minutes +- **Intelligent Search**: Multi-criteria search with prioritized results (ISO codes, names) returned in structured format +- **High-Performance Caching**: Fast searches via `Geo.Geography.Country.Cache` with automatic refresh every 30 minutes +- **Pooled Workers**: Poolboy-managed cache workers (0 permanent, up to min(8, System.schedulers_online()) overflow workers) - **Interactive UI**: Real-time search with grouped, sortable results - **Upsert Operations**: Efficient create-or-update operations using unique identities +- **Manual Read Actions**: Cache-backed queries that bypass the database for ultra-fast lookups This project uses `dialyzer` and `credo` for code cleanliness. ### Requirements - PostgreSQL +- Elixir 1.18+ +- Node.js (for asset compilation) ### Technology Stack - **Backend**: Elixir - **Frontend**: Phoenix LiveView with Mishka Chelekom components - **Database**: PostgreSQL with Ecto/AshPostgres -- **Application**: Ash Framework for domain modeling +- **Application Framework**: Ash Framework for domain modeling +- **Caching**: Poolboy for worker management +- **Testing**: Playwright for E2E testing, ExUnit for unit testing ## Usage -1. Install Elixir +1. Install Elixir 1.18+ 2. Install PostgreSQL -3. Install `nodejs` (this might be optional) +3. Install Node.js 4. `mix setup` to install and setup dependencies 5. 
`mix phx.server` or inside IEx with `iex -S mix phx.server` @@ -162,15 +179,19 @@ Ready to run in production? Please [check our deployment guides](https://hexdocs ## Running Tests -1. UI: - - `npm run lint` - - `npm format` - - `npm test` +1. **UI Tests**: + - `npm run lint` - ESLint checking + - `npm run format` - Prettier formatting + - `npm test` - Playwright E2E tests -3. Elixir: - - `mix credo` - - `mix dialyzer` - - `mix test` +2. **Elixir Tests**: + - `mix credo` - Code quality analysis + - `mix dialyzer` - Static type analysis + - `mix test` - Unit tests + +3. **E2E Tests**: + - Playwright tests for the country combobox functionality + - Tests cover search, selection, grouping, and sorting behavior ## Architecture @@ -191,7 +212,7 @@ classDiagram class Geo.Geography.Country { <> Domain: Geo.Geography - Source: lib/geo/resources/country.ex + Source: lib/geo/geography/country.ex Ash.Type.UUIDv7 id Ash.Type.CiString name @@ -205,7 +226,16 @@ classDiagram create(name, iso_code, flag, slug) upsert(name, iso_code, flag, slug) update(name, slug, iso_code, flag) - get_by_iso_code_cached(iso_code) + get_by_iso_code(iso_code) + } + + class Geo.Geography.Country.SearchResult { + <> + Domain: Geo.Geography + Source: lib/geo/geography/country/search_result.ex + + Array~Country~ countries_by_iso_code + Array~Country~ countries_by_name search(query) } @@ -242,7 +272,7 @@ classDiagram class Geo.Geography.Country.Cache { <> - search!(query) tuple + search!(query) SearchResult get_by_iso_code!(iso_code) country refresh() ok running?() boolean @@ -253,28 +283,30 @@ classDiagram class Geo.Geography.Country.Cache.Server { <> start_link() ok_pid - search!(query) tuple + search!(query) SearchResult get_by_iso_code!(iso_code) country refresh() ok handle_info(:refresh) noreply + @refresh_interval 30 minutes } class Poolboy { <> transaction(pool, fun) result status(pool) keyword - size 5 - max_overflow 0 + size 0 + max_overflow min(8, System.schedulers_online()) } Geo.Geography --> Geo.Geography.Country : uses + Geo.Geography --> Geo.Geography.Country.SearchResult : uses Geo.Geography.Country --|> Geo.Resources.Attributes.Id : uses Geo.Geography.Country --|> Geo.Resources.Attributes.Name : uses Geo.Geography.Country --|> Geo.Resources.Attributes.Slug : uses Geo.Geography.Country --|> Geo.Resources.Attributes.Timestamps : uses Geo.Geography.Country --> Geo.Resources.Changes.SlugifyName : applies Geo.Geography.Country.Cache.Server --> Geo.Geography : calls for refresh - Poolboy --> Geo.Geography.Country.Cache.Server : manages 5 workers + Poolboy --> Geo.Geography.Country.Cache.Server : manages overflow workers Geo.Geography.Country.Cache --> Poolboy : uses pool transactions Geo.Geography.Country.Cache --> Geo.Geography.Country.Cache.Server : calls via pool ``` @@ -287,32 +319,38 @@ sequenceDiagram participant LiveView as GeoWeb.HomeLive participant Component as GeoWeb.CountrySelector participant Domain as Geo.Geography - participant Resource as Geo.Geography.Country + participant SearchResource as Geo.Geography.Country.SearchResult participant Cache as Geo.Geography.Country.Cache + participant Pool as Poolboy + participant Worker as Cache.Server participant DB as PostgreSQL User->>Component: Types search query - Component->>Component: handle_event("search_combobox_updated") + Component->>Component: handle_event("search_combobox_search") Component->>Domain: search_countries!(query) - Domain->>Resource: search action - Resource->>Cache: search!(query) + Domain->>SearchResource: search action + 
SearchResource->>Cache: search!(query) + Cache->>Pool: transaction(pool, fn worker -> ... end) + Pool->>Worker: GenServer.call(worker, {:search, query}) alt Cache Hit - Cache-->>Resource: {iso_code_results, name_results} - Resource-->>Domain: Search results + Worker-->>Pool: SearchResult struct + Pool-->>Cache: SearchResult struct + Cache-->>SearchResource: SearchResult struct + SearchResource-->>Domain: SearchResult resource else Cache Miss/Refresh - Cache->>Domain: list_countries() for refresh - Domain->>Resource: read action - Resource->>DB: Query all countries - DB-->>Resource: Country records - Resource-->>Domain: Countries list - Domain-->>Cache: Countries for caching - Cache->>Cache: Process and cache results - Cache-->>Resource: {iso_code_results, name_results} - Resource-->>Domain: Search results + Worker->>Domain: list_countries() for refresh + Domain->>DB: Query all countries + DB-->>Domain: Country records + Domain-->>Worker: Countries for caching + Worker->>Worker: Process and cache results + Worker-->>Pool: SearchResult struct + Pool-->>Cache: SearchResult struct + Cache-->>SearchResource: SearchResult struct + SearchResource-->>Domain: SearchResult resource end - Domain-->>Component: %{by_iso_code: [...], by_name: [...]} + Domain-->>Component: SearchResult{countries_by_iso_code: [...], countries_by_name: [...]} Component->>Component: Update current_countries assign Component-->>User: Render updated search results @@ -346,13 +384,13 @@ classDiagram iso_code_group_collapsed Boolean name_group_collapsed Boolean group_order Atom - search_combobox_updated/2 - toggle_group_sort/2 - toggle_group_collapse/2 + search_combobox_search/2 + search_combobox_toggle_group_sort/2 + search_combobox_toggle_group_collapse/2 country_selected/2 } - class MishkaChelekom.SearchCombobox { + class GeoWeb.SearchCombobox { <> name String value String @@ -361,6 +399,7 @@ classDiagram variant String color String enable_group_sorting Boolean + group_states Map } class CountryOptionContent { @@ -384,12 +423,18 @@ classDiagram slug CiString } + class Geo.Geography.Country.SearchResult { + <> + countries_by_iso_code Array~Country~ + countries_by_name Array~Country~ + } + GeoWeb.HomeLive --> GeoWeb.CountrySelector : uses - GeoWeb.CountrySelector --> MishkaChelekom.SearchCombobox : renders + GeoWeb.CountrySelector --> GeoWeb.SearchCombobox : renders GeoWeb.CountrySelector --> CountryOptionContent : renders GeoWeb.CountrySelector --> Geo.Geography : calls GeoWeb.HomeLive ..> Geo.Geography.Country : displays - GeoWeb.CountrySelector ..> Geo.Geography.Country : manages + GeoWeb.CountrySelector ..> Geo.Geography.Country.SearchResult : receives ``` ### C4 Architecture Diagrams @@ -420,7 +465,7 @@ C4Container Container(web, "Phoenix Web Server", "Elixir/Phoenix", "Handles HTTP requests and WebSocket connections") Container(liveview, "LiveView Components", "Phoenix LiveView", "Interactive UI components for country selection") Container(domain, "Geography Domain", "Ash Framework", "Core business logic and country operations") - Container(cache, "Country Cache", "Elixir/ETS", "High-performance country lookup cache") + Container(cache, "Country Cache", "Poolboy/GenServer", "High-performance country lookup cache with worker pool") } ContainerDb(postgres, "PostgreSQL Database", "PostgreSQL", "Stores country data") @@ -447,26 +492,30 @@ C4Component Container_Boundary(domain, "Geography Domain") { Component(geography, "Geo.Geography", "Ash Domain", "Main domain interface with defined functions") Component(country_resource, 
"Country Resource", "Ash Resource", "Country data model with actions and validations") + Component(search_resource, "SearchResult Resource", "Ash Resource", "Search result data model with manual search action") Component(manual_read, "ManualGetByIsoCode", "Ash Manual Read", "Custom read implementation using cache") + Component(manual_search, "ManualSearch", "Ash Manual Read", "Custom search implementation using cache") } Container_Boundary(cache, "Caching Layer") { Component(cache, "Country.Cache", "Module", "Poolboy-based cache API") - Component(poolboy, "Poolboy Pool", "Pool Manager", "Manages 5 cache worker processes") - Component(cache_genserver, "Country.Cache.GenServer", "GenServer", "High-performance country caching workers") + Component(poolboy, "Poolboy Pool", "Pool Manager", "Manages overflow worker processes (0 permanent, up to 8 overflow)") + Component(cache_genserver, "Country.Cache.Server", "GenServer", "High-performance country caching workers") } ContainerDb(postgres, "PostgreSQL", "Database") Rel(home_live, country_selector, "Uses") Rel(country_selector, geography, "Calls search_countries") - Rel(geography, country_resource, "Uses for actions") + Rel(geography, search_resource, "Uses for search") + Rel(geography, country_resource, "Uses for CRUD") + Rel(search_resource, manual_search, "Uses for cached search") Rel(country_resource, manual_read, "Uses for cached reads") Rel(manual_read, cache, "get_by_iso_code!") - Rel(geography, cache, "search!") + Rel(manual_search, cache, "search!") Rel(cache, poolboy, "Uses pool transactions") - Rel(poolboy, cache_genserver, "Manages 5 workers") - Rel(cache_genserver, geography, "Periodic refresh via") + Rel(poolboy, cache_genserver, "Manages overflow workers") + Rel(cache_genserver, geography, "Periodic refresh via (30 min)") Rel(country_resource, postgres, "CRUD operations") ``` @@ -497,8 +546,8 @@ These modules use `__using__` macros to inject attribute definitions, validation The main domain provides these key operations: - `list_countries/0` - Lists all countries -- `search_countries/1` - Intelligent search for UI components -- `get_country_by_iso_code/1` - High-performance country search by ISO code +- `search_countries!/1` - Intelligent search returning structured results via SearchResult resource +- `get_country_by_iso_code!/1` - High-performance country lookup by ISO code using cache - `create_country/1`, `update_country/1`, `upsert_country/1` - Country management ### Geo.Geography.Country Resource @@ -517,27 +566,91 @@ Key features: - Upsert capability using ISO code identity - Cached search operations for performance via manual read actions - Modular attribute composition using reusable attribute modules -- Manual read action `get_by_iso_code_cached` that bypasses database for cached lookups -- Map action `search` that returns structured search results from cache +- Manual read action `get_by_iso_code` that bypasses database for cached lookups + +### Geo.Geography.Country.SearchResult Resource + +A dedicated resource for search results: +- `countries_by_iso_code` - Array of countries matching by ISO code +- `countries_by_name` - Array of countries matching by name +- Manual search action that returns structured search results from cache +- Enables clean separation of search logic from base country resource ## Performance Features ### Caching Strategy - `Geo.Geography.Country.Cache` provides Poolboy-based cache API -- Pool of 5 `Geo.Geography.Country.Cache.Server` workers managed by Poolboy -- No race conditions - Poolboy 
handles worker allocation and load balancing -- Workers start at application boot - no lazy loading complexity -- Automatic cache refresh every 30 minutes via scheduled messages in each worker -- Intelligent search with prioritized results returned as separate lists: +- Pool of `Geo.Geography.Country.Cache.Server` workers managed by Poolboy +- **Configuration**: 0 permanent workers, up to `min(8, System.schedulers_online())` overflow workers +- **On-demand scaling**: Workers start only when needed, efficient resource usage +- **Automatic cache refresh**: Every 30 minutes via scheduled messages in each worker +- **Graceful startup**: Retry logic if database is not available during worker initialization +- **Intelligent search** with prioritized results returned in structured format: 1. **ISO Code Results**: Exact ISO code matches, then partial ISO code matches (≀3 chars) 2. **Name Results**: Exact name matches, names starting with query, then names containing query -- Each worker maintains two sorted collections: `countries_by_iso_code` and `countries_by_name` -- Graceful startup with retry logic if database is not available +- Each worker maintains optimized data structures: sorted lists and maps for fast lookup - Pool transactions ensure worker availability and fault tolerance +### Search Algorithm Details + +The cache implements a sophisticated search algorithm: + +1. **Exact matches**: Direct hash lookups for ISO codes and names +2. **Partial ISO code matching**: Only for queries ≀ 3 characters to avoid noise +3. **Name prefix matching**: Efficient string prefix search +4. **Name substring matching**: Fallback for partial name matches +5. **Result deduplication**: Ensures no country appears in both result groups +6. **Cross-pollination**: Countries from ISO code results are added to name results and vice versa + ### UI Optimizations - Real-time search with debouncing -- Grouped results (by ISO code and name) -- Sortable groups with multiple sort orders +- Grouped results (by ISO code and name) with independent sorting - Collapsible groups for better UX -- Efficient re-rendering with LiveView +- Efficient re-rendering with LiveView component state management +- Smart group ordering based on search context + +## Database Schema + +The PostgreSQL schema includes: + +- **countries** table with columns: id, name, iso_code, flag, slug, created_at, updated_at +- **Unique constraints**: iso_code, slug, name +- **Indexes**: Automatically created for identities and primary key +- **Data types**: UUIDv7 for IDs, case-insensitive strings for searchable fields + +## Testing Architecture + +### End-to-End Testing +- **Playwright**: Comprehensive E2E testing of the country combobox +- **Test coverage**: Search functionality, group sorting, collapsing, country selection +- **CI/CD integration**: Tests run in headless mode with structured reporting + +### Unit Testing +- **ExUnit**: Standard Elixir testing framework +- **Test utilities**: `Ash.Test` for resource testing +- **Mocking**: Test-specific configurations for cache and database operations + +### Code Quality +- **Credo**: Elixir code style and quality analysis +- **Dialyzer**: Static type analysis and bug detection +- **Continuous integration**: Automated testing on all changes + +## Development Workflow + +### Local Development +1. **Database setup**: PostgreSQL with development database +2. **Dependency management**: Mix for Elixir deps, npm for JS/CSS +3. **Asset compilation**: Tailwind CSS + ESBuild for optimized assets +4. 
**Live reload**: Phoenix LiveReload for development efficiency + +### Production Deployment +1. **Containerization**: Docker with multi-stage builds +2. **Asset optimization**: Minified CSS/JS with digest hashing +3. **Database migrations**: Automated via Ash codegen +4. **Monitoring**: Telemetry integration for performance tracking + +### Cache Management +- **Development**: Cache workers start on-demand +- **Production**: Pool sizing based on system capabilities +- **Monitoring**: Cache status and performance metrics available +- **Maintenance**: Manual refresh capabilities for data updates diff --git a/config/dev.exs b/config/dev.exs index db2b034..2e0f102 100644 --- a/config/dev.exs +++ b/config/dev.exs @@ -82,8 +82,8 @@ config :geo, GeoWeb.Endpoint, # Enable dev routes for dashboard and mailbox config :geo, dev_routes: true -# Do not include metadata nor timestamps in development logs -config :logger, :console, format: "[$level] $message\n" +# Include ISO8601 timestamps in development logs +config :logger, :console, format: "$dateT$time [$level] $message\n" # Set a higher stacktrace during development. Avoid configuring such # in production as building large stacktraces may be expensive. diff --git a/lib/geo/application.ex b/lib/geo/application.ex index 51b69c7..8e95bd4 100644 --- a/lib/geo/application.ex +++ b/lib/geo/application.ex @@ -37,14 +37,11 @@ defmodule Geo.Application do start: {:poolboy, :start_link, [ - [ - name: {:local, :country_cache_pool}, - worker_module: Geo.Geography.Country.Cache.Server, - # 5 permanent workers - size: 5, - # No overflow workers (fixed pool size) - max_overflow: 0 - ] + [ + name: {:local, :country_cache}, + worker_module: Geo.Geography.Country.Cache.Server, + size: min(5, System.schedulers_online()) + ] ]} }, # Start the Finch HTTP client for sending emails diff --git a/lib/geo/geography/country/cache.ex b/lib/geo/geography/country/cache.ex index 504c2ac..586c31d 100644 --- a/lib/geo/geography/country/cache.ex +++ b/lib/geo/geography/country/cache.ex @@ -1,12 +1,13 @@ defmodule Geo.Geography.Country.Cache do @moduledoc """ API for country cache operations. Only used by the Country resource. - Uses Poolboy to manage a pool of 5 cache GenServer workers for load balancing. + Uses Poolboy to manage a pool of cache GenServer workers for load balancing. + Starts with 0 permanent workers and can overflow up to min(8, System.schedulers_online()) workers. """ require Logger - @pool_name :country_cache_pool + @pool_name :country_cache @doc """ Search for countries using the pooled cache workers. @@ -54,17 +55,24 @@ defmodule Geo.Geography.Country.Cache do Logger.info("Refreshing #{worker_count} cache workers") - # Refresh each worker in the pool + # Refresh each worker in the pool (if any are running) refresh_results = - for _ <- 1..worker_count do - :poolboy.transaction( - @pool_name, - fn worker -> - GenServer.call(worker, :refresh) - end, - # 30 second timeout for refresh - 30_000 - ) + if worker_count > 0 do + for _ <- 1..worker_count do + :poolboy.transaction( + @pool_name, + fn worker -> + GenServer.call(worker, :refresh) + end, + # 30 second timeout for refresh + 30_000 + ) + end + else + # No workers running, trigger a cache load by doing a dummy search + # This will create a worker on-demand that will load fresh data + search!("") + [:ok] end case Enum.all?(refresh_results, &(&1 == :ok)) do @@ -79,19 +87,6 @@ defmodule Geo.Geography.Country.Cache do end end - @doc """ - Check if the cache pool is running and has workers available. 
- """ - def running? do - try do - workers = :poolboy.status(@pool_name) - total_workers = Keyword.get(workers, :ready, 0) + Keyword.get(workers, :busy, 0) - total_workers > 0 - rescue - _ -> false - end - end - @doc """ Get cache pool statistics and status. """ diff --git a/lib/geo/geography/country/cache/server.ex b/lib/geo/geography/country/cache/server.ex index a5771ef..73c8e78 100644 --- a/lib/geo/geography/country/cache/server.ex +++ b/lib/geo/geography/country/cache/server.ex @@ -1,18 +1,22 @@ defmodule Geo.Geography.Country.Cache.Server do @moduledoc """ GenServer that caches country data in memory for fast lookup and search operations. - Loads all countries once at startup and provides efficient search functions. + Uses lazy loading - countries are loaded on first access rather than at startup. Designed to work as a pooled worker with Poolboy - no longer uses named registration. + + The server will automatically stop after @stop_interval once countries are loaded. """ use GenServer require Logger - @refresh_interval :timer.minutes(30) + @stop_interval :timer.minutes(1) defmodule State do @moduledoc """ State structure for the Country Cache Server. + All country-related fields are nil until first access (lazy loading). + The timer_ref is nil until countries are loaded, then starts the stop timer. """ defstruct [ :countries_list_by_iso_code, @@ -24,11 +28,11 @@ defmodule Geo.Geography.Country.Cache.Server do ] @type t :: %__MODULE__{ - countries_list_by_iso_code: [Geo.Geography.Country.t()], - countries_list_by_name: [Geo.Geography.Country.t()], - countries_map_by_iso_code: %{String.t() => Geo.Geography.Country.t()}, - countries_map_by_name: %{String.t() => Geo.Geography.Country.t()}, - last_refresh: DateTime.t(), + countries_list_by_iso_code: [Geo.Geography.Country.t()] | nil, + countries_list_by_name: [Geo.Geography.Country.t()] | nil, + countries_map_by_iso_code: %{String.t() => Geo.Geography.Country.t()} | nil, + countries_map_by_name: %{String.t() => Geo.Geography.Country.t()} | nil, + last_refresh: DateTime.t() | nil, timer_ref: reference() | nil } end @@ -39,114 +43,107 @@ defmodule Geo.Geography.Country.Cache.Server do @impl true def init(_opts) do - try do - state = load_countries!() - - # Schedule periodic refresh - timer_ref = Process.send_after(self(), :refresh, @refresh_interval) - state = %{state | timer_ref: timer_ref} - - Logger.info("Cache worker started successfully") - {:ok, state} - rescue - error -> - Logger.error("Failed to initialize cache worker: #{inspect(error)}") - {:stop, error} - end + # Start with empty state - countries will be loaded on first access + # Timer will only start when countries are actually loaded + state = %State{ + countries_list_by_iso_code: nil, + countries_list_by_name: nil, + countries_map_by_iso_code: nil, + countries_map_by_name: nil, + last_refresh: nil, + timer_ref: nil + } + + Logger.info("Cache worker started successfully") + {:ok, state} end @impl true def handle_call(:search_all, _from, state) do - {:reply, do_search_all(state), state} + state = ensure_countries_loaded(state) + result = do_search_all(state) + {:reply, result, state} end @impl true def handle_call({:search, query}, _from, state) do + state = ensure_countries_loaded(state) {:reply, do_search(query, state), state} end @impl true def handle_call({:get_by_iso_code, iso_code}, _from, state) do - country = Map.get(state.countries_map_by_iso_code, String.downcase(iso_code)) - {:reply, country, state} - end + state = ensure_countries_loaded(state) - @impl true - def 
handle_call(:refresh, _from, state) do - try do - # Cancel existing timer if it exists - if state.timer_ref do - Process.cancel_timer(state.timer_ref) + country = + if state.countries_map_by_iso_code do + Map.get(state.countries_map_by_iso_code, String.downcase(iso_code)) + else + nil end - new_state = load_countries!() - - # Schedule next refresh - timer_ref = Process.send_after(self(), :refresh, @refresh_interval) - new_state = %{new_state | timer_ref: timer_ref} - - Logger.info("Cache worker refreshed successfully") - {:reply, :ok, new_state} - rescue - error -> - Logger.error("Failed to refresh cache worker: #{inspect(error)}") - {:reply, {:error, error}, state} - end + {:reply, country, state} end @impl true def handle_call(:status, _from, state) do - status = %{ - countries_count: map_size(state.countries_map_by_iso_code), - last_refresh: state.last_refresh, - worker_pid: self() - } + status = + if state.last_refresh do + %{ + countries_count: map_size(state.countries_map_by_iso_code), + last_refresh: state.last_refresh, + worker_pid: self(), + loaded: true + } + else + %{ + countries_count: 0, + last_refresh: nil, + worker_pid: self(), + loaded: false + } + end {:reply, status, state} end @impl true - def handle_info(:refresh, state) do - # Periodic refresh - try do - # Cancel existing timer if it exists - if state.timer_ref do - Process.cancel_timer(state.timer_ref) - end - - new_state = load_countries!() - - Logger.debug("Cache worker auto-refreshed successfully") - - # Schedule next refresh - timer_ref = Process.send_after(self(), :refresh, @refresh_interval) - new_state = %{new_state | timer_ref: timer_ref} - - {:noreply, new_state} - rescue - error -> - Logger.warning("Failed to auto-refresh cache worker: #{inspect(error)}") - - # Still schedule next refresh attempt - timer_ref = Process.send_after(self(), :refresh, @refresh_interval) - new_state = %{state | timer_ref: timer_ref} - {:noreply, new_state} - end + def handle_info(:stop, state) do + Logger.info("Cache worker stopping after #{@stop_interval} ms as scheduled") + {:stop, :normal, state} end @impl true - def terminate(_reason, state) do - # Cancel timer when GenServer is stopping + def terminate(reason, state) do + # Cancel stop timer when GenServer is stopping if state.timer_ref do Process.cancel_timer(state.timer_ref) end + + Logger.info("Cache worker exiting with reason: #{inspect(reason)}") :ok end # Private functions - # Returns a State struct with loaded countries data - defp load_countries! 
do + # Ensures countries are loaded in the state, loading them if last_refresh is nil + defp ensure_countries_loaded(state) do + if state.last_refresh do + state + else + try do + load_countries!(state) + rescue + error -> + Logger.error("Failed to load countries: #{inspect(error)}") + # Return state unchanged if loading fails + state + end + end + end + + # Returns a State struct with loaded countries data, preserving existing state + defp load_countries!(existing_state) do # Get countries sorted by iso_code (default sort from the resource) countries = Geo.Geography.list_countries!(authorize?: false) @@ -167,17 +164,39 @@ defmodule Geo.Geography.Country.Cache.Server do {Ash.CiString.to_comparable_string(country.name), country} end) + # Cancel existing timer if there is one + if existing_state.timer_ref do + Process.cancel_timer(existing_state.timer_ref) + end + + # Start stop timer now that countries are loaded + timer_ref = Process.send_after(self(), :stop, @stop_interval) + Logger.info("Countries loaded successfully, worker will stop in #{@stop_interval} ms") + %State{ countries_list_by_iso_code: countries_list_by_iso_code, countries_list_by_name: countries_list_by_name, countries_map_by_iso_code: countries_map_by_iso_code, countries_map_by_name: countries_map_by_name, last_refresh: DateTime.utc_now(), - timer_ref: nil + timer_ref: timer_ref } end defp do_search(query, state) do + # If countries not loaded, return empty results + if is_nil(state.countries_map_by_name) do + %Geo.Geography.Country.Cache.SearchResult{ + by_iso_code: [], + by_name: [] + } + else + do_search_with_data(query, state) + end + end + + defp do_search_with_data(query, state) do + query_down = String.downcase(query) # Use exact match from countries_map_by_name for efficiency @@ -271,10 +290,10 @@ defmodule Geo.Geography.Country.Cache.Server do end defp do_search_all(state) do - # Return SearchResults struct with all countries + # Return SearchResults struct with all countries, or empty if not loaded %Geo.Geography.Country.Cache.SearchResult{ - by_iso_code: state.countries_list_by_iso_code, - by_name: state.countries_list_by_name + by_iso_code: state.countries_list_by_iso_code || [], + by_name: state.countries_list_by_name || [] } end end diff --git a/lib/mix/tasks/geo.restart.ex b/lib/mix/tasks/restart.ex similarity index 51% rename from lib/mix/tasks/geo.restart.ex rename to lib/mix/tasks/restart.ex index 37e41a0..93bc4e9 100644 --- a/lib/mix/tasks/geo.restart.ex +++ b/lib/mix/tasks/restart.ex @@ -6,6 +6,7 @@ defmodule Mix.Tasks.Restart do 1. Stop any running Phoenix server 2. Start the server in the background 3. Redirect both stdout and stderr to geo.log + 4. 
Start a background process to periodically trim the log file ## Examples @@ -16,6 +17,10 @@ defmodule Mix.Tasks.Restart do @shortdoc "Restarts the Geo service" + # Configuration for log trimming + @trim_interval :timer.minutes(5) + @max_log_lines 10_000 + @impl Mix.Task def run(_args) do log_file = "geo.log" @@ -26,10 +31,14 @@ defmodule Mix.Tasks.Restart do # Start server in background with output redirection Mix.shell().info("Starting Phoenix server in background...") Mix.shell().info("Output will be redirected to #{log_file}") + Mix.shell().info("Log will be trimmed every #{div(@trim_interval, 60_000)} minutes to #{@max_log_lines} lines") # Create or truncate the log file File.write!(log_file, "") + # Start log trimming process in background + start_log_trimmer(log_file) + # Use System.cmd with a shell command to properly background the process case System.cmd("sh", ["-c", "nohup mix phx.server > #{log_file} 2>&1 &"], cd: File.cwd!()) do {_, 0} -> @@ -71,4 +80,64 @@ defmodule Mix.Tasks.Restart do ) end end + + defp start_log_trimmer(log_file) do + # Create a temporary script file for the log trimmer + script_content = """ + #!/usr/bin/env elixir + + defmodule LogTrimmer do + @trim_interval #{@trim_interval} + @max_log_lines #{@max_log_lines} + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end + end + + LogTrimmer.run("#{log_file}") + """ + + script_file = "log_trimmer_#{System.system_time(:millisecond)}.exs" + File.write!(script_file, script_content) + + # Start the log trimmer in the background + System.cmd("sh", ["-c", "nohup elixir #{script_file} > /dev/null 2>&1 &"]) + + # Clean up the script file after a moment + spawn(fn -> + Process.sleep(5000) + File.rm(script_file) + end) + end end diff --git a/lib/mix/tasks/geo.seed.ex b/lib/mix/tasks/seed.ex similarity index 100% rename from lib/mix/tasks/geo.seed.ex rename to lib/mix/tasks/seed.ex diff --git a/lib/mix/tasks/geo.stop.ex b/lib/mix/tasks/stop.ex similarity index 100% rename from lib/mix/tasks/geo.stop.ex rename to lib/mix/tasks/stop.ex diff --git a/log_trimmer_1751842644438.exs b/log_trimmer_1751842644438.exs new file mode 100644 index 0000000..1fdc5cf --- /dev/null +++ b/log_trimmer_1751842644438.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval_minutes 1 + @max_log_lines 10 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval_minutes * 60 * 1000) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> 
Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751843245279.exs b/log_trimmer_1751843245279.exs new file mode 100644 index 0000000..d8156c1 --- /dev/null +++ b/log_trimmer_1751843245279.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 1800000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751843394757.exs b/log_trimmer_1751843394757.exs new file mode 100644 index 0000000..d8156c1 --- /dev/null +++ b/log_trimmer_1751843394757.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 1800000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751843473592.exs b/log_trimmer_1751843473592.exs new file mode 100644 index 0000000..d8156c1 --- /dev/null +++ b/log_trimmer_1751843473592.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 1800000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + 
+LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751854827289.exs b/log_trimmer_1751854827289.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751854827289.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751854856848.exs b/log_trimmer_1751854856848.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751854856848.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751854959655.exs b/log_trimmer_1751854959655.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751854959655.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855026237.exs b/log_trimmer_1751855026237.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751855026237.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + 
@max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855102686.exs b/log_trimmer_1751855102686.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751855102686.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855180331.exs b/log_trimmer_1751855180331.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751855180331.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855277743.exs b/log_trimmer_1751855277743.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751855277743.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do 
+ if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855689696.exs b/log_trimmer_1751855689696.exs new file mode 100644 index 0000000..9ef26a8 --- /dev/null +++ b/log_trimmer_1751855689696.exs @@ -0,0 +1,42 @@ +#!/usr/bin/env elixir + +defmodule LogTrimmer do + @trim_interval 300000 + @max_log_lines 10000 + + def run(log_file) do + trim_log_periodically(log_file) + end + + defp trim_log_periodically(log_file) do + Process.sleep(@trim_interval) + + try do + trim_log_file(log_file) + rescue + _ -> :ok + end + + trim_log_periodically(log_file) + end + + defp trim_log_file(log_file) do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + + if length(lines) > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + end + end + end +end + +LogTrimmer.run("geo.log") diff --git a/test/geo/geography/country/cache/server_lazy_loading_test.exs b/test/geo/geography/country/cache/server_lazy_loading_test.exs new file mode 100644 index 0000000..5903ddb --- /dev/null +++ b/test/geo/geography/country/cache/server_lazy_loading_test.exs @@ -0,0 +1,97 @@ +defmodule Geo.Geography.Country.Cache.ServerLazyLoadingTest do + use ExUnit.Case, async: false + use Geo.DataCase + + setup do + # Seed some test data + Geo.Geography.create_country!(%{ + iso_code: "US", + name: "United States", + slug: "united-states", + flag: "πŸ‡ΊπŸ‡Έ" + }, authorize?: false) + + Geo.Geography.create_country!(%{ + iso_code: "CA", + name: "Canada", + slug: "canada", + flag: "πŸ‡¨πŸ‡¦" + }, authorize?: false) + + :ok + end + + test "lazy loading - countries not loaded initially" do + {:ok, pid} = Geo.Geography.Country.Cache.Server.start_link([]) + + # Initially, countries should not be loaded + status_before = GenServer.call(pid, :status) + assert status_before.loaded == false + assert status_before.countries_count == 0 + assert status_before.last_refresh == nil + + # This should trigger the lazy loading + result = GenServer.call(pid, :search_all) + assert length(result.by_iso_code) > 0 + + # Now countries should be loaded + status_after = GenServer.call(pid, :status) + assert status_after.loaded == true + assert status_after.countries_count > 0 + assert status_after.last_refresh != nil + + GenServer.stop(pid) + end + + test "lazy loading - get_by_iso_code triggers loading" do + {:ok, pid} = Geo.Geography.Country.Cache.Server.start_link([]) + + # Initially, countries should not be loaded + status_before = GenServer.call(pid, :status) + assert status_before.loaded == false + + # This should trigger the lazy loading + country = GenServer.call(pid, {:get_by_iso_code, "US"}) + assert country != nil + + # Now countries should be loaded + status_after = GenServer.call(pid, :status) + assert status_after.loaded == true + + 
GenServer.stop(pid) + end + + test "lazy loading - search triggers loading" do + {:ok, pid} = Geo.Geography.Country.Cache.Server.start_link([]) + + # Initially, countries should not be loaded + status_before = GenServer.call(pid, :status) + assert status_before.loaded == false + + # This should trigger the lazy loading + result = GenServer.call(pid, {:search, "United"}) + assert length(result.by_name) > 0 + + # Now countries should be loaded + status_after = GenServer.call(pid, :status) + assert status_after.loaded == true + + GenServer.stop(pid) + end + + test "stop timer only starts after countries are loaded" do + {:ok, pid} = Geo.Geography.Country.Cache.Server.start_link([]) + + # Worker should start without stopping immediately + Process.sleep(50) + assert Process.alive?(pid) + + # Trigger loading + _result = GenServer.call(pid, :search_all) + + # Worker should still be alive after loading + assert Process.alive?(pid) + + GenServer.stop(pid) + end +end From f2a0286473720a31b4c4c0b5c16e101a7c0f1b67 Mon Sep 17 00:00:00 2001 From: dev-guy Date: Sun, 6 Jul 2025 20:03:52 -0700 Subject: [PATCH 2/2] Misc --- .cursor/rules/usage-rules.mdc | 4 +- lib/geo/application.ex | 8 ++ lib/geo/geography/country/cache/server.ex | 5 +- lib/geo/log_trimmer.ex | 147 ++++++++++++++++++++++ lib/mix/tasks/restart.ex | 71 +++-------- lib/mix/tasks/stop.ex | 41 +++++- log_trimmer_1751842644438.exs | 42 ------- log_trimmer_1751843245279.exs | 42 ------- log_trimmer_1751843394757.exs | 42 ------- log_trimmer_1751843473592.exs | 42 ------- log_trimmer_1751854827289.exs | 42 ------- log_trimmer_1751854856848.exs | 42 ------- log_trimmer_1751854959655.exs | 42 ------- log_trimmer_1751855026237.exs | 42 ------- log_trimmer_1751855102686.exs | 42 ------- log_trimmer_1751855180331.exs | 42 ------- log_trimmer_1751855277743.exs | 42 ------- log_trimmer_1751855689696.exs | 42 ------- test/geo/log_trimmer_test.exs | 89 +++++++++++++ 19 files changed, 299 insertions(+), 570 deletions(-) create mode 100644 lib/geo/log_trimmer.ex delete mode 100644 log_trimmer_1751842644438.exs delete mode 100644 log_trimmer_1751843245279.exs delete mode 100644 log_trimmer_1751843394757.exs delete mode 100644 log_trimmer_1751843473592.exs delete mode 100644 log_trimmer_1751854827289.exs delete mode 100644 log_trimmer_1751854856848.exs delete mode 100644 log_trimmer_1751854959655.exs delete mode 100644 log_trimmer_1751855026237.exs delete mode 100644 log_trimmer_1751855102686.exs delete mode 100644 log_trimmer_1751855180331.exs delete mode 100644 log_trimmer_1751855277743.exs delete mode 100644 log_trimmer_1751855689696.exs create mode 100644 test/geo/log_trimmer_test.exs diff --git a/.cursor/rules/usage-rules.mdc b/.cursor/rules/usage-rules.mdc index aadd272..d9987fa 100644 --- a/.cursor/rules/usage-rules.mdc +++ b/.cursor/rules/usage-rules.mdc @@ -1,7 +1,5 @@ --- -description: All rules from `mix usage_rules.sync` -globs: -alwaysApply: true +alwaysApply: false --- <-- usage-rules-start --> <-- igniter-start --> diff --git a/lib/geo/application.ex b/lib/geo/application.ex index 8e95bd4..d5df4ce 100644 --- a/lib/geo/application.ex +++ b/lib/geo/application.ex @@ -59,4 +59,12 @@ defmodule Geo.Application do GeoWeb.Endpoint.config_change(changed, removed) :ok end + + # Add shutdown logging + @impl true + def stop(reason) do + require Logger + Logger.info("Geo.Application terminating with reason: #{inspect(reason)}") + :ok + end end diff --git a/lib/geo/geography/country/cache/server.ex b/lib/geo/geography/country/cache/server.ex index 
73c8e78..837bbda 100644 --- a/lib/geo/geography/country/cache/server.ex +++ b/lib/geo/geography/country/cache/server.ex @@ -54,7 +54,7 @@ defmodule Geo.Geography.Country.Cache.Server do timer_ref: nil } - Logger.info("Cache worker started successfully") + Logger.info("Cache worker started successfully: #{inspect(self())}") {:ok, state} end @@ -115,12 +115,11 @@ defmodule Geo.Geography.Country.Cache.Server do @impl true def terminate(reason, state) do + Logger.info("Cache worker #{inspect(self())} terminating with reason: #{inspect(reason)}") # Cancel stop timer when GenServer is stopping if state.timer_ref do Process.cancel_timer(state.timer_ref) end - - Logger.info("Cache worker exiting with reason: #{inspect(reason)}") :ok end diff --git a/lib/geo/log_trimmer.ex b/lib/geo/log_trimmer.ex new file mode 100644 index 0000000..4630add --- /dev/null +++ b/lib/geo/log_trimmer.ex @@ -0,0 +1,147 @@ +defmodule Geo.LogTrimmer do + @moduledoc """ + A GenServer that periodically trims log files to prevent them from growing too large. + + This process runs in the background and periodically checks the specified log file. + If the file exceeds the maximum number of lines, it trims it to keep only the most recent lines. + """ + + use GenServer + require Logger + + @trim_interval :timer.minutes(5) + @max_log_lines 10_000 + + # Client API + + @doc """ + Starts the log trimmer for the specified log file. + """ + def start_link(log_file) when is_binary(log_file) do + GenServer.start_link(__MODULE__, log_file, name: __MODULE__) + end + + @doc """ + Stops the log trimmer. + """ + def stop do + if Process.whereis(__MODULE__) do + GenServer.stop(__MODULE__) + end + end + + @doc """ + Manually triggers a log trim operation. + """ + def trim_now do + if Process.whereis(__MODULE__) do + GenServer.cast(__MODULE__, :trim_now) + end + end + + @doc """ + Gets the current status of the log trimmer. 
+ """ + def status do + if Process.whereis(__MODULE__) do + GenServer.call(__MODULE__, :status) + else + {:error, :not_running} + end + end + + # Server Callbacks + + @impl GenServer + def init(log_file) do + # Schedule the first trim + schedule_trim() + + state = %{ + log_file: log_file, + last_trim: DateTime.utc_now(), + trim_count: 0 + } + + Logger.info("LogTrimmer started for #{log_file}, trimming every #{div(@trim_interval, 60_000)} minutes") + + {:ok, state} + end + + @impl GenServer + def handle_info(:trim, state) do + new_state = perform_trim(state) + schedule_trim() + {:noreply, new_state} + end + + @impl GenServer + def handle_cast(:trim_now, state) do + new_state = perform_trim(state) + {:noreply, new_state} + end + + @impl GenServer + def handle_call(:status, _from, state) do + status = %{ + log_file: state.log_file, + last_trim: state.last_trim, + trim_count: state.trim_count, + max_lines: @max_log_lines, + trim_interval_minutes: div(@trim_interval, 60_000) + } + {:reply, {:ok, status}, state} + end + + # Private Functions + + defp schedule_trim do + Process.send_after(self(), :trim, @trim_interval) + end + + defp perform_trim(state) do + case trim_log_file(state.log_file) do + {:ok, :trimmed, lines_removed} -> + Logger.debug("LogTrimmer: Trimmed #{lines_removed} lines from #{state.log_file}") + %{state | last_trim: DateTime.utc_now(), trim_count: state.trim_count + 1} + + {:ok, :no_trim_needed} -> + %{state | last_trim: DateTime.utc_now()} + + {:error, reason} -> + Logger.warning("LogTrimmer: Failed to trim #{state.log_file}: #{reason}") + state + end + end + + defp trim_log_file(log_file) do + try do + if File.exists?(log_file) do + content = File.read!(log_file) + lines = String.split(content, "\n") + line_count = length(lines) + + if line_count > @max_log_lines do + # Keep only the last @max_log_lines lines + trimmed_lines = lines |> Enum.take(-@max_log_lines) + trimmed_content = Enum.join(trimmed_lines, "\n") + + # Write to a temporary file and rename for atomic operation + temp_file = log_file <> ".tmp" + File.write!(temp_file, trimmed_content) + File.rename!(temp_file, log_file) + + lines_removed = line_count - @max_log_lines + {:ok, :trimmed, lines_removed} + else + {:ok, :no_trim_needed} + end + else + {:ok, :no_trim_needed} + end + rescue + error -> + {:error, Exception.message(error)} + end + end +end diff --git a/lib/mix/tasks/restart.ex b/lib/mix/tasks/restart.ex index 93bc4e9..bec2f0a 100644 --- a/lib/mix/tasks/restart.ex +++ b/lib/mix/tasks/restart.ex @@ -3,10 +3,10 @@ defmodule Mix.Tasks.Restart do Restarts the Phoenix server in the background with output redirected to geo.log. This task will: - 1. Stop any running Phoenix server + 1. Stop any running Phoenix server and log trimmer 2. Start the server in the background 3. Redirect both stdout and stderr to geo.log - 4. Start a background process to periodically trim the log file + 4. 
Start a background LogTrimmer process to periodically trim the log file ## Examples @@ -25,7 +25,7 @@ defmodule Mix.Tasks.Restart do def run(_args) do log_file = "geo.log" - # Stop existing server using the geo.stop task + # Stop existing server and log trimmer using the geo.stop task Mix.Tasks.Stop.run([]) # Start server in background with output redirection @@ -36,7 +36,7 @@ defmodule Mix.Tasks.Restart do # Create or truncate the log file File.write!(log_file, "") - # Start log trimming process in background + # Start log trimming process start_log_trimmer(log_file) # Use System.cmd with a shell command to properly background the process @@ -82,62 +82,19 @@ defmodule Mix.Tasks.Restart do end defp start_log_trimmer(log_file) do - # Create a temporary script file for the log trimmer - script_content = """ - #!/usr/bin/env elixir + # Stop any existing log trimmer first + Geo.LogTrimmer.stop() - defmodule LogTrimmer do - @trim_interval #{@trim_interval} - @max_log_lines #{@max_log_lines} + # Start the LogTrimmer GenServer + case Geo.LogTrimmer.start_link(log_file) do + {:ok, _pid} -> + Mix.shell().info("Log trimmer started successfully") - def run(log_file) do - trim_log_periodically(log_file) - end + {:error, {:already_started, _pid}} -> + Mix.shell().info("Log trimmer already running") - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end + {:error, reason} -> + Mix.shell().error("Failed to start log trimmer: #{inspect(reason)}") end - - LogTrimmer.run("#{log_file}") - """ - - script_file = "log_trimmer_#{System.system_time(:millisecond)}.exs" - File.write!(script_file, script_content) - - # Start the log trimmer in the background - System.cmd("sh", ["-c", "nohup elixir #{script_file} > /dev/null 2>&1 &"]) - - # Clean up the script file after a moment - spawn(fn -> - Process.sleep(5000) - File.rm(script_file) - end) end end diff --git a/lib/mix/tasks/stop.ex b/lib/mix/tasks/stop.ex index 5ab2ab2..615d88d 100644 --- a/lib/mix/tasks/stop.ex +++ b/lib/mix/tasks/stop.ex @@ -1,8 +1,8 @@ defmodule Mix.Tasks.Stop do @moduledoc """ - Stops the server if it's running. + Stops the server and log trimmer if they're running. - This task will find and stop any running server process. + This task will find and stop any running server process and log trimmer. 
## Examples @@ -15,6 +15,12 @@ defmodule Mix.Tasks.Stop do @impl Mix.Task def run(_args) do + # Stop log trimmer first + stop_log_trimmer() + + # Stop orphaned log trimmer processes + stop_orphaned_log_trimmers() + # Function to find the server PID find_phoenix_pid = fn -> case System.cmd("pgrep", ["-f", "beam.smp.*mix phx.server"], stderr_to_stdout: true) do @@ -45,7 +51,7 @@ defmodule Mix.Tasks.Stop do # Graceful shutdown first case System.cmd("kill", ["-TERM", pid], stderr_to_stdout: true) do {_, 0} -> - wait_for_shutdown(pid, 3) + wait_for_shutdown(pid, 5) # Check if still running and force kill if necessary case System.cmd("kill", ["-0", pid], stderr_to_stdout: true) do @@ -66,6 +72,35 @@ defmodule Mix.Tasks.Stop do end end + defp stop_log_trimmer do + try do + case Geo.LogTrimmer.stop() do + :ok -> + Mix.shell().info("Log trimmer stopped.") + _ -> + # Already stopped or not running + nil + end + rescue + _ -> + # LogTrimmer module might not be loaded in some contexts + nil + end + end + + # Helper function to stop orphaned log trimmer processes + defp stop_orphaned_log_trimmers do + {output, _} = System.cmd("pgrep", ["-af", "log_trimmer_.*\\.exs"], stderr_to_stdout: true) + output + |> String.split("\n") + |> Enum.reject(&(&1 == "")) + |> Enum.each(fn line -> + [pid | _] = String.split(line) + Mix.shell().info("Killing orphaned log trimmer with PID #{pid}") + System.cmd("kill", ["-TERM", pid]) + end) + end + # Helper function to wait for process to stop defp wait_for_shutdown(pid, seconds_remaining) when seconds_remaining > 0 do case System.cmd("kill", ["-0", pid], stderr_to_stdout: true) do diff --git a/log_trimmer_1751842644438.exs b/log_trimmer_1751842644438.exs deleted file mode 100644 index 1fdc5cf..0000000 --- a/log_trimmer_1751842644438.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval_minutes 1 - @max_log_lines 10 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval_minutes * 60 * 1000) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751843245279.exs b/log_trimmer_1751843245279.exs deleted file mode 100644 index d8156c1..0000000 --- a/log_trimmer_1751843245279.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 1800000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - 
trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751843394757.exs b/log_trimmer_1751843394757.exs deleted file mode 100644 index d8156c1..0000000 --- a/log_trimmer_1751843394757.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 1800000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751843473592.exs b/log_trimmer_1751843473592.exs deleted file mode 100644 index d8156c1..0000000 --- a/log_trimmer_1751843473592.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 1800000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751854827289.exs b/log_trimmer_1751854827289.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751854827289.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git 
a/log_trimmer_1751854856848.exs b/log_trimmer_1751854856848.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751854856848.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751854959655.exs b/log_trimmer_1751854959655.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751854959655.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855026237.exs b/log_trimmer_1751855026237.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751855026237.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855102686.exs b/log_trimmer_1751855102686.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751855102686.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def 
run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855180331.exs b/log_trimmer_1751855180331.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751855180331.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855277743.exs b/log_trimmer_1751855277743.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751855277743.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/log_trimmer_1751855689696.exs b/log_trimmer_1751855689696.exs deleted file mode 100644 index 9ef26a8..0000000 --- a/log_trimmer_1751855689696.exs +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env elixir - -defmodule LogTrimmer do - @trim_interval 300000 - @max_log_lines 10000 - - def run(log_file) do - trim_log_periodically(log_file) - end - - defp trim_log_periodically(log_file) do - Process.sleep(@trim_interval) - - try do - trim_log_file(log_file) - rescue - _ -> :ok - end - - trim_log_periodically(log_file) - end - - defp trim_log_file(log_file) do - if 
File.exists?(log_file) do - content = File.read!(log_file) - lines = String.split(content, "\n") - - if length(lines) > @max_log_lines do - # Keep only the last @max_log_lines lines - trimmed_lines = lines |> Enum.take(-@max_log_lines) - trimmed_content = Enum.join(trimmed_lines, "\n") - - # Write to a temporary file and rename for atomic operation - temp_file = log_file <> ".tmp" - File.write!(temp_file, trimmed_content) - File.rename!(temp_file, log_file) - end - end - end -end - -LogTrimmer.run("geo.log") diff --git a/test/geo/log_trimmer_test.exs b/test/geo/log_trimmer_test.exs new file mode 100644 index 0000000..d321fe5 --- /dev/null +++ b/test/geo/log_trimmer_test.exs @@ -0,0 +1,89 @@ +defmodule Geo.LogTrimmerTest do + use ExUnit.Case, async: true + + alias Geo.LogTrimmer + + @test_log_file "test_geo.log" + + setup do + # Clean up any existing test log file + File.rm(@test_log_file) + + # Stop any running LogTrimmer to avoid conflicts + LogTrimmer.stop() + + on_exit(fn -> + LogTrimmer.stop() + File.rm(@test_log_file) + end) + + :ok + end + + test "starts and stops correctly" do + # Start the LogTrimmer + assert {:ok, pid} = LogTrimmer.start_link(@test_log_file) + assert Process.alive?(pid) + + # Check status + assert {:ok, status} = LogTrimmer.status() + assert status.log_file == @test_log_file + assert status.trim_count == 0 + + # Stop the LogTrimmer + assert :ok = LogTrimmer.stop() + refute Process.alive?(pid) + end + + test "prevents multiple instances" do + # Start first instance + assert {:ok, _pid1} = LogTrimmer.start_link(@test_log_file) + + # Try to start second instance - should fail + assert {:error, {:already_started, _pid}} = LogTrimmer.start_link(@test_log_file) + + # Clean up + LogTrimmer.stop() + end + + test "trims log file when it exceeds max lines" do + # Create a log file with more than 10,000 lines + max_lines = 10_000 + content = String.duplicate("This is a test log line\n", max_lines + 1000) + File.write!(@test_log_file, content) + + # Start LogTrimmer + {:ok, _pid} = LogTrimmer.start_link(@test_log_file) + + # Manually trigger a trim + LogTrimmer.trim_now() + + # Give it a moment to process + Process.sleep(100) + + # Check that the file was trimmed + trimmed_content = File.read!(@test_log_file) + trimmed_lines = String.split(trimmed_content, "\n") + + # Should have exactly max_lines (the last line might be empty due to trailing newline) + assert length(trimmed_lines) <= max_lines + 1 + + # Clean up + LogTrimmer.stop() + end + + test "handles non-existent log file gracefully" do + # Start LogTrimmer with non-existent file + assert {:ok, _pid} = LogTrimmer.start_link("non_existent.log") + + # Should not crash when trying to trim + LogTrimmer.trim_now() + Process.sleep(100) + + # Should still be running + assert {:ok, _status} = LogTrimmer.status() + + # Clean up + LogTrimmer.stop() + end +end
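
For reference, a minimal IEx-style usage sketch of the `Geo.LogTrimmer` API introduced in this patch; it relies only on the public functions defined above in `lib/geo/log_trimmer.ex` (`start_link/1`, `trim_now/0`, `status/0`, `stop/0`), and `"geo.log"` is the same file the `geo.restart` task redirects output to.

```elixir
# Start the trimmer for the server's log file (normally done by geo.restart).
{:ok, _pid} = Geo.LogTrimmer.start_link("geo.log")

# Force an immediate trim instead of waiting for the 5-minute interval.
:ok = Geo.LogTrimmer.trim_now()

# Inspect the trimmer's bookkeeping: last trim time, trim count, and limits.
{:ok, status} = Geo.LogTrimmer.status()
IO.inspect(status.max_lines)               # 10_000
IO.inspect(status.trim_interval_minutes)   # 5

# Shut it down when finished (geo.stop does this as part of its cleanup).
:ok = Geo.LogTrimmer.stop()
```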