-rw-r--r--  .env.example  42
-rw-r--r--  .gitattributes  3
-rw-r--r--  .github/workflows/update.yml  66
-rw-r--r--  .gitignore  56
-rw-r--r--  .npmrc  1
-rw-r--r--  .prettierignore  9
-rw-r--r--  .prettierrc  17
-rw-r--r--  .serena/.gitignore  1
-rw-r--r--  .serena/memories/project_overview.md  39
-rw-r--r--  .serena/project.yml  84
-rw-r--r--  AGENTS.md  184
-rw-r--r--  CLAUDE.md  287
-rw-r--r--  README.md  44
-rw-r--r--  bun.lock  370
-rw-r--r--  extensions.json  30
-rw-r--r--  package.json  40
-rw-r--r--  scripts/cache.ts  200
-rw-r--r--  scripts/cache/files.ts  138
-rw-r--r--  scripts/cache/lock.ts  220
-rw-r--r--  scripts/cache/logger.ts  152
-rw-r--r--  scripts/cache/manifest.ts  76
-rw-r--r--  scripts/cache/metadata.ts  96
-rw-r--r--  scripts/cache/s3.ts  117
-rw-r--r--  scripts/cache/utils.ts  85
-rw-r--r--  scripts/config.ts  20
-rw-r--r--  scripts/meilisearch.ts  158
-rw-r--r--  scripts/types.ts  31
-rw-r--r--  scripts/update.ts  183
-rw-r--r--  scripts/worker.ts  6
-rw-r--r--  src/app.css  563
-rw-r--r--  src/app.d.ts  13
-rw-r--r--  src/app.html  13
-rw-r--r--  src/lib/components/ExtensionCard.svelte  47
-rw-r--r--  src/lib/components/ExtensionCategory.svelte  24
-rw-r--r--  src/lib/components/ExtensionRow.svelte  47
-rw-r--r--  src/lib/components/Footer.svelte  16
-rw-r--r--  src/lib/components/MirrorSelector.svelte  26
-rw-r--r--  src/lib/search/debounce.ts  30
-rw-r--r--  src/lib/search/meilisearch.ts  125
-rw-r--r--  src/lib/search/types.ts  27
-rw-r--r--  src/lib/search/utils.ts  17
-rw-r--r--  src/lib/stores/mirror.ts  3
-rw-r--r--  src/lib/types.ts  26
-rw-r--r--  src/routes/+layout.svelte  28
-rw-r--r--  src/routes/+layout.ts  10
-rw-r--r--  src/routes/+page.svelte  21
-rw-r--r--  src/routes/search/+page.svelte  322
-rw-r--r--  static/favicon.ico  bin  0 -> 308321 bytes
-rw-r--r--  static/robots.txt  3
-rw-r--r--  svelte.config.js  15
-rw-r--r--  tests/cache-files.test.ts  100
-rw-r--r--  tests/cache-format.test.ts  27
-rw-r--r--  tests/cache-lock.test.ts  141
-rw-r--r--  tests/cache-manifest.test.ts  76
-rw-r--r--  tests/cache-metadata.test.ts  21
-rw-r--r--  tests/cache-utils.test.ts  23
-rw-r--r--  tests/debounce.test.ts  60
-rw-r--r--  tests/logger.test.ts  26
-rw-r--r--  tests/search-utils.test.ts  24
-rw-r--r--  tsconfig.json  20
-rw-r--r--  vite.config.ts  6
-rw-r--r--  wrangler.toml  5
62 files changed, 4660 insertions, 0 deletions
diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..d23ce6b
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,42 @@
+# S3-compatible storage configuration
+# The caching system supports any S3-compatible service including:
+# - Cloudflare R2
+# - Backblaze B2
+# - AWS S3
+# - MinIO
+# - DigitalOcean Spaces
+# And more...
+
+# S3 endpoint URL (required)
+# Examples:
+# Cloudflare R2: https://<ACCOUNT_ID>.r2.cloudflarestorage.com
+# Backblaze B2: https://s3.<REGION>.backblazeb2.com
+# AWS S3: https://s3.<REGION>.amazonaws.com
+# MinIO: http://localhost:9000
+S3_ENDPOINT=
+
+# S3 access key ID (required)
+S3_ACCESS_KEY_ID=
+
+# S3 secret access key (required)
+S3_SECRET_ACCESS_KEY=
+
+# S3 bucket name (required)
+S3_BUCKET_NAME=
+
+# S3 region (optional)
+# Use "auto" for Cloudflare R2
+# For AWS S3 or Backblaze B2, use the appropriate region (e.g., "us-east-1", "eu-central-003")
+S3_REGION=
+
+# Meilisearch configuration (for indexing)
+MEILISEARCH_HOST=http://localhost:7700
+MEILISEARCH_MASTER_KEY=masterKey
+
+# Meilisearch configuration (for frontend - optional)
+# Only set VITE_MEILISEARCH_HOST if you want to use Meilisearch search in the browser
+# Leave empty to use client-side Fuse.js search instead
+VITE_MEILISEARCH_HOST=https://search.x.noz.one
+
+# Search-only API key for frontend (read-only access)
+VITE_MEILISEARCH_DEFAULT_SEARCH_KEY=3266e98f431f6e28db4c804bd4d6b95fc43c8510a693969ef76d5ec3ec62a204
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..314766e
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,3 @@
+* text=auto eol=lf
+*.{cmd,[cC][mM][dD]} text eol=crlf
+*.{bat,[bB][aA][tT]} text eol=crlf
diff --git a/.github/workflows/update.yml b/.github/workflows/update.yml
new file mode 100644
index 0000000..053878e
--- /dev/null
+++ b/.github/workflows/update.yml
@@ -0,0 +1,66 @@
+name: Update Extensions
+
+on:
+ schedule:
+ - cron: '0 */4 * * *'
+ workflow_dispatch:
+ push:
+ branches:
+ - main
+
+permissions:
+ contents: write
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: false
+
+jobs:
+ update:
+ runs-on: ubuntu-latest
+ if: github.event_name != 'push' || github.event.head_commit.author.name != 'github-actions[bot]'
+ outputs:
+ updated: ${{ steps.update.outputs.updated }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+
+ - name: Install bun
+ uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
+ with:
+ bun-version: latest
+
+ - name: Install dependencies
+ run: bun install --frozen-lockfile
+
+ - name: Update extensions (quick)
+ id: update
+ run: bun run update --quick
+
+ - name: Commit and push changes
+ if: steps.update.outputs.updated == 'true'
+ run: |
+ git config --local user.name "github-actions[bot]"
+ git config --local user.email "github-actions[bot]@users.noreply.github.com"
+ git add extensions.json
+ git diff-index --quiet HEAD || git commit -m "chore: update extensions.json"
+ git push
+
+ sync-to-gitlab:
+ runs-on: ubuntu-latest
+ needs: update
+ if: needs.update.outputs.updated == 'true' || github.event_name == 'workflow_dispatch'
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ with:
+ ref: main
+ fetch-depth: 0
+
+ - name: Sync to GitLab
+ uses: pixta-dev/repository-mirroring-action@674e65a7d483ca28dafaacba0d07351bdcc8bd75 # v1.1.1
+ with:
+ target_repo_url: ${{ secrets.GITLAB_REPO }}
+ ssh_private_key: ${{ secrets.GITLAB_SSH }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0f33b73
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,56 @@
+node_modules
+
+# Output
+.output
+.vercel
+.netlify
+.wrangler
+/.svelte-kit
+/build
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Env
+.env
+.env.*
+!.env.example
+!.env.test
+
+# Vite
+vite.config.js.timestamp-*
+vite.config.ts.timestamp-*
+
+# output
+out
+dist
+*.tgz
+
+# code coverage
+coverage
+*.lcov
+
+# logs
+logs
+*.log
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# caches
+.eslintcache
+.cache
+*.tsbuildinfo
+
+# IntelliJ based IDEs
+.idea
+
+# Extensions
+tmp/
+extensions/
+
+# Generated static files
+static/extensions.json
+static/data.json
+static/keiyoushi/
+static/yuzono/
+static/kohi-den/
diff --git a/.npmrc b/.npmrc
new file mode 100644
index 0000000..b6f27f1
--- /dev/null
+++ b/.npmrc
@@ -0,0 +1 @@
+engine-strict=true
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..7d74fe2
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,9 @@
+# Package Managers
+package-lock.json
+pnpm-lock.yaml
+yarn.lock
+bun.lock
+bun.lockb
+
+# Miscellaneous
+/static/
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000..6b0167a
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,17 @@
+{
+ "useTabs": false,
+ "tabWidth": 4,
+ "singleQuote": true,
+ "trailingComma": "none",
+ "printWidth": 100,
+ "endOfLine": "lf",
+ "plugins": ["prettier-plugin-svelte"],
+ "overrides": [
+ {
+ "files": "*.svelte",
+ "options": {
+ "parser": "svelte"
+ }
+ }
+ ]
+}
diff --git a/.serena/.gitignore b/.serena/.gitignore
new file mode 100644
index 0000000..14d86ad
--- /dev/null
+++ b/.serena/.gitignore
@@ -0,0 +1 @@
+/cache
diff --git a/.serena/memories/project_overview.md b/.serena/memories/project_overview.md
new file mode 100644
index 0000000..2d94b70
--- /dev/null
+++ b/.serena/memories/project_overview.md
@@ -0,0 +1,39 @@
+# Project: x (Mihon & Aniyomi Extensions Aggregator)
+
+## Overview
+
+A repository aggregator for Mihon and Aniyomi extensions that automatically syncs from multiple upstream sources.
+
+## Tech Stack
+
+- **Runtime/Package Manager:** Bun
+- **Frontend:** SvelteKit, Vite
+- **Language:** TypeScript
+- **Formatting:** Prettier
+
+## Architecture
+
+- **`src/`:** SvelteKit frontend application.
+- **`scripts/`:** Bun scripts for project maintenance.
+- **`static/`:** Static assets and generated `data.json`.
+- **`extensions.json`:** Configuration file defining extension sources.
+
+## Scripts & Automation
+
+- **`scripts/update.ts`:** The core script.
+ - Updates extensions from upstream Git repositories.
+ - Generates `static/data.json` for the frontend.
+ - Supports `--generate-only` flag to generate `data.json` without fetching updates (used for build).
+- **Refactoring (Dec 2025):**
+ - `scripts/index.ts` was merged into `scripts/update.ts` to reduce redundancy.
+ - `scripts/clean.ts` was removed as it was unused.
+ - `package.json` build script updated to use `bun run scripts/update.ts --generate-only`.
+
+## Standards
+
+- **Line Endings:** LF (Line Feed) is enforced project-wide via `.gitattributes` and `.prettierrc` to ensure cross-platform consistency (Windows/Linux/CI).
+
+## Deployment
+
+- **Targets:** GitHub Pages and Cloudflare Workers.
+- **CI/CD:** GitHub Actions workflow (`.github/workflows/update.yml`) handles updates every 4 hours and on dispatch.
diff --git a/.serena/project.yml b/.serena/project.yml
new file mode 100644
index 0000000..4e61aec
--- /dev/null
+++ b/.serena/project.yml
@@ -0,0 +1,84 @@
+# list of languages for which language servers are started; choose from:
+# al bash clojure cpp csharp csharp_omnisharp
+# dart elixir elm erlang fortran go
+# haskell java julia kotlin lua markdown
+# nix perl php python python_jedi r
+# rego ruby ruby_solargraph rust scala swift
+# terraform typescript typescript_vts yaml zig
+# Note:
+# - For C, use cpp
+# - For JavaScript, use typescript
+# Special requirements:
+# - csharp: Requires the presence of a .sln file in the project folder.
+# When using multiple languages, the first language server that supports a given file will be used for that file.
+# The first language is the default language and the respective language server will be used as a fallback.
+# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored.
+languages:
+ - typescript
+
+# the encoding used by text files in the project
+# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
+encoding: 'utf-8'
+
+# whether to use the project's gitignore file to ignore files
+# Added on 2025-04-07
+ignore_all_files_in_gitignore: true
+
+# list of additional paths to ignore
+# same syntax as gitignore, so you can use * and **
+# Was previously called `ignored_dirs`, please update your config if you are using that.
+# Added (renamed) on 2025-04-07
+ignored_paths: []
+
+# whether the project is in read-only mode
+# If set to true, all editing tools will be disabled and attempts to use them will result in an error
+# Added on 2025-04-18
+read_only: false
+
+# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
+# Below is the complete list of tools for convenience.
+# To make sure you have the latest list of tools, and to view their descriptions,
+# execute `uv run scripts/print_tool_overview.py`.
+#
+# * `activate_project`: Activates a project by name.
+# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
+# * `create_text_file`: Creates/overwrites a file in the project directory.
+# * `delete_lines`: Deletes a range of lines within a file.
+# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
+# * `execute_shell_command`: Executes a shell command.
+# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
+# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
+# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
+# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
+# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
+# * `initial_instructions`: Gets the initial instructions for the current project.
+# Should only be used in settings where the system prompt cannot be set,
+# e.g. in clients you have no control over, like Claude Desktop.
+# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
+# * `insert_at_line`: Inserts content at a given line in a file.
+# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
+# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
+# * `list_memories`: Lists memories in Serena's project-specific memory store.
+# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
+# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
+# * `read_file`: Reads a file within the project directory.
+# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
+# * `remove_project`: Removes a project from the Serena configuration.
+# * `replace_lines`: Replaces a range of lines within a file with new content.
+# * `replace_symbol_body`: Replaces the full definition of a symbol.
+# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
+# * `search_for_pattern`: Performs a search for a pattern in the project.
+# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
+# * `switch_modes`: Activates modes by providing a list of their names
+# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
+# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
+# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
+# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
+excluded_tools: []
+
+# initial prompt for the project. It will always be given to the LLM upon activating the project
+# (contrary to the memories, which are loaded on demand).
+initial_prompt: ''
+
+project_name: 'x'
+included_optional_tools: []
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 0000000..005c5b0
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,184 @@
+# Agent Instructions for Mihon/Aniyomi Extensions Aggregator
+
+This repository is a SvelteKit project managed with **Bun**. It aggregates extensions for Mihon and Aniyomi, serving them as a static site with an API-like structure in `static/`.
+
+## 1. Environment & Toolchain
+
+- **Runtime**: `bun` (do not use `npm`, `yarn`, or `pnpm`).
+- **Framework**: SvelteKit (Svelte 5).
+- **Language**: TypeScript (Strict mode).
+- **Search**: Meilisearch (client-side integration).
+
+## 2. Development Commands
+
+### Build & Run
+
+- **Install dependencies**:
+ ```bash
+ bun install
+ ```
+- **Start dev server**:
+ ```bash
+ bun run dev
+ ```
+- **Build production output**:
+ ```bash
+ bun run build
+ ```
+ _Note: This runs `bun run update --generate-only` first to regenerate `static/data.json` before Vite builds._
+
+### Linting & Verification
+
+- **Type Check**:
+ ```bash
+ bun run check
+ ```
+    _Always run this after making TypeScript or Svelte changes. Combine it with `bun test` (see Testing below) so both strict type checking and the unit tests in `tests/` cover your edits._
+- **Format Code**:
+ ```bash
+ bun run format
+ ```
+- **Verify Formatting**:
+ ```bash
+ bun run lint
+ ```
+
+### Data Management
+
+- **Update Extensions**:
+ ```bash
+ bun run update
+ ```
+ _Fetches upstream data. Use `--generate-only` to just rebuild `data.json` locally._
+
+### Testing
+
+- **Run all tests**:
+ ```bash
+ bun test
+ ```
+- **Run a single test file**:
+ ```bash
+ bun test tests/debounce.test.ts
+ ```
+- **Run tests with watch mode**:
+ ```bash
+ bun test --watch
+ ```
+
+## 3. Testing Strategy
+
+**Test Framework**: Uses `bun:test` built-in test runner with TypeScript support.
+
+**Test Coverage**:
+
+- Utility functions in `src/lib/search/` (debouncing, source name formatting)
+- Logger utilities in `scripts/cache/` (transfer stats formatting)
+- Cache manifest operations in `scripts/cache/` (finding caches by key/prefix)
+- File operations in `scripts/cache/` (checksums, directory management)
+- Cache lock utilities (instance ID generation, staleness detection)
+- Cache metadata key generation
+- Cache utility functions (key generation, byte formatting)
+
+**Agent Protocol for Verification**:
+
+1. **Run Tests**: Execute `bun test` to verify existing functionality.
+2. **Write Tests**: When adding utility functions, add corresponding test files in `tests/`.
+3. **Type Check**: Always run `bun run check` after making changes.
+4. **Build Verification**: Run `bun run build` to ensure static adapter and prerendering complete successfully.
+
+## 4. Code Style & Conventions
+
+### Formatting (Enforced by Prettier)
+
+- **Indentation**: 4 spaces.
+- **Quotes**: Single quotes (`'`).
+- **Trailing Commas**: `none`.
+- **Line Width**: 100 characters.
+- **Line Endings**: LF (`\n`).
+
+### TypeScript
+
+- **Strict Mode**: Enabled. No `any` types unless absolutely necessary.
+- **Imports**: Use standard ESM imports.
+ - SvelteKit aliases: `$lib/` is mapped to `src/lib/`.
+- **Interfaces**: Define specific interfaces for data structures (see `src/lib/types.ts` or `src/lib/search/types.ts`).
+
+### Svelte Components (Svelte 5)
+
+- Use `<script lang="ts">`.
+- Components located in `src/lib/components`.
+- Pages in `src/routes`.
+- Use `app.css` for global styles; prefer scoped `<style>` blocks inside components.
+
+### Naming Conventions
+
+- **Files**:
+ - Svelte components: `PascalCase.svelte` (e.g., `ExtensionCard.svelte`).
+ - TS utilities: `camelCase.ts` (e.g., `meilisearch.ts`).
+ - SvelteKit routes: `+page.svelte`, `+layout.svelte`, `+server.ts`.
+- **Variables/Functions**: `camelCase`.
+- **Types/Interfaces**: `PascalCase`.
+
+## 5. Architecture Overview
+
+### Backend / Scripts (`scripts/`)
+
+Logic for fetching, updating, and caching extensions lives here, not in the SvelteKit app.
+
+- `update.ts`: Entry point for updates.
+- `cache/`: logic for S3/R2 caching mechanism.
+- `config.ts`: Configuration for domains and file paths.
+
+### Frontend (`src/`)
+
+- **Data Source**: The app fetches `data.json` (generated by scripts) via `fetch` in `+layout.ts`.
+- **Search**: Uses Meilisearch. Logic is in `src/lib/search/`.
+- **Routing**: Static routing. The "API" is just static JSON files hosted in the same directory structure.
+
+### Cache System
+
+- Uses `tar.zst` archives stored in S3-compatible storage (R2/B2).
+- **Do not modify** cache logic (`scripts/cache/`) without fully understanding the manifest and distributed locking system described in `CLAUDE.md`.
+
+## 6. Common Tasks
+
+**Adding a new Component**
+
+1. Create `src/lib/components/Name.svelte`.
+2. Add necessary props/state using Svelte 5 syntax.
+3. Run `bun run format` to ensure style.
+
+**Modifying Extension Logic**
+
+1. Edit `scripts/update.ts` or `scripts/config.ts`.
+2. Run `bun run check` to verify types.
+3. Test by running `bun run update --generate-only`.
+
+**Updating Dependencies**
+
+1. Use `bun add <package>` or `bun add -d <package>`.
+2. Do not update `bun.lock` manually.
+
+**Writing Tests**
+
+1. Create test files in `tests/` with the pattern `<module>.test.ts`.
+2. Use `bun:test` test runner: `import { test, expect } from 'bun:test'`.
+3. Use `Bun.sleep(ms)` for async test delays.
+4. Run `bun test` to verify tests pass.
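+
+As a quick illustration, a minimal test file might look like the sketch below (`formatSourceName` is a hypothetical helper defined inline for the example, not necessarily an export of `src/lib/search/utils.ts`):
+
+```ts
+// tests/example.test.ts
+import { expect, test } from 'bun:test';
+
+// Hypothetical helper, defined here only so the example is self-contained.
+function formatSourceName(name: string): string {
+    return name.replace(/^Tachiyomi:\s*/, '').trim();
+}
+
+test('formatSourceName strips the Tachiyomi prefix', () => {
+    expect(formatSourceName('Tachiyomi: MangaDex')).toBe('MangaDex');
+});
+```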
+
+## 7. Error Handling
+
+- **Scripts**: Use try/catch blocks in async functions. Log errors explicitly to console (see `scripts/cache/logger.ts`).
+- **Frontend**: Handle fetch failures gracefully (e.g., if `data.json` fails to load).
+- **Type Safety**: Avoid non-null assertions (`!`) if possible; use optional chaining (`?.`) and nullish coalescing (`??`).
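+
+A small sketch combining these rules in a script context (the path and data shape are illustrative, not taken from `scripts/`):
+
+```ts
+// Read a JSON file defensively, logging failures instead of throwing.
+async function readJson<T>(path: string): Promise<T | null> {
+    try {
+        return (await Bun.file(path).json()) as T;
+    } catch (error) {
+        console.error(`Failed to read ${path}:`, error);
+        return null;
+    }
+}
+
+type DataJson = { mihon?: Record<string, { commit?: string }> };
+
+const data = await readJson<DataJson>('static/data.json');
+// Optional chaining and nullish coalescing instead of non-null assertions.
+const commit = data?.mihon?.['keiyoushi']?.commit ?? 'unknown';
+console.log(`keiyoushi commit: ${commit}`);
+```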
+
+## 8. Deployment
+
+- **CI**: GitHub Actions (`.github/workflows/update.yml`) handles updates and mirroring.
+- **Environment Variables**: Local dev requires `.env`. See `.env.example`.
+ - `S3_*` variables are required for full update/cache operations.
+
+---
+
+_Generated for AI Agents interacting with this codebase._
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000..7c16e88
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,287 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+This is a Mihon & Aniyomi extensions repository aggregator that:
+
+- Fetches and mirrors extension repositories from multiple sources
+- Builds a static website to browse and add extensions
+- Uses R2 cache for fast extension data restoration
+- Mirrors repository to GitLab for backup
+- Automatically updates extensions every 4 hours via GitHub Actions
+
+## Development Commands
+
+```bash
+# Install dependencies
+bun install
+
+# Start development server with hot reload
+bun run dev
+
+# Update extensions from upstream sources
+bun run update
+
+# Build the static site
+bun run build
+
+# Preview the production build
+bun run preview
+
+# Format code with Prettier
+bun run format
+
+# Check code formatting
+bun run lint
+```
+
+## Architecture
+
+### Core Build System
+
+The build process uses Vite with SvelteKit and custom scripts:
+
+1. **`scripts/update.ts`**: Main extension update and data generation script
+ - Fetches extensions from upstream git repositories
+ - Reads `extensions.json` configuration at project root
+ - Checks remote git commit hashes for updates
+ - Supports multiple modes:
+ - `--generate-only`: Regenerate `data.json` without fetching updates or cache operations
+ - `--quick`: Fast mode that only updates `extensions.json` with new hashes (used in CI)
+ - `--no-cache`: Disable cache operations
+ - Default mode: Full update with cache operations
+ - Cache behavior:
+ - Restores `static/` directory from S3 cache (only in full mode)
+ - Uses manifest-based cache resolution with fallback prefixes
+ - Clones repositories to `tmp/` and copies configured files to `static/`
+ - Updates `extensions.json` with new commit hashes only after successful clone/copy
+ - Uploads updated `static/` directory to S3 cache
+ - Cleans up old cache entries (keeps most recent 10, max age 30 days)
+ - Generates `static/data.json` with extension metadata
+ - Sets `updated` output for CI/CD workflows (only when changes occur)
+
+2. **Build order**: `update --generate-only → vite build`
+ - First, `data.json` is regenerated from current extension state
+ - Then Vite builds the SvelteKit app into `dist/`
+ - Static assets from `static/` are included in the build
+
+### Extension Configuration
+
+Extensions are defined in `extensions.json` at the root with a nested structure by category:
+
+```json
+{
+ "mihon": {
+ "keiyoushi": {
+ "name": "Keiyoushi",
+ "source": "https://github.com/keiyoushi/extensions",
+ "path": "/keiyoushi/index.min.json",
+ "commit": "..."
+ },
+ "yuzono/manga": {
+ "name": "Yuzono Manga",
+ "source": "https://github.com/yuzono/manga-repo",
+ "path": "/yuzono/manga/index.min.json",
+ "commit": "..."
+ }
+ },
+ "aniyomi": {
+ "kohi-den": { ... }
+ }
+}
+```
+
+Each extension specifies:
+
+- `source`: Git repository URL
+- `path`: URL path for the extension index (can include subdirectories)
+- `commit`: Current tracked commit hash
+- The key (e.g., "keiyoushi", "yuzono/manga") is used as the directory name in `static/` and as part of the URL path
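+
+Expressed as TypeScript (a sketch for illustration; the actual definitions in `scripts/types.ts` may differ):
+
+```ts
+interface ExtensionRepo {
+    name: string; // Display name, e.g. "Keiyoushi"
+    source: string; // Git repository URL
+    path: string; // URL path for the extension index
+    commit: string; // Currently tracked commit hash
+}
+
+// Keys such as "keiyoushi" or "yuzono/manga" map to repos, grouped by category.
+type ExtensionsConfig = Record<'mihon' | 'aniyomi', Record<string, ExtensionRepo>>;
+```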
+
+### Files Copied from Extensions
+
+Defined in `scripts/config.ts` as `filesToCopy`:
+
+- `index.json` - Full extension index
+- `index.min.json` - Minified extension index
+- `repo.json` - Repository metadata
+- `apk/` - APK files directory
+- `icon/` - Icon files directory
+
+### Frontend
+
+The frontend is a SvelteKit static site built with Vite:
+
+- **SvelteKit**: Framework with static adapter (`@sveltejs/adapter-static`)
+- **Svelte 5**: Modern reactive UI framework
+- **TypeScript**: Type-safe component development
+- **Meilisearch**: Advanced search with filtering and faceting
+- **Prerendering**: All pages are prerendered at build time (`prerender = true`)
+
+**Frontend Structure**:
+
+- `src/routes/+layout.ts` - Loads `data.json` and provides it to all pages
+- `src/routes/+page.svelte` - Home page with extension categories
+- `src/routes/search/+page.svelte` - Search page with all extensions
+- `src/lib/components/` - Reusable UI components:
+ - `ExtensionCategory.svelte` - Category display with repos
+ - `ExtensionCard.svelte` - Individual repo card
+ - `ExtensionRow.svelte` - Table row for search results with NSFW badge
+ - `MirrorSelector.svelte` - Domain selection dropdown
+ - `Footer.svelte` - Page footer
+- `src/lib/stores/` - Svelte stores for state management
+- `src/lib/types.ts` - TypeScript type definitions
+- `src/lib/search/` - Search functionality with Meilisearch integration:
+ - `meilisearch.ts` - Meilisearch client and search operations
+ - `debounce.ts` - Search debouncing utilities
+ - `types.ts` - Search-specific type definitions
+ - `utils.ts` - Search helper functions
+- `src/app.css` - Global styles including NSFW badge styling
+- `src/app.html` - HTML template
+
+The frontend:
+
+1. Fetches `data.json` on initial load via `+layout.ts`
+2. Displays extension repositories grouped by category (mihon/aniyomi)
+3. Provides mirror domain selection for extension URLs
+4. Offers "Add Repo" links using `tachiyomi://` or `aniyomi://` protocols
+5. Search page allows browsing all individual extensions from all repos with advanced filtering
+6. Shows NSFW badge for extensions with adult content (when `nsfw: 1`)
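+
+A simplified sketch of that initial load (not the actual `src/routes/+layout.ts`, which may differ in detail):
+
+```ts
+// src/routes/+layout.ts (sketch)
+import type { LayoutLoad } from './$types';
+
+export const prerender = true;
+
+export const load: LayoutLoad = async ({ fetch }) => {
+    // data.json is generated by scripts/update.ts and served from static/.
+    const res = await fetch('/data.json');
+    return { data: await res.json() };
+};
+```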
+
+### Extension Data Structure
+
+Extensions fetched from upstream repositories contain:
+
+- `name`: Display name (e.g., "Tachiyomi: MangaDex")
+- `pkg`: Package identifier (e.g., "eu.kanade.tachiyomi.extension.en.mangadex")
+- `version`: Version string
+- `lang`: Language code (e.g., "en", "all")
+- `apk`: APK filename
+- `nsfw`: Integer flag (1 = NSFW content, 0 = safe)
+
+### Caching System
+
+The project uses S3-compatible storage (Cloudflare R2, Backblaze B2, AWS S3, etc.) for distributed caching of extension data with a sophisticated manifest-based system:
+
+- **Cache Storage**: Compressed tar.zst files stored in S3 bucket
+- **Compression**: Uses tar with zstd compression for fast compression/decompression
+- **Manifest System**: JSON manifest tracks all cache entries with metadata for resolution
+- **Distributed Locking**: Uses S3 metadata and conditional writes with instance IDs to coordinate updates
+- **Cache Validation**: Validates cache integrity using file checksums before restoration
+- **Automatic Cleanup**: Keeps most recent 10 caches, removes entries older than 30 days
+- **Cache Scripts**: Located in `scripts/`
+ - `cache.ts`: Main cache orchestration (restore and save operations with locking)
+ - `cache/files.ts`: Tar archive creation/extraction with zstd compression and checksum validation
+ - `cache/lock.ts`: Distributed lock implementation using S3 metadata and instance IDs
+ - `cache/manifest.ts`: Cache manifest management for tracking and resolving cache entries
+ - `cache/metadata.ts`: Cache metadata storage (checksums, timestamps) for validation
+ - `cache/s3.ts`: S3 client wrapper, cache resolution, and cleanup logic
+ - `cache/utils.ts`: Shared utilities and constants
+ - `cache/logger.ts`: Logging utilities for transfers and progress
+
+**S3 Configuration**:
+
+Required environment variables in `.env`:
+
+- `S3_ENDPOINT`: S3 endpoint URL
+ - Cloudflare R2: `https://<ACCOUNT_ID>.r2.cloudflarestorage.com`
+ - Backblaze B2: `https://s3.<REGION>.backblazeb2.com`
+ - AWS S3: `https://s3.<REGION>.amazonaws.com`
+- `S3_ACCESS_KEY_ID`: Access key ID
+- `S3_SECRET_ACCESS_KEY`: Secret access key
+- `S3_BUCKET_NAME`: Bucket name
+- `S3_REGION`: Region (optional, use "auto" for R2)
+
+**Cache Flow**:
+
+**Restore**:
+
+1. Resolve cache key using manifest (exact match or prefix fallback)
+2. Validate local cache using checksums (skip download if valid)
+3. Download tar.zst file from S3
+4. Extract to `static/` directory
+5. Update access timestamps in manifest
+
+**Save**:
+
+1. Acquire distributed lock using instance ID
+2. Compress `static/` directory to tar.zst with checksums
+3. Upload to S3 with streaming multipart upload
+4. Save metadata (checksums, file list) to S3
+5. Update manifest with new cache entry
+6. Clean up old cache entries (keep 10 most recent, max age 30 days)
+7. Release lock
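+
+A condensed sketch of how the two flows fit together (the interface and function names below are hypothetical, not the exports of `scripts/cache.ts`):
+
+```ts
+interface CacheEntry {
+    key: string;
+    checksum: string;
+}
+
+// Abstract view of the S3-backed operations described above.
+interface CacheBackend {
+    resolve(key: string): Promise<CacheEntry | null>; // manifest lookup: exact match, then prefix fallback
+    localValid(entry: CacheEntry): Promise<boolean>; // checksum validation of the local copy
+    download(entry: CacheEntry, dest: string): Promise<void>; // fetch tar.zst and extract
+    upload(dir: string, key: string): Promise<CacheEntry>; // tar + zstd, streaming multipart upload
+    lock(instanceId: string): Promise<() => Promise<void>>; // distributed lock via conditional write
+    recordAccess(entry: CacheEntry): Promise<void>;
+    cleanup(opts: { keep: number; maxAgeDays: number }): Promise<void>;
+}
+
+async function restore(backend: CacheBackend, key: string): Promise<boolean> {
+    const entry = await backend.resolve(key);
+    if (!entry) return false;
+    if (!(await backend.localValid(entry))) await backend.download(entry, 'static/');
+    await backend.recordAccess(entry);
+    return true;
+}
+
+async function save(backend: CacheBackend, key: string, instanceId: string): Promise<void> {
+    const release = await backend.lock(instanceId);
+    try {
+        await backend.upload('static/', key);
+        await backend.cleanup({ keep: 10, maxAgeDays: 30 });
+    } finally {
+        await release();
+    }
+}
+```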
+
+### Deployment
+
+The workflow is configured in `.github/workflows/update.yml`:
+
+- **GitLab Mirror**: Syncs entire repository to GitLab using SSH for backup and redundancy
+
+The workflow runs on:
+
+- Schedule: Every 4 hours (`0 */4 * * *`)
+- Manual: `workflow_dispatch`
+- Push to main branch (excluding bot commits)
+
+The workflow has two jobs:
+
+- `update`: Updates extensions using quick mode, commits changes to `extensions.json`
+- `sync-to-gitlab`: Mirrors repository to GitLab (runs after update when changes occur)
+
+Deployments and mirroring run when:
+
+- Extensions have updates (`updated=true` from `update.ts`)
+- Workflow is manually triggered (`workflow_dispatch`)
+
+## Important Patterns
+
+### Update Logic
+
+The update script (`scripts/update.ts`) uses a dual-config sync system:
+
+- `extensions.json` (root): The desired/target state
+- `static/data.json`: Contains the successfully synced state (what's actually downloaded)
+
+Update flow:
+
+1. Compare remote hash with synced hash (from `data.json`)
+2. If different, or if files are missing in `static/`, queue for update
+3. Clone and copy files for each extension
+4. **Only after successful clone/copy**, update `extensions.json` with new commit hash
+5. Generate new `data.json` with updated commit info
+6. Failed clones don't update hashes, ensuring retry on next run
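+
+A sketch of that decision in code (function and parameter names are illustrative, not the actual internals of `scripts/update.ts`):
+
+```ts
+async function syncRepo(
+    key: string,
+    target: { source: string; commit: string }, // entry from extensions.json
+    synced: { commit?: string } | undefined, // entry from static/data.json
+    remoteHash: string,
+    ops: {
+        filesPresent(key: string): Promise<boolean>;
+        cloneAndCopy(source: string, tmpDir: string, destDir: string): Promise<void>;
+    }
+): Promise<string> {
+    const upToDate = synced?.commit === remoteHash && (await ops.filesPresent(key));
+    if (upToDate) return target.commit;
+
+    // Only reached when the remote moved or files are missing locally.
+    await ops.cloneAndCopy(target.source, `tmp/${key}`, `static/${key}`);
+    return remoteHash; // written back to extensions.json only after success
+}
+```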
+
+CI behavior:
+
+- In CI: only downloads if there are actual hash changes
+- Locally (non-CI): always downloads to restore missing files
+- Manual workflow triggers: force downloads regardless of hash changes
+- Sets `updated` output for CI/CD workflows based on successful updates only
+
+### CI Skip Pattern
+
+Commits from the workflow are authored as `github-actions[bot]`, and the `update` job skips push events whose head commit comes from that bot (see the job-level `if` in `.github/workflows/update.yml`), so the plain commit below does not trigger a recursive run:
+
+```bash
+git commit -m "chore: update extensions.json"
+```
+
+### Code Standards
+
+- **Line Endings**: LF (Line Feed) is enforced project-wide via:
+ - `.gitattributes`: `* text=auto eol=lf`
+ - `.prettierrc`: `"endOfLine": "lf"`
+ - This ensures cross-platform consistency (Windows/Linux/CI)
+
+- **Formatting**: Prettier with:
+ - 4 spaces for indentation
+ - Single quotes
+ - No trailing commas
+ - 100 character line width
+
+### Configuration
+
+All deployable domains are listed in `scripts/config.ts` under `config.domains`. The frontend allows users to select which mirror to use for extension URLs.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2c60ab8
--- /dev/null
+++ b/README.md
@@ -0,0 +1,44 @@
+# Mihon & Aniyomi Extensions
+
+A repository aggregator for Mihon and Aniyomi extensions that automatically syncs from multiple upstream sources.
+
+## Links
+
+- [x.noz.one](https://x.noz.one)
+- [x.ujol.dev](https://x.ujol.dev)
+- [x.ujol.workers.dev](https://x.ujol.workers.dev)
+
+## Features
+
+- Automatic updates every 4 hours from upstream extension repositories
+- Static website for browsing and adding extensions
+- Built with SvelteKit and Vite
+- Search functionality for all extensions across repositories
+
+## Extension Sources
+
+### Mihon Extensions
+
+- **Keiyoushi** - Community-maintained Mihon extensions
+- **Yuzono Manga** - Manga extensions
+
+### Aniyomi Extensions
+
+- **Kohi-den** - Anime extensions
+- **Yuzono Anime** - Anime extensions
+
+## Development
+
+```bash
+# Install dependencies
+bun install
+
+# Start development server
+bun run dev
+
+# Update extensions from upstream
+bun run update
+
+# Build for production
+bun run build
+```
diff --git a/bun.lock b/bun.lock
new file mode 100644
index 0000000..ed9f0f8
--- /dev/null
+++ b/bun.lock
@@ -0,0 +1,370 @@
+{
+ "lockfileVersion": 1,
+ "configVersion": 1,
+ "workspaces": {
+ "": {
+ "name": "x",
+ "devDependencies": {
+ "@sveltejs/adapter-static": "^3.0.10",
+ "@sveltejs/kit": "^2.49.2",
+ "@sveltejs/vite-plugin-svelte": "^6.2.1",
+ "@types/bun": "^1.3.4",
+ "meilisearch": "^0.54.0",
+ "prettier": "^3.7.4",
+ "prettier-plugin-svelte": "^3.4.1",
+ "svelte": "^5.46.0",
+ "svelte-check": "^4.3.4",
+ "typescript": "^5.9.3",
+ "vite": "^7.3.0",
+ "wrangler": "^4.55.0",
+ },
+ },
+ },
+ "packages": {
+ "@cloudflare/kv-asset-handler": ["@cloudflare/kv-asset-handler@0.4.1", "", { "dependencies": { "mime": "^3.0.0" } }, "sha512-Nu8ahitGFFJztxUml9oD/DLb7Z28C8cd8F46IVQ7y5Btz575pvMY8AqZsXkX7Gds29eCKdMgIHjIvzskHgPSFg=="],
+
+ "@cloudflare/unenv-preset": ["@cloudflare/unenv-preset@2.7.13", "", { "peerDependencies": { "unenv": "2.0.0-rc.24", "workerd": "^1.20251202.0" }, "optionalPeers": ["workerd"] }, "sha512-NulO1H8R/DzsJguLC0ndMuk4Ufv0KSlN+E54ay9rn9ZCQo0kpAPwwh3LhgpZ96a3Dr6L9LqW57M4CqC34iLOvw=="],
+
+ "@cloudflare/workerd-darwin-64": ["@cloudflare/workerd-darwin-64@1.20251213.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-29mPlP7xgyik85EHotrakuQur5WfuAR4tRAntRFwLEFnB88RB7br6Me9wb15itu/1l9nMyimZWhBMAfnEs5PQw=="],
+
+ "@cloudflare/workerd-darwin-arm64": ["@cloudflare/workerd-darwin-arm64@1.20251213.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-gn4nIg7hbGyHxyNdVqDmSvgMfgytFr4Z/OXGp2ZorP1+OKeGLvfQ70LEEYY/kZwSsbOqEYDXyU6LzPj4n86NZQ=="],
+
+ "@cloudflare/workerd-linux-64": ["@cloudflare/workerd-linux-64@1.20251213.0", "", { "os": "linux", "cpu": "x64" }, "sha512-zMO9tV4aGDZnRfsWg5MC1mbXaRdutDcMeqH5XMzGHsuKO66tbBipV38gX76PLqxKH+UfbE3Uo3jk3iqIuPEF3g=="],
+
+ "@cloudflare/workerd-linux-arm64": ["@cloudflare/workerd-linux-arm64@1.20251213.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-8pQk1dCzdyZdJXehIhxkFMTc5lTLxzqmxskCGlpbem/pWIPTAEjt25OFCxq5Z3iU/x/kI8tcQdYRYx77KS32mQ=="],
+
+ "@cloudflare/workerd-windows-64": ["@cloudflare/workerd-windows-64@1.20251213.0", "", { "os": "win32", "cpu": "x64" }, "sha512-QBwfyZXTzI2JHLS7ZEuVVMC81PAQyNxPdcv9Dxd8wvV4QYF7B97h9pUtaBnqUdlBwL6e3O8QniYkOl8c7bEFJw=="],
+
+ "@cspotcode/source-map-support": ["@cspotcode/source-map-support@0.8.1", "", { "dependencies": { "@jridgewell/trace-mapping": "0.3.9" } }, "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw=="],
+
+ "@emnapi/runtime": ["@emnapi/runtime@1.7.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA=="],
+
+ "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.0", "", { "os": "aix", "cpu": "ppc64" }, "sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A=="],
+
+ "@esbuild/android-arm": ["@esbuild/android-arm@0.27.0", "", { "os": "android", "cpu": "arm" }, "sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ=="],
+
+ "@esbuild/android-arm64": ["@esbuild/android-arm64@0.27.0", "", { "os": "android", "cpu": "arm64" }, "sha512-CC3vt4+1xZrs97/PKDkl0yN7w8edvU2vZvAFGD16n9F0Cvniy5qvzRXjfO1l94efczkkQE6g1x0i73Qf5uthOQ=="],
+
+ "@esbuild/android-x64": ["@esbuild/android-x64@0.27.0", "", { "os": "android", "cpu": "x64" }, "sha512-wurMkF1nmQajBO1+0CJmcN17U4BP6GqNSROP8t0X/Jiw2ltYGLHpEksp9MpoBqkrFR3kv2/te6Sha26k3+yZ9Q=="],
+
+ "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.27.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg=="],
+
+ "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.27.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g=="],
+
+ "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.27.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-9FHtyO988CwNMMOE3YIeci+UV+x5Zy8fI2qHNpsEtSF83YPBmE8UWmfYAQg6Ux7Gsmd4FejZqnEUZCMGaNQHQw=="],
+
+ "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.27.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-zCMeMXI4HS/tXvJz8vWGexpZj2YVtRAihHLk1imZj4efx1BQzN76YFeKqlDr3bUWI26wHwLWPd3rwh6pe4EV7g=="],
+
+ "@esbuild/linux-arm": ["@esbuild/linux-arm@0.27.0", "", { "os": "linux", "cpu": "arm" }, "sha512-t76XLQDpxgmq2cNXKTVEB7O7YMb42atj2Re2Haf45HkaUpjM2J0UuJZDuaGbPbamzZ7bawyGFUkodL+zcE+jvQ=="],
+
+ "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.27.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-AS18v0V+vZiLJyi/4LphvBE+OIX682Pu7ZYNsdUHyUKSoRwdnOsMf6FDekwoAFKej14WAkOef3zAORJgAtXnlQ=="],
+
+ "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.27.0", "", { "os": "linux", "cpu": "ia32" }, "sha512-Mz1jxqm/kfgKkc/KLHC5qIujMvnnarD9ra1cEcrs7qshTUSksPihGrWHVG5+osAIQ68577Zpww7SGapmzSt4Nw=="],
+
+ "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.27.0", "", { "os": "linux", "cpu": "none" }, "sha512-QbEREjdJeIreIAbdG2hLU1yXm1uu+LTdzoq1KCo4G4pFOLlvIspBm36QrQOar9LFduavoWX2msNFAAAY9j4BDg=="],
+
+ "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.27.0", "", { "os": "linux", "cpu": "none" }, "sha512-sJz3zRNe4tO2wxvDpH/HYJilb6+2YJxo/ZNbVdtFiKDufzWq4JmKAiHy9iGoLjAV7r/W32VgaHGkk35cUXlNOg=="],
+
+ "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.27.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-z9N10FBD0DCS2dmSABDBb5TLAyF1/ydVb+N4pi88T45efQ/w4ohr/F/QYCkxDPnkhkp6AIpIcQKQ8F0ANoA2JA=="],
+
+ "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.27.0", "", { "os": "linux", "cpu": "none" }, "sha512-pQdyAIZ0BWIC5GyvVFn5awDiO14TkT/19FTmFcPdDec94KJ1uZcmFs21Fo8auMXzD4Tt+diXu1LW1gHus9fhFQ=="],
+
+ "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.27.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-hPlRWR4eIDDEci953RI1BLZitgi5uqcsjKMxwYfmi4LcwyWo2IcRP+lThVnKjNtk90pLS8nKdroXYOqW+QQH+w=="],
+
+ "@esbuild/linux-x64": ["@esbuild/linux-x64@0.27.0", "", { "os": "linux", "cpu": "x64" }, "sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw=="],
+
+ "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.27.0", "", { "os": "none", "cpu": "arm64" }, "sha512-6m0sfQfxfQfy1qRuecMkJlf1cIzTOgyaeXaiVaaki8/v+WB+U4hc6ik15ZW6TAllRlg/WuQXxWj1jx6C+dfy3w=="],
+
+ "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.27.0", "", { "os": "none", "cpu": "x64" }, "sha512-xbbOdfn06FtcJ9d0ShxxvSn2iUsGd/lgPIO2V3VZIPDbEaIj1/3nBBe1AwuEZKXVXkMmpr6LUAgMkLD/4D2PPA=="],
+
+ "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.27.0", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fWgqR8uNbCQ/GGv0yhzttj6sU/9Z5/Sv/VGU3F5OuXK6J6SlriONKrQ7tNlwBrJZXRYk5jUhuWvF7GYzGguBZQ=="],
+
+ "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.27.0", "", { "os": "openbsd", "cpu": "x64" }, "sha512-aCwlRdSNMNxkGGqQajMUza6uXzR/U0dIl1QmLjPtRbLOx3Gy3otfFu/VjATy4yQzo9yFDGTxYDo1FfAD9oRD2A=="],
+
+ "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.27.0", "", { "os": "none", "cpu": "arm64" }, "sha512-nyvsBccxNAsNYz2jVFYwEGuRRomqZ149A39SHWk4hV0jWxKM0hjBPm3AmdxcbHiFLbBSwG6SbpIcUbXjgyECfA=="],
+
+ "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.27.0", "", { "os": "sunos", "cpu": "x64" }, "sha512-Q1KY1iJafM+UX6CFEL+F4HRTgygmEW568YMqDA5UV97AuZSm21b7SXIrRJDwXWPzr8MGr75fUZPV67FdtMHlHA=="],
+
+ "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.27.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-W1eyGNi6d+8kOmZIwi/EDjrL9nxQIQ0MiGqe/AWc6+IaHloxHSGoeRgDRKHFISThLmsewZ5nHFvGFWdBYlgKPg=="],
+
+ "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.27.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-30z1aKL9h22kQhilnYkORFYt+3wp7yZsHWus+wSKAJR8JtdfI76LJ4SBdMsCopTR3z/ORqVu5L1vtnHZWVj4cQ=="],
+
+ "@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.0", "", { "os": "win32", "cpu": "x64" }, "sha512-aIitBcjQeyOhMTImhLZmtxfdOcuNRpwlPNmlFKPcHQYPhEssw75Cl1TSXJXpMkzaua9FUetx/4OQKq7eJul5Cg=="],
+
+ "@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.0.4" }, "os": "darwin", "cpu": "arm64" }, "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ=="],
+
+ "@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.0.4" }, "os": "darwin", "cpu": "x64" }, "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q=="],
+
+ "@img/sharp-libvips-darwin-arm64": ["@img/sharp-libvips-darwin-arm64@1.0.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg=="],
+
+ "@img/sharp-libvips-darwin-x64": ["@img/sharp-libvips-darwin-x64@1.0.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ=="],
+
+ "@img/sharp-libvips-linux-arm": ["@img/sharp-libvips-linux-arm@1.0.5", "", { "os": "linux", "cpu": "arm" }, "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g=="],
+
+ "@img/sharp-libvips-linux-arm64": ["@img/sharp-libvips-linux-arm64@1.0.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA=="],
+
+ "@img/sharp-libvips-linux-s390x": ["@img/sharp-libvips-linux-s390x@1.0.4", "", { "os": "linux", "cpu": "s390x" }, "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA=="],
+
+ "@img/sharp-libvips-linux-x64": ["@img/sharp-libvips-linux-x64@1.0.4", "", { "os": "linux", "cpu": "x64" }, "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw=="],
+
+ "@img/sharp-libvips-linuxmusl-arm64": ["@img/sharp-libvips-linuxmusl-arm64@1.0.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA=="],
+
+ "@img/sharp-libvips-linuxmusl-x64": ["@img/sharp-libvips-linuxmusl-x64@1.0.4", "", { "os": "linux", "cpu": "x64" }, "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw=="],
+
+ "@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.0.5" }, "os": "linux", "cpu": "arm" }, "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ=="],
+
+ "@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.0.4" }, "os": "linux", "cpu": "arm64" }, "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA=="],
+
+ "@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.0.4" }, "os": "linux", "cpu": "s390x" }, "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q=="],
+
+ "@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.0.4" }, "os": "linux", "cpu": "x64" }, "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA=="],
+
+ "@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" }, "os": "linux", "cpu": "arm64" }, "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g=="],
+
+ "@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.0.4" }, "os": "linux", "cpu": "x64" }, "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw=="],
+
+ "@img/sharp-wasm32": ["@img/sharp-wasm32@0.33.5", "", { "dependencies": { "@emnapi/runtime": "^1.2.0" }, "cpu": "none" }, "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg=="],
+
+ "@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.33.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ=="],
+
+ "@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.33.5", "", { "os": "win32", "cpu": "x64" }, "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg=="],
+
+ "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
+
+ "@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
+
+ "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
+
+ "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
+
+ "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
+
+ "@polka/url": ["@polka/url@1.0.0-next.29", "", {}, "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww=="],
+
+ "@poppinss/colors": ["@poppinss/colors@4.1.5", "", { "dependencies": { "kleur": "^4.1.5" } }, "sha512-FvdDqtcRCtz6hThExcFOgW0cWX+xwSMWcRuQe5ZEb2m7cVQOAVZOIMt+/v9RxGiD9/OY16qJBXK4CVKWAPalBw=="],
+
+ "@poppinss/dumper": ["@poppinss/dumper@0.6.5", "", { "dependencies": { "@poppinss/colors": "^4.1.5", "@sindresorhus/is": "^7.0.2", "supports-color": "^10.0.0" } }, "sha512-NBdYIb90J7LfOI32dOewKI1r7wnkiH6m920puQ3qHUeZkxNkQiFnXVWoE6YtFSv6QOiPPf7ys6i+HWWecDz7sw=="],
+
+ "@poppinss/exception": ["@poppinss/exception@1.2.2", "", {}, "sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg=="],
+
+ "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.53.3", "", { "os": "android", "cpu": "arm" }, "sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w=="],
+
+ "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.53.3", "", { "os": "android", "cpu": "arm64" }, "sha512-CbDGaMpdE9sh7sCmTrTUyllhrg65t6SwhjlMJsLr+J8YjFuPmCEjbBSx4Z/e4SmDyH3aB5hGaJUP2ltV/vcs4w=="],
+
+ "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.53.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Nr7SlQeqIBpOV6BHHGZgYBuSdanCXuw09hon14MGOLGmXAFYjx1wNvquVPmpZnl0tLjg25dEdr4IQ6GgyToCUA=="],
+
+ "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.53.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-DZ8N4CSNfl965CmPktJ8oBnfYr3F8dTTNBQkRlffnUarJ2ohudQD17sZBa097J8xhQ26AwhHJ5mvUyQW8ddTsQ=="],
+
+ "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.53.3", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-yMTrCrK92aGyi7GuDNtGn2sNW+Gdb4vErx4t3Gv/Tr+1zRb8ax4z8GWVRfr3Jw8zJWvpGHNpss3vVlbF58DZ4w=="],
+
+ "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.53.3", "", { "os": "freebsd", "cpu": "x64" }, "sha512-lMfF8X7QhdQzseM6XaX0vbno2m3hlyZFhwcndRMw8fbAGUGL3WFMBdK0hbUBIUYcEcMhVLr1SIamDeuLBnXS+Q=="],
+
+ "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.53.3", "", { "os": "linux", "cpu": "arm" }, "sha512-k9oD15soC/Ln6d2Wv/JOFPzZXIAIFLp6B+i14KhxAfnq76ajt0EhYc5YPeX6W1xJkAdItcVT+JhKl1QZh44/qw=="],
+
+ "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.53.3", "", { "os": "linux", "cpu": "arm" }, "sha512-vTNlKq+N6CK/8UktsrFuc+/7NlEYVxgaEgRXVUVK258Z5ymho29skzW1sutgYjqNnquGwVUObAaxae8rZ6YMhg=="],
+
+ "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.53.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-RGrFLWgMhSxRs/EWJMIFM1O5Mzuz3Xy3/mnxJp/5cVhZ2XoCAxJnmNsEyeMJtpK+wu0FJFWz+QF4mjCA7AUQ3w=="],
+
+ "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.53.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-kASyvfBEWYPEwe0Qv4nfu6pNkITLTb32p4yTgzFCocHnJLAHs+9LjUu9ONIhvfT/5lv4YS5muBHyuV84epBo/A=="],
+
+ "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.53.3", "", { "os": "linux", "cpu": "none" }, "sha512-JiuKcp2teLJwQ7vkJ95EwESWkNRFJD7TQgYmCnrPtlu50b4XvT5MOmurWNrCj3IFdyjBQ5p9vnrX4JM6I8OE7g=="],
+
+ "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.53.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-EoGSa8nd6d3T7zLuqdojxC20oBfNT8nexBbB/rkxgKj5T5vhpAQKKnD+h3UkoMuTyXkP5jTjK/ccNRmQrPNDuw=="],
+
+ "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.53.3", "", { "os": "linux", "cpu": "none" }, "sha512-4s+Wped2IHXHPnAEbIB0YWBv7SDohqxobiiPA1FIWZpX+w9o2i4LezzH/NkFUl8LRci/8udci6cLq+jJQlh+0g=="],
+
+ "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.53.3", "", { "os": "linux", "cpu": "none" }, "sha512-68k2g7+0vs2u9CxDt5ktXTngsxOQkSEV/xBbwlqYcUrAVh6P9EgMZvFsnHy4SEiUl46Xf0IObWVbMvPrr2gw8A=="],
+
+ "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.53.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-VYsFMpULAz87ZW6BVYw3I6sWesGpsP9OPcyKe8ofdg9LHxSbRMd7zrVrr5xi/3kMZtpWL/wC+UIJWJYVX5uTKg=="],
+
+ "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.53.3", "", { "os": "linux", "cpu": "x64" }, "sha512-3EhFi1FU6YL8HTUJZ51imGJWEX//ajQPfqWLI3BQq4TlvHy4X0MOr5q3D2Zof/ka0d5FNdPwZXm3Yyib/UEd+w=="],
+
+ "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.53.3", "", { "os": "linux", "cpu": "x64" }, "sha512-eoROhjcc6HbZCJr+tvVT8X4fW3/5g/WkGvvmwz/88sDtSJzO7r/blvoBDgISDiCjDRZmHpwud7h+6Q9JxFwq1Q=="],
+
+ "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.53.3", "", { "os": "none", "cpu": "arm64" }, "sha512-OueLAWgrNSPGAdUdIjSWXw+u/02BRTcnfw9PN41D2vq/JSEPnJnVuBgw18VkN8wcd4fjUs+jFHVM4t9+kBSNLw=="],
+
+ "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.53.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-GOFuKpsxR/whszbF/bzydebLiXIHSgsEUp6M0JI8dWvi+fFa1TD6YQa4aSZHtpmh2/uAlj/Dy+nmby3TJ3pkTw=="],
+
+ "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.53.3", "", { "os": "win32", "cpu": "ia32" }, "sha512-iah+THLcBJdpfZ1TstDFbKNznlzoxa8fmnFYK4V67HvmuNYkVdAywJSoteUszvBQ9/HqN2+9AZghbajMsFT+oA=="],
+
+ "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.53.3", "", { "os": "win32", "cpu": "x64" }, "sha512-J9QDiOIZlZLdcot5NXEepDkstocktoVjkaKUtqzgzpt2yWjGlbYiKyp05rWwk4nypbYUNoFAztEgixoLaSETkg=="],
+
+ "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.53.3", "", { "os": "win32", "cpu": "x64" }, "sha512-UhTd8u31dXadv0MopwGgNOBpUVROFKWVQgAg5N1ESyCz8AuBcMqm4AuTjrwgQKGDfoFuz02EuMRHQIw/frmYKQ=="],
+
+ "@sindresorhus/is": ["@sindresorhus/is@7.1.1", "", {}, "sha512-rO92VvpgMc3kfiTjGT52LEtJ8Yc5kCWhZjLQ3LwlA4pSgPpQO7bVpYXParOD8Jwf+cVQECJo3yP/4I8aZtUQTQ=="],
+
+ "@speed-highlight/core": ["@speed-highlight/core@1.2.12", "", {}, "sha512-uilwrK0Ygyri5dToHYdZSjcvpS2ZwX0w5aSt3GCEN9hrjxWCoeV4Z2DTXuxjwbntaLQIEEAlCeNQss5SoHvAEA=="],
+
+ "@standard-schema/spec": ["@standard-schema/spec@1.0.0", "", {}, "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA=="],
+
+ "@sveltejs/acorn-typescript": ["@sveltejs/acorn-typescript@1.0.8", "", { "peerDependencies": { "acorn": "^8.9.0" } }, "sha512-esgN+54+q0NjB0Y/4BomT9samII7jGwNy/2a3wNZbT2A2RpmXsXwUt24LvLhx6jUq2gVk4cWEvcRO6MFQbOfNA=="],
+
+ "@sveltejs/adapter-static": ["@sveltejs/adapter-static@3.0.10", "", { "peerDependencies": { "@sveltejs/kit": "^2.0.0" } }, "sha512-7D9lYFWJmB7zxZyTE/qxjksvMqzMuYrrsyh1f4AlZqeZeACPRySjbC3aFiY55wb1tWUaKOQG9PVbm74JcN2Iew=="],
+
+ "@sveltejs/kit": ["@sveltejs/kit@2.49.2", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/cookie": "^0.6.0", "acorn": "^8.14.1", "cookie": "^0.6.0", "devalue": "^5.3.2", "esm-env": "^1.2.2", "kleur": "^4.1.5", "magic-string": "^0.30.5", "mrmime": "^2.0.0", "sade": "^1.8.1", "set-cookie-parser": "^2.6.0", "sirv": "^3.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0", "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", "svelte": "^4.0.0 || ^5.0.0-next.0", "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["@opentelemetry/api"], "bin": { "svelte-kit": "svelte-kit.js" } }, "sha512-Vp3zX/qlwerQmHMP6x0Ry1oY7eKKRcOWGc2P59srOp4zcqyn+etJyQpELgOi4+ZSUgteX8Y387NuwruLgGXLUQ=="],
+
+ "@sveltejs/vite-plugin-svelte": ["@sveltejs/vite-plugin-svelte@6.2.1", "", { "dependencies": { "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0", "debug": "^4.4.1", "deepmerge": "^4.3.1", "magic-string": "^0.30.17", "vitefu": "^1.1.1" }, "peerDependencies": { "svelte": "^5.0.0", "vite": "^6.3.0 || ^7.0.0" } }, "sha512-YZs/OSKOQAQCnJvM/P+F1URotNnYNeU3P2s4oIpzm1uFaqUEqRxUB0g5ejMjEb5Gjb9/PiBI5Ktrq4rUUF8UVQ=="],
+
+ "@sveltejs/vite-plugin-svelte-inspector": ["@sveltejs/vite-plugin-svelte-inspector@5.0.1", "", { "dependencies": { "debug": "^4.4.1" }, "peerDependencies": { "@sveltejs/vite-plugin-svelte": "^6.0.0-next.0", "svelte": "^5.0.0", "vite": "^6.3.0 || ^7.0.0" } }, "sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA=="],
+
+ "@types/bun": ["@types/bun@1.3.4", "", { "dependencies": { "bun-types": "1.3.4" } }, "sha512-EEPTKXHP+zKGPkhRLv+HI0UEX8/o+65hqARxLy8Ov5rIxMBPNTjeZww00CIihrIQGEQBYg+0roO5qOnS/7boGA=="],
+
+ "@types/cookie": ["@types/cookie@0.6.0", "", {}, "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="],
+
+ "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
+
+ "@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="],
+
+ "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
+
+ "acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="],
+
+ "aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="],
+
+ "axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
+
+ "blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="],
+
+ "bun-types": ["bun-types@1.3.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ=="],
+
+ "chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
+
+ "clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
+
+ "color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
+
+ "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
+
+ "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
+
+ "color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
+
+ "cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="],
+
+ "debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
+
+ "deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
+
+ "detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
+
+ "devalue": ["devalue@5.5.0", "", {}, "sha512-69sM5yrHfFLJt0AZ9QqZXGCPfJ7fQjvpln3Rq5+PS03LD32Ost1Q9N+eEnaQwGRIriKkMImXD56ocjQmfjbV3w=="],
+
+ "error-stack-parser-es": ["error-stack-parser-es@1.0.5", "", {}, "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA=="],
+
+ "esbuild": ["esbuild@0.27.0", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.0", "@esbuild/android-arm": "0.27.0", "@esbuild/android-arm64": "0.27.0", "@esbuild/android-x64": "0.27.0", "@esbuild/darwin-arm64": "0.27.0", "@esbuild/darwin-x64": "0.27.0", "@esbuild/freebsd-arm64": "0.27.0", "@esbuild/freebsd-x64": "0.27.0", "@esbuild/linux-arm": "0.27.0", "@esbuild/linux-arm64": "0.27.0", "@esbuild/linux-ia32": "0.27.0", "@esbuild/linux-loong64": "0.27.0", "@esbuild/linux-mips64el": "0.27.0", "@esbuild/linux-ppc64": "0.27.0", "@esbuild/linux-riscv64": "0.27.0", "@esbuild/linux-s390x": "0.27.0", "@esbuild/linux-x64": "0.27.0", "@esbuild/netbsd-arm64": "0.27.0", "@esbuild/netbsd-x64": "0.27.0", "@esbuild/openbsd-arm64": "0.27.0", "@esbuild/openbsd-x64": "0.27.0", "@esbuild/openharmony-arm64": "0.27.0", "@esbuild/sunos-x64": "0.27.0", "@esbuild/win32-arm64": "0.27.0", "@esbuild/win32-ia32": "0.27.0", "@esbuild/win32-x64": "0.27.0" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-jd0f4NHbD6cALCyGElNpGAOtWxSq46l9X/sWB0Nzd5er4Kz2YTm+Vl0qKFT9KUJvD8+fiO8AvoHhFvEatfVixA=="],
+
+ "esm-env": ["esm-env@1.2.2", "", {}, "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA=="],
+
+ "esrap": ["esrap@2.2.1", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-GiYWG34AN/4CUyaWAgunGt0Rxvr1PTMlGC0vvEov/uOQYWne2bpN03Um+k8jT+q3op33mKouP2zeJ6OlM+qeUg=="],
+
+ "exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="],
+
+ "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
+
+ "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
+
+ "glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="],
+
+ "is-arrayish": ["is-arrayish@0.3.4", "", {}, "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA=="],
+
+ "is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="],
+
+ "kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
+
+ "locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="],
+
+ "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
+
+ "meilisearch": ["meilisearch@0.54.0", "", {}, "sha512-b1bwJAEfj8C6hgSN88+/LvW3pe3nWC+thBS2seAbPZGakf/vzsLqppgZquiomzCr2GhU7U7H289625qhYe3rbw=="],
+
+ "mime": ["mime@3.0.0", "", { "bin": { "mime": "cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
+
+ "miniflare": ["miniflare@4.20251213.0", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "7.14.0", "workerd": "1.20251213.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-/Or0LuRA6dQMKvL7nztPWNOVXosrJRBiO0BdJX9LUIesyeAUWIZMPFmP9XX+cdny2fIUcqYcG4DuoL5JHxj95w=="],
+
+ "mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="],
+
+ "mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
+
+ "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+
+ "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
+
+ "path-to-regexp": ["path-to-regexp@6.3.0", "", {}, "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ=="],
+
+ "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
+
+ "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
+
+ "picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
+
+ "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
+
+ "prettier": ["prettier@3.7.4", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA=="],
+
+ "prettier-plugin-svelte": ["prettier-plugin-svelte@3.4.1", "", { "peerDependencies": { "prettier": "^3.0.0", "svelte": "^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0" } }, "sha512-xL49LCloMoZRvSwa6IEdN2GV6cq2IqpYGstYtMT+5wmml1/dClEoI0MZR78MiVPpu6BdQFfN0/y73yO6+br5Pg=="],
+
+ "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
+
+ "rollup": ["rollup@4.53.3", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.53.3", "@rollup/rollup-android-arm64": "4.53.3", "@rollup/rollup-darwin-arm64": "4.53.3", "@rollup/rollup-darwin-x64": "4.53.3", "@rollup/rollup-freebsd-arm64": "4.53.3", "@rollup/rollup-freebsd-x64": "4.53.3", "@rollup/rollup-linux-arm-gnueabihf": "4.53.3", "@rollup/rollup-linux-arm-musleabihf": "4.53.3", "@rollup/rollup-linux-arm64-gnu": "4.53.3", "@rollup/rollup-linux-arm64-musl": "4.53.3", "@rollup/rollup-linux-loong64-gnu": "4.53.3", "@rollup/rollup-linux-ppc64-gnu": "4.53.3", "@rollup/rollup-linux-riscv64-gnu": "4.53.3", "@rollup/rollup-linux-riscv64-musl": "4.53.3", "@rollup/rollup-linux-s390x-gnu": "4.53.3", "@rollup/rollup-linux-x64-gnu": "4.53.3", "@rollup/rollup-linux-x64-musl": "4.53.3", "@rollup/rollup-openharmony-arm64": "4.53.3", "@rollup/rollup-win32-arm64-msvc": "4.53.3", "@rollup/rollup-win32-ia32-msvc": "4.53.3", "@rollup/rollup-win32-x64-gnu": "4.53.3", "@rollup/rollup-win32-x64-msvc": "4.53.3", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-w8GmOxZfBmKknvdXU1sdM9NHcoQejwF/4mNgj2JuEEdRaHwwF12K7e9eXn1nLZ07ad+du76mkVsyeb2rKGllsA=="],
+
+ "sade": ["sade@1.8.1", "", { "dependencies": { "mri": "^1.1.0" } }, "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A=="],
+
+ "semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
+
+ "set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw=="],
+
+ "sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="],
+
+ "simple-swizzle": ["simple-swizzle@0.2.4", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw=="],
+
+ "sirv": ["sirv@3.0.2", "", { "dependencies": { "@polka/url": "^1.0.0-next.24", "mrmime": "^2.0.0", "totalist": "^3.0.0" } }, "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g=="],
+
+ "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
+
+ "stoppable": ["stoppable@1.1.0", "", {}, "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw=="],
+
+ "supports-color": ["supports-color@10.2.2", "", {}, "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g=="],
+
+ "svelte": ["svelte@5.46.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.5.0", "esm-env": "^1.2.1", "esrap": "^2.2.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ZhLtvroYxUxr+HQJfMZEDRsGsmU46x12RvAv/zi9584f5KOX7bUrEbhPJ7cKFmUvZTJXi/CFZUYwDC6M1FigPw=="],
+
+ "svelte-check": ["svelte-check@4.3.4", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-DVWvxhBrDsd+0hHWKfjP99lsSXASeOhHJYyuKOFYJcP7ThfSCKgjVarE8XfuMWpS5JV3AlDf+iK1YGGo2TACdw=="],
+
+ "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="],
+
+ "totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="],
+
+ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
+
+ "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
+
+ "undici": ["undici@7.14.0", "", {}, "sha512-Vqs8HTzjpQXZeXdpsfChQTlafcMQaaIwnGwLam1wudSSjlJeQ3bw1j+TLPePgrCnCpUXx7Ba5Pdpf5OBih62NQ=="],
+
+ "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
+
+ "unenv": ["unenv@2.0.0-rc.24", "", { "dependencies": { "pathe": "^2.0.3" } }, "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw=="],
+
+ "vite": ["vite@7.3.0", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg=="],
+
+ "vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="],
+
+ "workerd": ["workerd@1.20251213.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20251213.0", "@cloudflare/workerd-darwin-arm64": "1.20251213.0", "@cloudflare/workerd-linux-64": "1.20251213.0", "@cloudflare/workerd-linux-arm64": "1.20251213.0", "@cloudflare/workerd-windows-64": "1.20251213.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-knLMSqmUKo7EO1wV69u8o2J+6RVDow3H5qK9f1tzk24fd4rEZXkR1cxFiYisfTRjk/Jl3/1URAkQRSDAiWE5RA=="],
+
+ "wrangler": ["wrangler@4.55.0", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.1", "@cloudflare/unenv-preset": "2.7.13", "blake3-wasm": "2.1.5", "esbuild": "0.27.0", "miniflare": "4.20251213.0", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.24", "workerd": "1.20251213.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20251213.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-50icmLX8UbNaq0FmFHbcvvOh7I6rDA/FyaMYRcNSl1iX0JwuKswezmmtYvYPxPTkbYz7FUYR8GPZLaT23uzFqw=="],
+
+ "ws": ["ws@8.18.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="],
+
+ "youch": ["youch@4.1.0-beta.10", "", { "dependencies": { "@poppinss/colors": "^4.1.5", "@poppinss/dumper": "^0.6.4", "@speed-highlight/core": "^1.2.7", "cookie": "^1.0.2", "youch-core": "^0.3.3" } }, "sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ=="],
+
+ "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="],
+
+ "zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="],
+
+ "zod": ["zod@3.22.3", "", {}, "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug=="],
+
+ "@cspotcode/source-map-support/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="],
+
+ "miniflare/acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
+
+ "youch/cookie": ["cookie@1.1.1", "", {}, "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ=="],
+ }
+}
diff --git a/extensions.json b/extensions.json
new file mode 100644
index 0000000..df58c69
--- /dev/null
+++ b/extensions.json
@@ -0,0 +1,30 @@
+{
+ "mihon": {
+ "keiyoushi": {
+ "name": "Keiyoushi",
+ "source": "https://github.com/keiyoushi/extensions",
+ "path": "/keiyoushi/index.min.json",
+ "commit": "92eae2fca691e97b3eb42069535c290fb1169a9a"
+ },
+ "yuzono/manga": {
+ "name": "Yuzono Manga",
+ "source": "https://github.com/yuzono/manga-repo",
+ "path": "/yuzono/manga/index.min.json",
+ "commit": "93f9a963f0bf9df3e0115e360bbb454f29b2bcbc"
+ }
+ },
+ "aniyomi": {
+ "kohi-den": {
+ "name": "Kohi-den",
+ "source": "https://noz.one/mirror/kohi-den-extensions",
+ "path": "/kohi-den/index.min.json",
+ "commit": "23839da666cdb9f157aedfc0a9a2c7fd5c835667"
+ },
+ "yuzono/anime": {
+ "name": "Yuzono Anime",
+ "source": "https://github.com/yuzono/anime-repo",
+ "path": "/yuzono/anime/index.min.json",
+ "commit": "2416dec159cad0952031fe0a128882f58e37c9c9"
+ }
+ }
+} \ No newline at end of file
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..e4ae207
--- /dev/null
+++ b/package.json
@@ -0,0 +1,40 @@
+{
+ "name": "x",
+ "version": "0.0.1",
+ "devDependencies": {
+ "@sveltejs/adapter-static": "^3.0.10",
+ "@sveltejs/kit": "^2.49.2",
+ "@sveltejs/vite-plugin-svelte": "^6.2.1",
+ "@types/bun": "^1.3.4",
+ "meilisearch": "^0.54.0",
+ "prettier": "^3.7.4",
+ "prettier-plugin-svelte": "^3.4.1",
+ "svelte": "^5.46.0",
+ "svelte-check": "^4.3.4",
+ "typescript": "^5.9.3",
+ "vite": "^7.3.0",
+ "wrangler": "^4.55.0"
+ },
+ "description": "Mihon & Aniyomi Extensions",
+ "keywords": [
+ "mihon",
+ "aniyomi",
+ "extensions",
+ "manga",
+ "anime"
+ ],
+ "private": true,
+ "scripts": {
+ "dev": "vite dev",
+ "build": "bun run update --generate-only && vite build",
+ "preview": "vite preview",
+ "prepare": "svelte-kit sync || echo ''",
+ "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
+ "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
+ "format": "prettier --write .",
+ "lint": "prettier --check .",
+ "update": "bun run scripts/update.ts",
+ "test": "bun test"
+ },
+ "type": "module"
+}
diff --git a/scripts/cache.ts b/scripts/cache.ts
new file mode 100644
index 0000000..f16bed4
--- /dev/null
+++ b/scripts/cache.ts
@@ -0,0 +1,200 @@
+import type { S3Client } from 'bun';
+import { join } from 'path';
+import { cleanupDir, compressToTar, ensureDir, extractTar, validateCache } from './cache/files';
+import { withLock } from './cache/lock';
+import { log } from './cache/logger';
+import { addCacheEntry } from './cache/manifest';
+import { loadMetadata, saveMetadata, updateBothAccessTimes } from './cache/metadata';
+import { cleanupOldCaches, ENABLED, getClient, resolveCacheKey } from './cache/s3';
+import { CACHE_FILE_NAME, TMP_DIR } from './cache/utils';
+
+const CACHE_FILE_PATH = join(TMP_DIR, CACHE_FILE_NAME);
+
+function formatBytes(bytes: number): string {
+ return (bytes / (1024 * 1024)).toFixed(2);
+}
+
+async function downloadCache(s3: S3Client, key: string, targetPath: string): Promise<number> {
+ const s3File = s3.file(key);
+ const stream = s3File.stream();
+ const writer = Bun.file(targetPath).writer();
+
+ const transfer = log.transfer('Received');
+ let downloadedBytes = 0;
+
+ for await (const chunk of stream) {
+ writer.write(chunk);
+ downloadedBytes += chunk.length;
+ transfer.progress(downloadedBytes);
+ }
+ await writer.end();
+
+ transfer.complete(downloadedBytes);
+
+ return downloadedBytes;
+}
+
+async function uploadCache(s3: S3Client, key: string, sourcePath: string): Promise<number> {
+ const cacheFile = Bun.file(sourcePath);
+ const stream = cacheFile.stream();
+
+ const s3File = s3.file(key);
+ const writer = s3File.writer({
+ partSize: 10 * 1024 * 1024, // 10 MB
+ queueSize: 4,
+ retry: 3
+ });
+
+ const timer = log.timer('Uploading cache');
+ let uploadedBytes = 0;
+
+ // Start a timer to log progress every second
+ const progressInterval = setInterval(() => {
+ timer.progress();
+ }, 1000);
+
+ try {
+ for await (const chunk of stream) {
+ writer.write(chunk);
+ uploadedBytes += chunk.length;
+ }
+
+ await writer.end();
+ return uploadedBytes;
+ } finally {
+ clearInterval(progressInterval);
+ timer.complete();
+ }
+}
+
+export async function restoreCache(
+ paths: string[],
+ key: string,
+ restoreKeys?: string[]
+): Promise<string | undefined> {
+ if (!ENABLED) {
+		console.log('Cache disabled (S3 not configured)');
+ return undefined;
+ }
+
+ const s3 = getClient();
+ if (!s3) return undefined;
+
+ try {
+ // Find matching cache (exact or prefix match)
+ const matchedKey = await resolveCacheKey(s3, key, restoreKeys);
+ if (!matchedKey) {
+ console.log('Cache not found');
+ return undefined;
+ }
+
+ // Check if local cache is still valid
+ const metadata = await loadMetadata(s3, matchedKey);
+ if (metadata && (await validateCache(metadata))) {
+ await updateBothAccessTimes(s3, matchedKey, metadata);
+ return matchedKey;
+ }
+
+ await ensureDir(TMP_DIR);
+
+ // Ensure all target paths exist
+ for (const path of paths) {
+ await ensureDir(path);
+ }
+
+ console.log(`Downloading cache from key: ${matchedKey}`);
+ const startTime = Date.now();
+
+ const downloadedBytes = await downloadCache(s3, matchedKey, CACHE_FILE_PATH);
+
+ const downloadTime = Date.now() - startTime;
+ const sizeInMB = formatBytes(downloadedBytes);
+
+ console.log(`Cache Size: ~${sizeInMB} MB (${downloadedBytes} B)`);
+ console.log(`Cache downloaded in ${(downloadTime / 1000).toFixed(2)}s`);
+
+ console.log('Extracting cache...');
+ const extractStartTime = Date.now();
+ await extractTar(CACHE_FILE_PATH);
+ const extractTime = Date.now() - extractStartTime;
+ console.log(`Cache extracted in ${(extractTime / 1000).toFixed(2)}s`);
+
+ await cleanupDir(TMP_DIR);
+
+ // Update access time after successful restore
+ const newMetadata = await loadMetadata(s3, matchedKey);
+ if (newMetadata) {
+ await updateBothAccessTimes(s3, matchedKey, newMetadata);
+ }
+
+ console.log(`Cache restored successfully`);
+ return matchedKey;
+ } catch (e) {
+ console.error('Failed to restore cache:', e);
+ return undefined;
+ }
+}
+
+export async function saveCache(paths: string[], key: string): Promise<void> {
+ if (!ENABLED) return;
+
+ const s3 = getClient();
+ if (!s3) return;
+
+ // Use withLock for automatic lock management with renewal
+ const result = await withLock(s3, async () => {
+ // Check if cache already exists before compressing
+ const cacheFile = s3.file(key);
+ if (await cacheFile.exists()) {
+ console.log(`Cache already exists: ${key}, skipping upload`);
+ return;
+ }
+
+ await ensureDir(TMP_DIR);
+
+ // Compress and calculate checksums
+ console.log('Compressing cache...');
+ const compressStartTime = Date.now();
+ const files = await compressToTar(paths, CACHE_FILE_PATH);
+ const compressTime = Date.now() - compressStartTime;
+ console.log(`Cache compressed in ${(compressTime / 1000).toFixed(2)}s`);
+
+ const cache = Bun.file(CACHE_FILE_PATH);
+ const sizeInBytes = cache.size;
+ const sizeInMB = formatBytes(sizeInBytes);
+
+ console.log(`Cache Size: ~${sizeInMB} MB (${sizeInBytes} B)`);
+ console.log(`Uploading cache to key: ${key}`);
+
+ const startTime = Date.now();
+
+ await uploadCache(s3, key, CACHE_FILE_PATH);
+
+ const uploadTime = Date.now() - startTime;
+ const uploadSpeed = sizeInBytes / (1024 * 1024) / (uploadTime / 1000);
+ console.log(
+ `Cache uploaded in ${(uploadTime / 1000).toFixed(2)}s (${uploadSpeed.toFixed(2)} MB/s)`
+ );
+
+ const timestamp = Date.now();
+
+ // Save metadata and get hash
+ const hash = await saveMetadata(s3, key, files, CACHE_FILE_PATH);
+ await cleanupDir(TMP_DIR);
+
+ // Add entry to manifest
+ await addCacheEntry(s3, key, hash, timestamp);
+
+ console.log(`Cache saved successfully`);
+
+		// Extract prefix for cleanup (e.g., "extensions-abc.tzst" -> "extensions-")
+ const prefix = key.split('-')[0] + '-';
+ await cleanupOldCaches(s3, prefix);
+
+ return;
+ });
+
+ if (result === null) {
+ console.error('Failed to acquire lock for cache save');
+ }
+}
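+
+// Usage sketch: the restore call mirrors scripts/update.ts; the save step and the key shape
+// are illustrative assumptions based on generateCacheKey() in ./cache/utils.
+//   const key = await generateCacheKey();
+//   await restoreCache(CACHE_PATHS, key, CACHE_RESTORE_KEYS);
+//   // ...refresh the cached paths...
+//   await saveCache(CACHE_PATHS, key);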
diff --git a/scripts/cache/files.ts b/scripts/cache/files.ts
new file mode 100644
index 0000000..e2bdf8b
--- /dev/null
+++ b/scripts/cache/files.ts
@@ -0,0 +1,138 @@
+import { $ } from 'bun';
+import { mkdir, readdir, rm, exists } from 'fs/promises';
+import { join, relative, sep } from 'path';
+import type { CacheMetadata, FileMetadata } from './utils';
+
+export async function calculateFileChecksum(filePath: string): Promise<string> {
+ const fileBlob = Bun.file(filePath);
+ const size = fileBlob.size;
+
+ const hasher = new Bun.CryptoHasher('sha256');
+ if (size <= 10 * 1024 * 1024 /** 10MB */)
+ return hasher.update(await fileBlob.arrayBuffer()).digest('hex');
+
+ const reader = fileBlob.stream().getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (value) hasher.update(value);
+ }
+
+ return hasher.digest('hex');
+}
+
+export async function calculateDirectoryChecksums(
+ paths: string[]
+): Promise<Record<string, FileMetadata>> {
+ const files: Record<string, FileMetadata> = {};
+
+ for (const path of paths) {
+ const entries = await readdir(path, {
+ recursive: true,
+ withFileTypes: true
+ });
+
+ await Promise.all(
+ entries
+ .filter((entry) => entry.isFile())
+ .map(async (entry) => {
+ const fullPath = join(entry.parentPath, entry.name);
+ const relativePath = relative('.', fullPath).split(sep).join('/');
+
+ const size = Bun.file(fullPath).size;
+ const checksum = await calculateFileChecksum(fullPath);
+
+ files[relativePath] = { checksum, size };
+ })
+ );
+ }
+
+ return files;
+}
+
+export async function validateCache(metadata: CacheMetadata): Promise<boolean> {
+ console.log('Validating cache...');
+ let valid = 0;
+ let invalid = 0;
+ let missing = 0;
+
+ const totalFiles = Object.keys(metadata.files).length;
+
+ for (const [filePath, fileInfo] of Object.entries(metadata.files)) {
+ const fullPath = join('.', filePath);
+
+ if (!(await exists(fullPath))) {
+ missing++;
+ continue;
+ }
+
+ try {
+ const actualChecksum = await calculateFileChecksum(fullPath);
+ if (actualChecksum === fileInfo.checksum) valid++;
+ else invalid++;
+ } catch (e) {
+ invalid++;
+ }
+ }
+
+ const isValid = invalid === 0 && missing === 0;
+
+ if (isValid) {
+ console.log(`Cache is valid: ${valid} files matched`);
+ } else {
+ console.log(
+ `Cache validation failed: ${valid} valid, ${invalid} invalid, ${missing} missing (total: ${totalFiles})`
+ );
+ }
+
+ return isValid;
+}
+
+export async function extractTar(tarPath: string): Promise<void> {
+ const compressedData = await Bun.file(tarPath).arrayBuffer();
+ const decompressed = Bun.zstdDecompressSync(new Uint8Array(compressedData));
+
+ // Write decompressed tar to temp file
+ const tempTarPath = tarPath + '.tmp';
+ await Bun.write(tempTarPath, decompressed);
+
+ await $`tar -xf ${tempTarPath}`.quiet().finally(async () => {
+ await rm(tempTarPath).catch(() => {});
+ });
+}
+
+export async function compressToTar(
+ paths: string[],
+ outputPath: string
+): Promise<Record<string, FileMetadata>> {
+ const checksums = await calculateDirectoryChecksums(paths);
+
+ const tempTarPath = outputPath + '.tmp';
+ await $`tar -cf ${tempTarPath} ${paths}`.quiet();
+
+ try {
+ const tarData = await Bun.file(tempTarPath).arrayBuffer();
+ const compressed = Bun.zstdCompressSync(new Uint8Array(tarData));
+ await Bun.write(outputPath, compressed);
+ } finally {
+ await rm(tempTarPath).catch(() => {});
+ }
+
+ return checksums;
+}
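+
+// Round-trip sketch (path and file name taken from the constants in ./utils; illustrative only):
+//   const files = await compressToTar(['static'], 'tmp/extensions-cache.tzst');
+//   await extractTar('tmp/extensions-cache.tzst'); // unpacks the archived paths into the cwd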
+
+export async function ensureDir(dir: string): Promise<void> {
+ if (!(await exists(dir))) {
+ await mkdir(dir, { recursive: true });
+ }
+}
+
+export async function cleanupDir(dir: string): Promise<void> {
+ try {
+ await rm(dir, { recursive: true, force: true });
+ } catch (e: any) {
+ if (e.code !== 'EBUSY' && e.code !== 'ENOTEMPTY') {
+ throw e;
+ }
+ }
+}
diff --git a/scripts/cache/lock.ts b/scripts/cache/lock.ts
new file mode 100644
index 0000000..674f4be
--- /dev/null
+++ b/scripts/cache/lock.ts
@@ -0,0 +1,220 @@
+import type { S3Client } from 'bun';
+import { hostname } from 'os';
+import type { CacheLock } from './utils';
+import {
+ LOCK_DOUBLE_CHECK_MS,
+ LOCK_KEY,
+ LOCK_MAX_RETRIES,
+ LOCK_RETRY_MAX_MS,
+ LOCK_RETRY_START_MS,
+ LOCK_TIMEOUT_MS,
+ writeJsonToS3
+} from './utils';
+
+export function generateInstanceId(): string {
+ return `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
+}
+
+/**
+ * Checks if a process is still running (only works on same machine).
+ * Uses Node.js process.kill(pid, 0) which doesn't actually kill but checks existence.
+ */
+function isProcessRunning(pid: number): boolean {
+ try {
+ // Signal 0 doesn't kill, just checks if process exists
+ process.kill(pid, 0);
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+/**
+ * Checks if a lock is stale based on Restic's algorithm:
+ * 1. If timestamp is older than LOCK_TIMEOUT_MS (30 min), it's stale
+ * 2. If on same machine and process doesn't exist, it's stale
+ */
+function isLockStale(lock: CacheLock): boolean {
+ const lockAge = Date.now() - lock.timestamp;
+ const timeSinceRenewal = lock.renewedAt ? Date.now() - lock.renewedAt : lockAge;
+
+ // Check 1: Timestamp-based staleness (30 minutes)
+ if (timeSinceRenewal > LOCK_TIMEOUT_MS) {
+ return true;
+ }
+
+ // Check 2: Process-based staleness (only on same machine)
+ if (lock.hostname === hostname()) {
+ if (!isProcessRunning(lock.pid)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/**
+ * Acquires a distributed lock using Restic's double-check pattern.
+ * This is simpler and more reliable than the lease-based approach.
+ *
+ * Algorithm (inspired by Restic):
+ * 1. Check for existing locks
+ * 2. If lock exists and is NOT stale, retry with exponential backoff
+ * 3. If no lock or stale lock found, create our lock
+ * 4. Wait 200ms (LOCK_DOUBLE_CHECK_MS)
+ * 5. Re-check: verify we still own the lock
+ * 6. If verification fails, we lost the race - retry
+ */
+export async function acquireLock(s3: S3Client, instanceId: string): Promise<string | null> {
+ const lockFile = s3.file(LOCK_KEY);
+ let retryDelay = LOCK_RETRY_START_MS;
+
+ for (let attempt = 0; attempt < LOCK_MAX_RETRIES; attempt++) {
+ try {
+ // Step 1: Check for existing lock
+ if (await lockFile.exists()) {
+ const lockContent = await lockFile.text();
+ const existingLock: CacheLock = JSON.parse(lockContent);
+
+ // Check if lock is stale
+ if (isLockStale(existingLock)) {
+ console.log('Stale lock detected, removing...');
+ await lockFile.delete().catch(() => {});
+ } else {
+ // Lock is valid, need to retry
+ console.log(
+ `Lock busy, retrying in ${retryDelay / 1000}s (${attempt + 1}/${LOCK_MAX_RETRIES})...`
+ );
+ await Bun.sleep(retryDelay);
+
+ // Exponential backoff: double delay each time, up to max
+ retryDelay = Math.min(retryDelay * 2, LOCK_RETRY_MAX_MS);
+ continue;
+ }
+ }
+
+ // Step 2: Create new lock
+ const newLock: CacheLock = {
+ locked: true,
+ timestamp: Date.now(),
+ instance: instanceId,
+ ttl: LOCK_TIMEOUT_MS,
+ renewedAt: Date.now(),
+ pid: process.pid,
+ hostname: hostname()
+ };
+
+ await writeJsonToS3(s3, LOCK_KEY, newLock);
+
+ // Step 3: Wait for double-check delay (Restic's waitBeforeLockCheck pattern)
+ // This allows any racing processes to also write their locks
+ await Bun.sleep(LOCK_DOUBLE_CHECK_MS);
+
+ // Step 4: Verify we still own the lock (detect race conditions)
+ if (await lockFile.exists()) {
+ const verifyContent = await lockFile.text();
+ const verifyLock: CacheLock = JSON.parse(verifyContent);
+
+ if (verifyLock.instance === instanceId) {
+ // Successfully acquired lock
+ console.log('Lock acquired');
+ return instanceId;
+ }
+ }
+
+ // Lost the race - another process overwrote our lock
+ // Retry with exponential backoff
+ console.log(`Lost lock race, retrying in ${retryDelay / 1000}s...`);
+ await Bun.sleep(retryDelay);
+ retryDelay = Math.min(retryDelay * 2, LOCK_RETRY_MAX_MS);
+ } catch (e) {
+ console.error(`Lock error: ${e}`);
+ await Bun.sleep(retryDelay);
+ retryDelay = Math.min(retryDelay * 2, LOCK_RETRY_MAX_MS);
+ }
+ }
+
+ console.error('Failed to acquire lock');
+ return null;
+}
+
+/**
+ * Renews the lock to extend its TTL. Should be called periodically during long operations.
+ */
+export async function renewLock(s3: S3Client, instanceId: string): Promise<boolean> {
+ const lockFile = s3.file(LOCK_KEY);
+
+ try {
+ if (!(await lockFile.exists())) {
+ return false;
+ }
+
+ const lock: CacheLock = JSON.parse(await lockFile.text());
+
+ if (lock.instance !== instanceId) {
+ return false;
+ }
+
+ // Update renewal time
+ lock.renewedAt = Date.now();
+ await writeJsonToS3(s3, LOCK_KEY, lock);
+
+ console.log('Lock renewed');
+ return true;
+ } catch (e) {
+ console.error('Failed to renew lock:', e);
+ return false;
+ }
+}
+
+/**
+ * Releases the lock if owned by this instance.
+ */
+export async function releaseLock(s3: S3Client, instanceId: string): Promise<void> {
+ const lockFile = s3.file(LOCK_KEY);
+
+ try {
+ if (!(await lockFile.exists())) {
+ return;
+ }
+
+ const lock: CacheLock = JSON.parse(await lockFile.text());
+
+ if (lock.instance === instanceId) {
+ await lockFile.delete();
+ console.log('Lock released');
+ }
+ } catch (e) {
+ console.error('Failed to release lock:', e);
+ }
+}
+
+/**
+ * Executes a callback while holding the lock, with automatic renewal.
+ */
+export async function withLock<T>(
+ s3: S3Client,
+ callback: (instanceId: string) => Promise<T>
+): Promise<T | null> {
+ const instanceId = generateInstanceId();
+ const lockToken = await acquireLock(s3, instanceId);
+
+ if (!lockToken) {
+ return null;
+ }
+
+ // Setup automatic lock renewal every 2 minutes
+ const renewalInterval = setInterval(
+ async () => {
+ await renewLock(s3, instanceId);
+ },
+ 2 * 60 * 1000
+ );
+
+ try {
+ return await callback(instanceId);
+ } finally {
+ clearInterval(renewalInterval);
+ await releaseLock(s3, instanceId);
+ }
+}
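+
+// Usage sketch (mirrors how scripts/cache.ts wraps its save step):
+//   const result = await withLock(s3, async () => {
+//       // ...critical section: upload cache, write metadata...
+//       return 'done';
+//   });
+//   if (result === null) console.error('Could not acquire lock');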
diff --git a/scripts/cache/logger.ts b/scripts/cache/logger.ts
new file mode 100644
index 0000000..1743370
--- /dev/null
+++ b/scripts/cache/logger.ts
@@ -0,0 +1,152 @@
+/**
+ * Checks if the current environment supports interactive terminal features
+ * like carriage return (\r) for progress updates.
+ *
+ * Returns false for:
+ * - Non-TTY environments (CI/CD logs, file redirects)
+ * - Dumb terminals
+ * - Environments without cursor control support
+ */
+function isInteractiveTerminal(): boolean {
+ // Check if stdout is a TTY (interactive terminal)
+ if (!process.stdout.isTTY) return false;
+ // Check for dumb terminal
+ if (process.env.TERM === 'dumb') return false;
+ // Check for CI environments (most set CI=true)
+ if (process.env.CI === 'true' || process.env.CI === '1') return false;
+
+ // Check for common CI environment variables
+ const ciEnvVars = [
+ 'GITHUB_ACTIONS',
+ 'GITLAB_CI',
+ 'CIRCLECI',
+ 'TRAVIS',
+ 'JENKINS_HOME',
+ 'BUILDKITE',
+ 'DRONE',
+ 'RENDER', // Render.com
+ 'CF_PAGES', // Cloudflare Pages
+ 'VERCEL' // Vercel
+ ];
+
+ for (const envVar of ciEnvVars) {
+ if (process.env[envVar]) return false;
+ }
+
+ return true;
+}
+
+/**
+ * Formats transfer statistics (size and speed).
+ */
+function formatTransferStats(bytes: number, elapsedSeconds: number): string {
+ const sizeMB = (bytes / (1024 * 1024)).toFixed(2);
+ const speedMBps = (bytes / (1024 * 1024) / elapsedSeconds).toFixed(2);
+ return `${sizeMB} MB (${speedMBps} MB/s)`;
+}
+
+class TimerLogger {
+ private isInteractive: boolean;
+ private startTime: number;
+ private lastLogTime: number;
+ private prefix: string;
+
+ constructor(prefix: string) {
+ this.isInteractive = isInteractiveTerminal();
+ this.startTime = Date.now();
+ this.lastLogTime = this.startTime;
+ this.prefix = prefix;
+ }
+
+ /**
+ * Logs timer progress at regular intervals (throttled to 1 second).
+ */
+ progress(): this {
+ const now = Date.now();
+ if (now - this.lastLogTime >= 1000) {
+ const elapsed = (now - this.startTime) / 1000;
+ const message = `${this.prefix} (${elapsed.toFixed(0)}s)...`;
+
+ if (this.isInteractive) process.stdout.write(`\r${message}`);
+ else console.log(message);
+
+ this.lastLogTime = now;
+ }
+ return this;
+ }
+
+ /**
+ * Logs final timer completion message.
+ */
+ complete(): void {
+ const elapsed = (Date.now() - this.startTime) / 1000;
+ const message = `${this.prefix} (${elapsed.toFixed(0)}s)`;
+
+ if (this.isInteractive) process.stdout.write(`\r\x1b[K${message}\n`);
+ else console.log(message);
+ }
+}
+
+class TransferLogger {
+ private isInteractive: boolean;
+ private startTime: number;
+ private lastLogTime: number;
+ private prefix: string;
+
+ constructor(prefix: string) {
+ this.isInteractive = isInteractiveTerminal();
+ this.startTime = Date.now();
+ this.lastLogTime = this.startTime;
+ this.prefix = prefix;
+ }
+
+ /**
+ * Logs transfer progress at regular intervals (throttled to 1 second).
+ */
+ progress(bytes: number): this {
+ const now = Date.now();
+ if (now - this.lastLogTime >= 1000) {
+ const elapsed = (now - this.startTime) / 1000;
+ const message = `${this.prefix} ${formatTransferStats(bytes, elapsed)}...`;
+
+ if (this.isInteractive) process.stdout.write(`\r${message}`);
+ else console.log(message);
+
+ this.lastLogTime = now;
+ }
+ return this;
+ }
+
+ /**
+ * Logs final transfer completion message.
+ */
+ complete(bytes: number): void {
+ if (bytes > 0) {
+ const elapsed = (Date.now() - this.startTime) / 1000;
+ const message = `${this.prefix} ${formatTransferStats(bytes, elapsed)}`;
+
+ if (this.isInteractive) process.stdout.write(`\r\x1b[K${message}\n`);
+ else console.log(message);
+ }
+ }
+}
+
+class Logger {
+ /**
+ * Creates a timer progress logger.
+ * Usage: log.timer('Uploading cache').progress().complete()
+ */
+ timer(prefix: string): TimerLogger {
+ return new TimerLogger(prefix);
+ }
+
+ /**
+ * Creates a transfer progress logger.
+ * Usage: log.transfer('Received').progress(bytes).complete(bytes)
+ */
+ transfer(prefix: string): TransferLogger {
+ return new TransferLogger(prefix);
+ }
+}
+
+export const log = new Logger();
diff --git a/scripts/cache/manifest.ts b/scripts/cache/manifest.ts
new file mode 100644
index 0000000..7558047
--- /dev/null
+++ b/scripts/cache/manifest.ts
@@ -0,0 +1,76 @@
+import type { S3Client } from 'bun';
+import type { CacheEntry, CacheManifest } from './utils';
+import { writeJsonToS3 } from './utils';
+
+const MANIFEST_KEY = 'manifest.json';
+const MANIFEST_VERSION = 1;
+
+export async function loadManifest(s3: S3Client): Promise<CacheManifest> {
+ const manifestFile = s3.file(MANIFEST_KEY);
+
+ try {
+ if (await manifestFile.exists()) {
+ const data = await manifestFile.text();
+ const manifest: CacheManifest = JSON.parse(data);
+
+ if (manifest.version === MANIFEST_VERSION) {
+ return manifest;
+ }
+ }
+ } catch (e) {
+ console.error('Failed to load manifest:', e);
+ }
+
+ // Return empty manifest if not found or invalid
+ return {
+ version: MANIFEST_VERSION,
+ caches: []
+ };
+}
+
+export async function saveManifest(s3: S3Client, manifest: CacheManifest): Promise<void> {
+ await writeJsonToS3(s3, MANIFEST_KEY, manifest);
+}
+
+export async function addCacheEntry(
+ s3: S3Client,
+ key: string,
+ hash: string,
+ timestamp: number
+): Promise<void> {
+ const manifest = await loadManifest(s3);
+
+	// Remove any existing entry with the same key
+ manifest.caches = manifest.caches.filter((entry) => entry.key !== key);
+
+ // Add new entry
+ manifest.caches.push({
+ key,
+ hash,
+ timestamp,
+ lastAccessed: timestamp
+ });
+
+ await saveManifest(s3, manifest);
+}
+
+export async function removeCacheEntry(s3: S3Client, key: string): Promise<void> {
+ const manifest = await loadManifest(s3);
+ manifest.caches = manifest.caches.filter((entry) => entry.key !== key);
+ await saveManifest(s3, manifest);
+}
+
+export function findCacheByKey(manifest: CacheManifest, key: string): CacheEntry | null {
+ return manifest.caches.find((entry) => entry.key === key) || null;
+}
+
+export function findCacheByPrefix(manifest: CacheManifest, prefix: string): CacheEntry | null {
+ const matching = manifest.caches.filter((entry) => entry.key.startsWith(prefix));
+
+ if (matching.length === 0) {
+ return null;
+ }
+
+ // Return most recently created cache
+ return matching.sort((a, b) => b.timestamp - a.timestamp)[0];
+}
diff --git a/scripts/cache/metadata.ts b/scripts/cache/metadata.ts
new file mode 100644
index 0000000..2326d08
--- /dev/null
+++ b/scripts/cache/metadata.ts
@@ -0,0 +1,96 @@
+import type { S3Client } from 'bun';
+import { METADATA_VERSION, writeJsonToS3 } from './utils';
+import type { CacheMetadata, FileMetadata } from './utils';
+
+function getMetadataKey(cacheKey: string): string {
+ return `${cacheKey}.meta.json`;
+}
+
+export async function saveMetadata(
+ s3: S3Client,
+ key: string,
+ files: Record<string, FileMetadata>,
+ cacheFilePath: string
+): Promise<string> {
+ const content = await Bun.file(cacheFilePath).arrayBuffer();
+ const hash = new Bun.CryptoHasher('sha256').update(content).digest('hex');
+
+ const metadata: CacheMetadata = {
+ key,
+ hash,
+ timestamp: Date.now(),
+ lastAccessed: Date.now(),
+ files,
+ version: METADATA_VERSION
+ };
+
+ const metadataKey = getMetadataKey(key);
+ await writeJsonToS3(s3, metadataKey, metadata);
+
+ console.log(`Metadata saved: ${metadataKey}`);
+ return hash;
+}
+
+export async function loadMetadata(s3: S3Client, cacheKey: string): Promise<CacheMetadata | null> {
+ const metadataKey = getMetadataKey(cacheKey);
+ const metadataFile = s3.file(metadataKey);
+
+ try {
+ if (!(await metadataFile.exists())) {
+ return null;
+ }
+
+ const metadata: CacheMetadata = JSON.parse(await metadataFile.text());
+
+ if (metadata.version !== METADATA_VERSION) {
+ return null;
+ }
+
+ return metadata;
+ } catch (e) {
+ console.error('Failed to load metadata:', e);
+ return null;
+ }
+}
+
+async function updateMetadataAccessTime(
+ s3: S3Client,
+ cacheKey: string,
+ metadata: CacheMetadata
+): Promise<void> {
+ metadata.lastAccessed = Date.now();
+
+ const metadataKey = getMetadataKey(cacheKey);
+ await writeJsonToS3(s3, metadataKey, metadata);
+}
+
+export async function updateBothAccessTimes(
+ s3: S3Client,
+ cacheKey: string,
+ metadata: CacheMetadata
+): Promise<void> {
+ await updateMetadataAccessTime(s3, cacheKey, metadata);
+
+ // Also update manifest
+ const { loadManifest, saveManifest } = await import('./manifest');
+ const manifest = await loadManifest(s3);
+ const entry = manifest.caches.find((e) => e.key === cacheKey);
+
+ if (entry) {
+ entry.lastAccessed = Date.now();
+ await saveManifest(s3, manifest);
+ }
+}
+
+export async function deleteMetadata(s3: S3Client, cacheKey: string): Promise<void> {
+ const metadataKey = getMetadataKey(cacheKey);
+ const metadataFile = s3.file(metadataKey);
+
+ try {
+ if (await metadataFile.exists()) {
+ await metadataFile.delete();
+ }
+ } catch (e) {
+ console.error(`Failed to delete metadata: ${e}`);
+ }
+}
diff --git a/scripts/cache/s3.ts b/scripts/cache/s3.ts
new file mode 100644
index 0000000..ffbb6ac
--- /dev/null
+++ b/scripts/cache/s3.ts
@@ -0,0 +1,117 @@
+import { S3Client } from 'bun';
+import { MAX_CACHE_AGE_DAYS, MAX_CACHE_FILES } from './utils';
+import { findCacheByKey, findCacheByPrefix, loadManifest, removeCacheEntry } from './manifest';
+import { deleteMetadata } from './metadata';
+
+const s3Config = {
+ ENDPOINT: process.env.S3_ENDPOINT,
+ ACCESS_KEY_ID: process.env.S3_ACCESS_KEY_ID,
+ SECRET_ACCESS_KEY: process.env.S3_SECRET_ACCESS_KEY,
+ BUCKET_NAME: process.env.S3_BUCKET_NAME,
+ REGION: process.env.S3_REGION
+};
+
+export const ENABLED =
+ !!s3Config.ENDPOINT &&
+ !!s3Config.ACCESS_KEY_ID &&
+ !!s3Config.SECRET_ACCESS_KEY &&
+ !!s3Config.BUCKET_NAME;
+
+let client: S3Client | null = null;
+
+export function getClient(): S3Client | null {
+ if (!ENABLED || client) return client;
+
+ client = new S3Client({
+ endpoint: s3Config.ENDPOINT,
+ accessKeyId: s3Config.ACCESS_KEY_ID,
+ secretAccessKey: s3Config.SECRET_ACCESS_KEY,
+ bucket: s3Config.BUCKET_NAME,
+ region: s3Config.REGION
+ });
+ return client;
+}
+
+const cacheExists = async (s3: S3Client, key: string) =>
+ await s3
+ .file(key)
+ .exists()
+ .catch(() => false);
+
+const cleanupStaleCache = async (s3: S3Client, key: string): Promise<void> => {
+ console.log(`Cleaning stale cache from manifest (cache missing): ${key}`);
+ await deleteMetadata(s3, key);
+ await removeCacheEntry(s3, key);
+};
+
+export async function resolveCacheKey(
+ s3: S3Client,
+ key: string,
+ restoreKeys?: string[]
+): Promise<string | null> {
+ const manifest = await loadManifest(s3);
+
+ // Try exact match first
+ const exactMatch = findCacheByKey(manifest, key);
+ if (exactMatch) {
+ if (await cacheExists(s3, exactMatch.key)) {
+ return exactMatch.key;
+ }
+ await cleanupStaleCache(s3, exactMatch.key);
+ }
+
+ // Try restore keys in order (prefix matching), preferring most recent
+ if (restoreKeys && restoreKeys.length > 0) {
+ for (const prefix of restoreKeys) {
+ const match = findCacheByPrefix(manifest, prefix);
+ if (match) {
+ if (await cacheExists(s3, match.key)) {
+ return match.key;
+ }
+ await cleanupStaleCache(s3, match.key);
+ }
+ }
+ }
+
+ return null;
+}
+
+export async function cleanupOldCaches(s3: S3Client, prefix: string): Promise<void> {
+ const manifest = await loadManifest(s3);
+
+ // Filter caches by prefix
+ const filesWithMetadata = manifest.caches
+ .filter((entry) => entry.key.startsWith(prefix))
+ .map((entry) => ({
+ key: entry.key,
+ lastAccessed: entry.lastAccessed,
+ timestamp: entry.timestamp
+ }));
+
+ // Sort by lastAccessed (most recently accessed first)
+ const files = filesWithMetadata.sort((a, b) => b.lastAccessed - a.lastAccessed);
+
+ const now = Date.now();
+ const maxAge = MAX_CACHE_AGE_DAYS * 24 * 60 * 60 * 1000;
+ let manifestUpdated = false;
+
+ for (let i = 0; i < files.length; i++) {
+ const entry = files[i];
+ const age = now - entry.lastAccessed;
+ const shouldDelete = i >= MAX_CACHE_FILES || age > maxAge;
+
+ if (shouldDelete) {
+ console.log(
+ `Deleting cache: ${entry.key} (age: ${Math.floor(age / (24 * 60 * 60 * 1000))} days, position: ${i + 1})`
+ );
+ await s3.file(entry.key).delete();
+ await deleteMetadata(s3, entry.key);
+ await removeCacheEntry(s3, entry.key);
+ manifestUpdated = true;
+ }
+ }
+
+ if (manifestUpdated) {
+ console.log('Manifest updated after cleanup');
+ }
+}
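+
+// Usage sketch (mirrors scripts/cache.ts; 'extensions-' is the prefix defined in ./utils):
+//   const matched = await resolveCacheKey(s3, key, ['extensions-']);
+//   if (matched) console.log(`Restorable cache found: ${matched}`);
+//   await cleanupOldCaches(s3, 'extensions-');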
diff --git a/scripts/cache/utils.ts b/scripts/cache/utils.ts
new file mode 100644
index 0000000..c095000
--- /dev/null
+++ b/scripts/cache/utils.ts
@@ -0,0 +1,85 @@
+import type { S3Client } from 'bun';
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface FileMetadata {
+ checksum: string;
+ size: number;
+}
+
+export interface CacheMetadata {
+ key: string;
+ hash: string;
+ timestamp: number;
+ lastAccessed: number;
+ files: Record<string, FileMetadata>;
+ version: number;
+}
+
+export interface CacheLock {
+ locked: boolean;
+ timestamp: number;
+ instance: string;
+ ttl: number;
+ renewedAt?: number;
+ pid: number;
+ hostname: string;
+}
+
+export interface S3ListObject {
+ key: string;
+ lastModified?: string;
+}
+
+export interface CacheEntry {
+ key: string;
+ timestamp: number;
+ lastAccessed: number;
+ hash: string;
+}
+
+export interface CacheManifest {
+ version: number;
+ caches: CacheEntry[];
+}
+
+// ============================================================================
+// Constants
+// ============================================================================
+
+export const LOCK_TIMEOUT_MS = 30 * 60 * 1000; // 30 minutes (matches Restic)
+export const LOCK_RETRY_START_MS = 5000; // 5 seconds (initial retry delay)
+export const LOCK_RETRY_MAX_MS = 60000; // 60 seconds (max retry delay)
+export const LOCK_MAX_RETRIES = 6; // With exponential backoff: 5s, 10s, 20s, 40s, 60s, 60s
+export const LOCK_DOUBLE_CHECK_MS = 200; // 200ms delay for double-check pattern (matches Restic)
+
+export const METADATA_VERSION = 1;
+export const METADATA_KEY = 'metadata.json';
+export const LOCK_KEY = 'cache.lock';
+
+export const MAX_CACHE_FILES = 7;
+export const MAX_CACHE_AGE_DAYS = 7;
+
+export const TMP_DIR = 'tmp';
+export const STATIC_DIR = 'static';
+export const CACHE_FILE_NAME = 'extensions-cache.tzst';
+
+// Cache configuration
+export const CACHE_PATHS = ['static'];
+export const CACHE_KEY_PREFIX = 'extensions-';
+export const CACHE_RESTORE_KEYS = ['extensions-'];
+export const EXTENSIONS_CONFIG_FILE = 'extensions.json';
+
+// Helper to generate cache key from extensions.json
+export async function generateCacheKey(): Promise<string> {
+ const content = await Bun.file(EXTENSIONS_CONFIG_FILE).arrayBuffer();
+ const hash = new Bun.CryptoHasher('sha256').update(content).digest('hex');
+ return `${CACHE_KEY_PREFIX}${hash}.tzst`;
+}
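+
+// The resulting key looks like "extensions-<sha256 of extensions.json>.tzst", so it changes
+// whenever the tracked repositories or their pinned commits change.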
+
+// Helper to write JSON to S3 file
+export async function writeJsonToS3(s3: S3Client, key: string, data: any): Promise<void> {
+ await Bun.write(s3.file(key), JSON.stringify(data, null, 2));
+}
diff --git a/scripts/config.ts b/scripts/config.ts
new file mode 100644
index 0000000..1ea762e
--- /dev/null
+++ b/scripts/config.ts
@@ -0,0 +1,20 @@
+export const config = {
+ github: {
+ owner: 'amrkmn',
+ repo: 'x',
+ branch: 'main'
+ },
+ domains: [
+ 'https://x.noz.one', //
+ 'https://x.ujol.dev',
+ 'https://x.amar.kim',
+ 'https://x.ujol.workers.dev'
+ ],
+ filesToCopy: [
+ 'index.json', //
+ 'index.min.json',
+ 'repo.json',
+ 'apk',
+ 'icon'
+ ]
+};
diff --git a/scripts/meilisearch.ts b/scripts/meilisearch.ts
new file mode 100644
index 0000000..a900ceb
--- /dev/null
+++ b/scripts/meilisearch.ts
@@ -0,0 +1,158 @@
+import { MeiliSearch } from 'meilisearch';
+import { readdir } from 'fs/promises';
+import { join } from 'path';
+
+interface Extension {
+ name: string;
+ pkg: string;
+ apk: string;
+ lang: string;
+ code: number;
+ version: string;
+ nsfw: number;
+}
+
+interface EnrichedExtension extends Extension {
+ id: string;
+ category: string;
+ sourceName: string;
+ formattedSourceName: string;
+ repoUrl: string;
+}
+
+interface SourceMapping {
+ name: string;
+ repoUrl: string;
+ category: string;
+}
+
+async function buildSourceMapping(path: string): Promise<Map<string, SourceMapping>> {
+ const mapping = new Map<string, SourceMapping>();
+ const data = await Bun.file(path).json();
+
+ for (const category in data.extensions) {
+ for (const repo of data.extensions[category]) {
+ const normalizedPath = repo.path.replace(/^\//, '');
+ mapping.set(normalizedPath, {
+ name: repo.name,
+ repoUrl: repo.path.substring(0, repo.path.lastIndexOf('/')),
+ category
+ });
+ }
+ }
+ return mapping;
+}
+
+async function findExtensionFiles(dir: string): Promise<string[]> {
+ let results: string[] = [];
+ try {
+ const entries = await readdir(dir, { withFileTypes: true });
+ for (const file of entries) {
+ const path = join(dir, file.name);
+ if (file.isDirectory()) results.push(...(await findExtensionFiles(path)));
+ else if (file.name === 'index.min.json') results.push(path);
+ }
+ } catch (e) {
+ console.error(`Error reading ${dir}:`, e);
+ }
+ return results;
+}
+
+export async function updateMeilisearch() {
+ const env = {
+ host: process.env.MEILISEARCH_HOST,
+ apiKey: process.env.MEILISEARCH_MASTER_KEY
+ };
+
+ if (!env.host || !env.apiKey) {
+ console.log('Skipping Meilisearch update (not configured)');
+ return;
+ }
+
+ console.log('Updating Meilisearch index...');
+ const STATIC_DIR = join(process.cwd(), 'static');
+
+ try {
+ const client = new MeiliSearch({ host: env.host, apiKey: env.apiKey });
+ await client.health();
+ const index = client.index('extensions');
+
+ await index.updateSettings({
+ searchableAttributes: ['name', 'pkg', 'lang', 'sourceName'],
+ filterableAttributes: [
+ 'sourceName',
+ 'formattedSourceName',
+ 'category',
+ 'lang',
+ 'nsfw',
+ 'pkg'
+ ],
+ sortableAttributes: ['name', 'lang', 'version'],
+ rankingRules: ['words', 'typo', 'proximity', 'attribute', 'sort', 'exactness'],
+ pagination: { maxTotalHits: 10000 }
+ });
+
+ const sourceMapping = await buildSourceMapping(join(STATIC_DIR, 'data.json'));
+ const files = await findExtensionFiles(STATIC_DIR);
+
+ if (!files.length) {
+ console.warn('No extension files found for Meilisearch');
+ return;
+ }
+
+ const allExtensions: EnrichedExtension[] = [];
+
+ for (const file of files) {
+ try {
+ const extensions: Extension[] = await Bun.file(file).json();
+ const relativePath = file
+ .replace(STATIC_DIR, '')
+ .replace(/\\/g, '/')
+ .replace(/^\//, '');
+ const pathParts = relativePath.split('/').filter(Boolean);
+ const sourceInfo = sourceMapping.get(relativePath);
+
+ const sourceName = sourceInfo?.name || pathParts[0] || 'Unknown';
+ const repoUrl = sourceInfo?.repoUrl || '/' + pathParts.slice(0, -1).join('/');
+ const category =
+ sourceInfo?.category ||
+ (pathParts[0]?.toLowerCase().includes('anime') ? 'aniyomi' : 'mihon');
+ const formattedSourceName = sourceName.toLowerCase().replace(/\s+/g, '.');
+ const idSafeSourceName = formattedSourceName.replace(/\./g, '_');
+
+ allExtensions.push(
+ ...extensions.map((ext) => ({
+ ...ext,
+ id: `${idSafeSourceName}-${ext.pkg.replace(/\./g, '_')}`,
+ category,
+ sourceName,
+ formattedSourceName,
+ repoUrl,
+ nsfw: typeof ext.nsfw === 'number' ? ext.nsfw : ext.nsfw ? 1 : 0
+ }))
+ );
+ } catch (err) {
+ console.error(`Error processing ${file}:`, err);
+ }
+ }
+
+ const task = await index.updateDocuments(allExtensions, { primaryKey: 'id' });
+ const result = await client.tasks.waitForTask(task.taskUid, {
+ timeout: 300000,
+ interval: 1000
+ });
+
+ if (result.status === 'succeeded') {
+ const stats = await index.getStats();
+ console.log(`Meilisearch updated: ${stats.numberOfDocuments} documents indexed`);
+ } else {
+ console.error('Meilisearch indexing failed:', result.error);
+ }
+ } catch (error) {
+ console.error('Meilisearch update error:', error);
+ }
+}
+
+if (import.meta.main) {
+ await updateMeilisearch();
+}
diff --git a/scripts/types.ts b/scripts/types.ts
new file mode 100644
index 0000000..da11cd4
--- /dev/null
+++ b/scripts/types.ts
@@ -0,0 +1,31 @@
+// Type definitions for the update scripts' configuration (extensions.json and config.ts)
+export interface ExtensionConfig {
+ source: string;
+ name: string;
+ path: string;
+ category: 'mihon' | 'aniyomi';
+ commit?: string;
+}
+
+export interface Config {
+ github: {
+ owner: string;
+ repo: string;
+ branch: string;
+ };
+ domains: string[];
+ directories: {
+ output: string;
+ extensions: string;
+ };
+ filesToCopy: string[];
+}
+
+export interface ExtensionSources {
+ [category: string]: Array<{
+ source: string;
+ name: string;
+ path: string;
+ commit?: string;
+ }>;
+}
diff --git a/scripts/update.ts b/scripts/update.ts
new file mode 100644
index 0000000..8649b41
--- /dev/null
+++ b/scripts/update.ts
@@ -0,0 +1,183 @@
+import { $ } from 'bun';
+import { existsSync } from 'fs';
+import { appendFile, cp } from 'fs/promises';
+import { join } from 'path';
+import { restoreCache, saveCache } from './cache';
+import { CACHE_PATHS, CACHE_RESTORE_KEYS, generateCacheKey } from './cache/utils';
+import { config } from './config';
+import { updateMeilisearch } from './meilisearch';
+import type { ExtensionConfig } from './types';
+
+const EXT_DIR = join(process.cwd(), 'static');
+const DATA_FILE = join(EXT_DIR, 'data.json');
+const TEMP_DIR = join(process.cwd(), 'tmp');
+
+const extensionsData: Record<string, Record<string, ExtensionConfig>> = await Bun.file(
+ 'extensions.json'
+).json();
+
+const setOutput = async (key: string, value: string) =>
+ process.env.GITHUB_OUTPUT && (await appendFile(process.env.GITHUB_OUTPUT, `${key}=${value}\n`));
+
+async function generateData() {
+ console.log('Generating data.json...');
+ try {
+ const extensions = Object.fromEntries(
+ Object.entries(extensionsData).map(([category, exts]) => [
+ category,
+ Object.values(exts).map(({ source, name, path, commit }) => ({
+ source,
+ name,
+ path,
+ commit
+ }))
+ ])
+ );
+
+ const commit = (await $`git rev-parse HEAD`.text()).trim();
+ const { owner, repo } = config.github;
+ const source = `https://github.com/${owner}/${repo}`;
+
+ await Bun.write(
+ DATA_FILE,
+ JSON.stringify({
+ extensions,
+ domains: config.domains,
+ source,
+ commitLink: `${source}/commit/${commit}`,
+ latestCommitHash: commit.substring(0, 7)
+ })
+ );
+ console.log(`Generated data.json (${commit.substring(0, 7)})`);
+ } catch (error) {
+ console.error('Failed to generate data.json:', error);
+ process.exit(1);
+ }
+}
+
+if (process.argv.includes('--generate-only')) {
+ await generateData();
+ process.exit(0);
+}
+
+if (process.argv.includes('--update-search')) {
+ console.log('Updating search index only...');
+ await updateMeilisearch();
+ process.exit(0);
+}
+
+const quickMode = process.argv.includes('--quick');
+const useCache = !process.argv.includes('--no-cache') && !quickMode;
+
+if (useCache) await restoreCache(CACHE_PATHS, await generateCacheKey(), CACHE_RESTORE_KEYS);
+else
+ console.log(quickMode ? 'Cache disabled for quick mode' : 'Cache disabled via --no-cache flag');
+
+console.log('Checking for updates...');
+const synced = new Map<string, string>();
+if (!quickMode) {
+ try {
+ Object.values((await Bun.file(DATA_FILE).json()).extensions || {})
+ .flat()
+ .forEach((e: any) => e?.path && e?.commit && synced.set(e.path, e.commit));
+ } catch {}
+}
+
+const updates = (
+ await Promise.all(
+ Object.entries(extensionsData).flatMap(([category, group]) =>
+ Object.entries(group).map(async ([key, ext]) => {
+ try {
+ const dest = join(EXT_DIR, key);
+ const syncedHash = synced.get(ext.path);
+
+ if (!quickMode && !existsSync(dest))
+ return { category, key, ext, hash: ext.commit || 'HEAD' };
+
+ const remoteHash = (
+ await $`git ls-remote ${ext.source} HEAD | cut -f1`.text()
+ ).trim();
+
+ if (quickMode && remoteHash !== ext.commit) {
+ console.log(
+ `[${ext.name}] Update available: ${ext.commit?.slice(0, 7) ?? 'none'} -> ${remoteHash.slice(0, 7)}`
+ );
+ return { category, key, ext, hash: remoteHash };
+ }
+
+ if (!quickMode && (remoteHash !== syncedHash || ext.commit !== syncedHash)) {
+ console.log(
+ `[${ext.name}] Update: ${syncedHash?.slice(0, 7) ?? 'none'} -> ${remoteHash.slice(0, 7)}`
+ );
+ return { category, key, ext, hash: remoteHash };
+ }
+ } catch {
+ console.error(`Check failed: ${ext.name}`);
+ }
+ return null;
+ })
+ )
+ )
+).filter((u): u is NonNullable<typeof u> => u !== null);
+
+if (updates.length === 0) {
+ console.log('No updates found');
+ await setOutput('updated', 'false');
+ process.exit(0);
+}
+
+if (quickMode) {
+ console.log(`Found ${updates.length} updates. Updating extensions.json...`);
+ updates.forEach(({ category, key, hash }) => (extensionsData[category][key].commit = hash));
+ await Bun.write('extensions.json', JSON.stringify(extensionsData, null, 4));
+ await setOutput('updated', 'true');
+ process.exit(0);
+}
+
+const { CI, GITHUB_EVENT_NAME } = process.env;
+if (
+ CI === 'true' &&
+ GITHUB_EVENT_NAME &&
+ !['schedule', 'workflow_dispatch'].includes(GITHUB_EVENT_NAME)
+) {
+ console.log('Skipping updates (CI)');
+ await setOutput('updated', 'false');
+ process.exit(0);
+}
+
+console.log(`Updating ${updates.length} extensions...`);
+await $`rm -rf ${TEMP_DIR}`;
+
+let changed = false;
+for (const { key, ext, hash, category } of updates) {
+ console.log(`Processing ${ext.name}...`);
+ const temp = join(TEMP_DIR, key);
+ const dest = join(EXT_DIR, key);
+
+ try {
+ await $`git clone --depth 1 ${ext.source} ${temp}`.quiet();
+ await $`rm -rf ${dest} && mkdir -p ${dest}`;
+
+ for (const file of config.filesToCopy) {
+ const srcPath = join(temp, file);
+ if (existsSync(srcPath)) await cp(srcPath, join(dest, file), { recursive: true });
+ }
+
+ extensionsData[category][key].commit = hash;
+ changed = true;
+ console.log(` Updated ${ext.name}`);
+ } catch (e) {
+ console.error(` Update failed: ${ext.name}`, e);
+ }
+}
+
+await $`rm -rf ${TEMP_DIR}`;
+if (changed) {
+ await Bun.write('extensions.json', JSON.stringify(extensionsData, null, 4));
+ console.log('Updated extensions.json');
+ await generateData();
+ await updateMeilisearch();
+ if (useCache) await saveCache(CACHE_PATHS, await generateCacheKey());
+}
+
+await setOutput('updated', String(changed));
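As a reference point, the payload that generateData() writes to static/data.json lines up with the AppData interface declared later in this diff (src/lib/types.ts); a minimal sketch with placeholder values:

// Hypothetical example of the generated static/data.json (all values are placeholders).
const exampleData = {
    extensions: {
        mihon: [
            {
                source: 'https://github.com/example/ext-repo',
                name: 'Example Repo',
                path: '/example/index.min.json',
                commit: '0123456789abcdef0123456789abcdef01234567'
            }
        ]
    },
    domains: ['https://ext.example.org'],
    source: 'https://github.com/example-owner/example-repo',
    commitLink: 'https://github.com/example-owner/example-repo/commit/0123456789abcdef0123456789abcdef01234567',
    latestCommitHash: '0123456'
};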
diff --git a/scripts/worker.ts b/scripts/worker.ts
new file mode 100644
index 0000000..ae9db49
--- /dev/null
+++ b/scripts/worker.ts
@@ -0,0 +1,6 @@
+export default {
+ async fetch(request: Request, env: any) {
+ // Serve static assets
+ return env.ASSETS.fetch(request);
+ }
+};
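The env parameter above is typed as any; a hedged sketch of a typed variant, assuming the ASSETS binding declared in wrangler.toml at the end of this diff behaves like a fetcher:

// Hypothetical typed environment for the Worker; ASSETS mirrors the assets
// binding declared in wrangler.toml (directory = "./dist").
interface Env {
    ASSETS: { fetch(request: Request): Promise<Response> };
}

export default {
    async fetch(request: Request, env: Env) {
        // Serve prebuilt static assets for every request
        return env.ASSETS.fetch(request);
    }
};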
diff --git a/src/app.css b/src/app.css
new file mode 100644
index 0000000..6bdad32
--- /dev/null
+++ b/src/app.css
@@ -0,0 +1,563 @@
+html,
+body {
+ height: 100%;
+ margin: 0;
+ padding: 0;
+ display: flex;
+ flex-direction: column;
+ background-color: #121212;
+ font-family: Arial, sans-serif;
+ color: #a0d1ff;
+}
+
+body {
+ box-sizing: border-box;
+}
+
+.container {
+ flex-grow: 1;
+ width: 100%;
+ max-width: 800px;
+ margin: 0 auto;
+ padding: 20px;
+ box-sizing: border-box;
+}
+
+.controls {
+ text-align: center;
+ margin: 0 auto 20px;
+ padding: 8px;
+ background-color: #1c2a39;
+ border: 1px solid #66b2ff;
+ display: table;
+}
+
+select {
+ padding: 4px;
+ background-color: #121212;
+ color: #a0d1ff;
+ border: 1px solid #66b2ff;
+ font-size: 14px;
+}
+
+h1,
+h2 {
+ text-align: center;
+ color: #66b2ff;
+}
+
+.mihon,
+.aniyomi {
+ margin-top: 20px;
+}
+
+.grid {
+ display: flex;
+ flex-wrap: wrap;
+ justify-content: center;
+ gap: 20px;
+}
+
+.card {
+ background-color: #1c2a39;
+ border: 1px solid #2c3e50;
+ padding: 20px;
+ display: flex;
+ flex-direction: column;
+ transition:
+ transform 0.2s,
+ box-shadow 0.2s;
+ flex: 1 1 300px;
+ max-width: 350px;
+}
+
+.card:hover {
+ border-color: #66b2ff;
+}
+
+.card-header {
+ margin-bottom: 15px;
+}
+
+.card-title {
+ font-size: 1.2em;
+ font-weight: bold;
+ color: #a0d1ff;
+ text-decoration: none;
+ display: block;
+ margin-bottom: 8px;
+}
+
+.card-title:hover {
+ color: #fff;
+}
+
+.card-meta {
+ font-size: 0.85em;
+ color: #6c8b9f;
+ font-family: monospace;
+}
+
+.extension-card {
+ flex-direction: row;
+ align-items: center;
+ max-width: 100%;
+ text-align: left;
+}
+
+.extension-icon {
+ width: 40px;
+ height: 40px;
+ border-radius: 8px;
+ object-fit: cover;
+ background-color: #1c2a39;
+ border: 1px solid #2c3e50;
+}
+
+.extension-details {
+ flex-grow: 1;
+ display: flex;
+ flex-direction: column;
+ min-width: 0;
+}
+
+.extension-details .card-header {
+ margin-bottom: 5px;
+}
+
+.extension-details .pkg-name {
+ font-size: 0.8em;
+ color: #555;
+ margin-bottom: 10px;
+ word-break: break-all;
+}
+
+.extension-details .card-actions {
+ justify-content: flex-start;
+}
+
+.extension-details .btn {
+ flex: 0 0 auto;
+ padding: 6px 12px;
+ font-size: 0.85em;
+}
+
+.table-container {
+ overflow-x: auto;
+ background-color: #1c2a39;
+ border: 1px solid #2c3e50;
+}
+
+.extensions-table {
+ width: 100%;
+ border-collapse: collapse;
+ margin-top: 20px;
+}
+
+.extensions-table th,
+.extensions-table td {
+ padding: 6px 10px;
+ border-bottom: 1px solid #2c3e50;
+}
+
+.extensions-table th {
+ background-color: #15202b;
+ color: #66b2ff;
+ font-weight: bold;
+ text-transform: uppercase;
+ font-size: 0.85em;
+}
+
+.extensions-table tr:last-child td {
+ border-bottom: none;
+}
+
+.extensions-table tr:hover {
+ background-color: #253646;
+}
+
+.info-cell {
+ max-width: 200px;
+}
+
+.extension-icon-small {
+ width: 32px;
+ height: 32px;
+ border-radius: 6px;
+ object-fit: cover;
+ background-color: #2c3e50;
+ display: block;
+}
+
+.extension-name {
+ font-weight: bold;
+ color: #66b2ff;
+ font-size: 16px;
+ margin-bottom: 2px;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+}
+
+.nsfw-badge {
+ display: inline-block;
+ background-color: #e74c3c;
+ color: #fff;
+ padding: 2px 6px;
+ border-radius: 3px;
+ font-size: 0.7em;
+ font-weight: bold;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+}
+
+.extension-pkg {
+ font-size: 0.75em;
+ color: #6c8b9f;
+ font-family: monospace;
+ margin-top: 2px;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+}
+
+.extension-source {
+ font-size: 0.7em;
+ color: #8a9ba8;
+ margin-top: 3px;
+ font-style: italic;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
+}
+
+.meta-cell {
+ font-size: 0.9em;
+ color: #ccc;
+ white-space: nowrap;
+}
+
+.meta-cell .version {
+ display: inline-block;
+ margin-right: 8px;
+ color: #a0d1ff;
+}
+
+.meta-cell .lang {
+ display: inline-block;
+ background-color: #2c3e50;
+ padding: 1px 5px;
+ border-radius: 3px;
+ font-size: 0.8em;
+ color: #a0d1ff;
+}
+
+.search-container {
+ margin-bottom: 20px;
+ text-align: center;
+}
+
+.search-input {
+ width: 100%;
+ max-width: 600px;
+ padding: 12px 15px;
+ background-color: #1c2a39;
+ border: 1px solid #2c3e50;
+ color: #a0d1ff;
+ font-size: 16px;
+ box-sizing: border-box;
+ border-radius: 4px;
+ transition: border-color 0.2s;
+}
+
+.search-input:focus {
+ outline: none;
+ border-color: #66b2ff;
+}
+
+.filter-bar {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
+ gap: 15px;
+ margin-bottom: 20px;
+ padding: 15px;
+ background-color: #1c2a39;
+ border: 1px solid #2c3e50;
+ border-radius: 4px;
+ align-items: center;
+}
+
+.filter-group {
+ display: flex;
+ flex-direction: column;
+ gap: 6px;
+ align-items: flex-start;
+}
+
+.filter-group label {
+ color: #a0d1ff;
+ font-size: 13px;
+ font-weight: 500;
+ white-space: nowrap;
+}
+
+.filter-group select {
+ width: 100%;
+ padding: 8px 10px;
+ background-color: #121212;
+ color: #a0d1ff;
+ border: 1px solid #66b2ff;
+ border-radius: 3px;
+ font-size: 14px;
+ cursor: pointer;
+}
+
+.filter-group select:focus {
+ outline: none;
+ border-color: #89cfff;
+ box-shadow: 0 0 0 2px rgba(102, 178, 255, 0.1);
+}
+
+.filter-checkbox {
+ display: flex;
+ align-items: flex-start;
+ justify-content: flex-start;
+}
+
+.filter-checkbox label {
+ display: flex;
+ align-items: center;
+ gap: 6px;
+ cursor: pointer;
+}
+
+.filter-checkbox input[type='checkbox'] {
+ width: 18px;
+ height: 18px;
+ cursor: pointer;
+ accent-color: #66b2ff;
+}
+
+.filter-checkbox span {
+ user-select: none;
+}
+
+.page-header {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ margin-bottom: 20px;
+ position: relative;
+}
+
+.page-header h1 {
+ margin: 0;
+}
+
+.header-btn {
+ position: absolute;
+ right: 0;
+}
+
+.commit-link {
+ color: inherit;
+ text-decoration: none;
+ transition: all 0.2s;
+ border-bottom: 1px dotted transparent;
+}
+
+.commit-link:hover {
+ color: #66b2ff;
+ border-bottom-color: #66b2ff;
+ text-shadow: 0 0 8px rgba(102, 178, 255, 0.4);
+}
+
+.card-actions {
+ margin-top: auto;
+ display: flex;
+ gap: 10px;
+}
+
+.btn {
+ flex: 1;
+ padding: 10px;
+ text-align: center;
+ text-decoration: none;
+ font-weight: bold;
+ transition: all 0.2s;
+ font-size: 0.9em;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ cursor: pointer;
+}
+
+.btn-sm {
+ padding: 6px 12px;
+ font-size: 0.85em;
+}
+
+.btn-primary {
+ background-color: #196ec8;
+ color: #fff;
+ border: 1px solid #196ec8;
+}
+
+.btn-primary:hover {
+ background-color: #155a9c;
+ border-color: #155a9c;
+}
+
+.btn-secondary {
+ background-color: transparent;
+ border: 1px solid #66b2ff;
+ color: #66b2ff;
+}
+
+.btn-secondary:hover {
+ background-color: rgba(102, 178, 255, 0.1);
+}
+
+footer {
+ text-align: center;
+ padding: 10px 0;
+ background-color: #1c2a39;
+ border-top: 2px solid #66b2ff;
+ color: #a0d1ff;
+ width: 100%;
+ margin-top: auto;
+}
+
+footer a {
+ color: #66b2ff;
+ text-decoration: none;
+}
+
+footer a:hover {
+ color: #89cfff;
+}
+
+@media (max-width: 600px) {
+ .container {
+ padding: 10px;
+ }
+
+ .page-header {
+ flex-direction: column;
+ position: static;
+ gap: 10px;
+ }
+
+ .header-btn,
+ .page-header .btn {
+ position: static;
+ width: auto;
+ min-width: 120px;
+ }
+
+ .page-header h1 {
+ font-size: 1.5em;
+ margin-bottom: 5px;
+ }
+
+ .extensions-table th,
+ .extensions-table td {
+ padding: 12px 10px;
+ text-align: left;
+ border-bottom: 1px solid #2c3e50;
+ vertical-align: middle;
+ }
+
+ .extension-pkg {
+ display: none;
+ }
+
+ .meta-cell .version {
+ display: block;
+ margin-right: 0;
+ margin-bottom: 2px;
+ }
+
+ .meta-cell .lang {
+ display: inline-block;
+ }
+
+ .card {
+ flex: 1 1 100%;
+ max-width: 100%;
+ }
+
+ .filter-bar {
+ grid-template-columns: 1fr;
+ gap: 12px;
+ padding: 12px;
+ }
+
+ .filter-group {
+ width: 100%;
+ }
+
+ .filter-checkbox {
+ justify-content: flex-start;
+ width: 100%;
+ }
+}
+
+.pagination-container {
+ margin-top: 30px;
+ padding: 20px;
+ background-color: #1c2a39;
+ border: 1px solid #2c3e50;
+ border-radius: 4px;
+ display: flex;
+ flex-direction: column;
+ gap: 15px;
+ align-items: center;
+}
+
+.pagination-info {
+ color: #a0d1ff;
+ font-size: 14px;
+ text-align: center;
+}
+
+.pagination-controls {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ flex-wrap: wrap;
+ justify-content: center;
+}
+
+.page-numbers {
+ display: flex;
+ gap: 5px;
+ align-items: center;
+}
+
+.pagination-controls .btn:disabled {
+ opacity: 0.5;
+ cursor: not-allowed;
+}
+
+.pagination-controls .btn:disabled:hover {
+ background-color: transparent;
+}
+
+@media (max-width: 600px) {
+ .pagination-container {
+ padding: 15px;
+ margin-top: 20px;
+ }
+
+ .pagination-controls {
+ gap: 6px;
+ }
+
+ .page-numbers .btn {
+ min-width: 35px;
+ padding: 6px 10px;
+ font-size: 13px;
+ }
+
+ .pagination-info {
+ font-size: 13px;
+ }
+}
diff --git a/src/app.d.ts b/src/app.d.ts
new file mode 100644
index 0000000..d76242a
--- /dev/null
+++ b/src/app.d.ts
@@ -0,0 +1,13 @@
+// See https://svelte.dev/docs/kit/types#app.d.ts
+// for information about these interfaces
+declare global {
+ namespace App {
+ // interface Error {}
+ // interface Locals {}
+ // interface PageData {}
+ // interface PageState {}
+ // interface Platform {}
+ }
+}
+
+export {};
diff --git a/src/app.html b/src/app.html
new file mode 100644
index 0000000..cfcdea3
--- /dev/null
+++ b/src/app.html
@@ -0,0 +1,13 @@
+<!doctype html>
+<html lang="en">
+ <head>
+ <meta charset="utf-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+ <title>Mihon & Aniyomi Extensions</title>
+ <link rel="shortcut icon" href="%sveltekit.assets%/favicon.ico" type="image/x-icon" />
+ %sveltekit.head%
+ </head>
+ <body data-sveltekit-preload-data="hover">
+ <div style="display: contents">%sveltekit.body%</div>
+ </body>
+</html>
diff --git a/src/lib/components/ExtensionCard.svelte b/src/lib/components/ExtensionCard.svelte
new file mode 100644
index 0000000..90769e9
--- /dev/null
+++ b/src/lib/components/ExtensionCard.svelte
@@ -0,0 +1,47 @@
+<script lang="ts">
+ interface Props {
+ repo: {
+ source: string;
+ name: string;
+ path: string;
+ commit?: string;
+ };
+ protocol: string;
+ selectedDomain: string;
+ }
+
+ let { repo, protocol, selectedDomain }: Props = $props();
+</script>
+
+<div class="card">
+ <div class="card-header">
+ <a href={repo.source} target="_blank" class="card-title">
+ {repo.name}
+ </a>
+ <div class="card-meta">
+ {#if repo.commit}
+ Commit:{' '}
+ <a
+ href={`${repo.source}/commit/${repo.commit}`}
+ target="_blank"
+ class="commit-link"
+ >
+ {repo.commit.substring(0, 7)}
+ </a>
+ {:else}
+ Commit: N/A
+ {/if}
+ </div>
+ </div>
+ <div class="card-actions">
+ <a
+ href={`${protocol}://add-repo?url=${selectedDomain}${repo.path}`}
+ class="btn btn-primary"
+ >
+ Add Repo
+ </a>
+ <a href={`${selectedDomain}${repo.path}`} target="_blank" class="btn btn-secondary">
+ JSON
+ </a>
+ </div>
+</div>
diff --git a/src/lib/components/ExtensionCategory.svelte b/src/lib/components/ExtensionCategory.svelte
new file mode 100644
index 0000000..0acf92f
--- /dev/null
+++ b/src/lib/components/ExtensionCategory.svelte
@@ -0,0 +1,24 @@
+<script lang="ts">
+ import ExtensionCard from './ExtensionCard.svelte';
+ import type { ExtensionRepo } from '$lib/types';
+
+ interface Props {
+ category: string;
+ repos: ExtensionRepo[];
+ selectedDomain: string;
+ }
+
+ let { category, repos, selectedDomain }: Props = $props();
+
+ let protocol = $derived(category.toLowerCase() === 'mihon' ? 'tachiyomi' : 'aniyomi');
+ let title = $derived(category.charAt(0).toUpperCase() + category.slice(1));
+</script>
+
+<div class={category}>
+ <h2>{title} Extensions</h2>
+ <div class="grid">
+ {#each repos as repo}
+ <ExtensionCard {repo} {protocol} {selectedDomain} />
+ {/each}
+ </div>
+</div>
diff --git a/src/lib/components/ExtensionRow.svelte b/src/lib/components/ExtensionRow.svelte
new file mode 100644
index 0000000..d2a4102
--- /dev/null
+++ b/src/lib/components/ExtensionRow.svelte
@@ -0,0 +1,47 @@
+<script lang="ts">
+ import type { Extension } from '$lib/types';
+
+ interface Props {
+ extension: Extension;
+ repoUrl: string;
+ }
+
+ let { extension, repoUrl }: Props = $props();
+
+ function handleImageError(e: Event) {
+ const target = e.target as HTMLImageElement;
+ target.src =
+ 'data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSI2NCIgaGVpZ2h0PSI2NCIgdmlld0JveD0iMCAwIDY0IDY0Ij48cmVjdCB3aWR0aD0iNjQiIGhlaWdodD0iNjQiIGZpbGw9IiMyYzNlNTAiLz48dGV4dCB4PSI1MCUiIHk9IjUwJSIgZG9taW5hbnQtYmFzZWxpbmU9Im1pZGRsZSIgdGV4dC1hbmNob3I9Im1pZGRsZSIgZmlsbD0iIzZjOGI5ZiIgZm9udC1zaXplPSIxMiI+TjwvdGV4dD48L3N2Zz4=';
+ }
+</script>
+
+<tr class="extension-row">
+ <td class="icon-cell">
+ <img
+ src={`${repoUrl}/icon/${extension.pkg}.png`}
+ alt={extension.name}
+ class="extension-icon-small"
+ loading="lazy"
+ onerror={handleImageError}
+ />
+ </td>
+ <td class="info-cell">
+ <div class="extension-name">
+ {extension.name}
+ {#if extension.nsfw === 1}
+ <span class="nsfw-badge">NSFW</span>
+ {/if}
+ </div>
+ <div class="extension-pkg">{extension.pkg}</div>
+ {#if extension.sourceName}
+ <div class="extension-source">Source: {extension.sourceName}</div>
+ {/if}
+ </td>
+ <td class="meta-cell">
+ <span class="version">v{extension.version}</span>
+ <span class="lang">{extension.lang}</span>
+ </td>
+ <td class="action-cell">
+ <a href={`${repoUrl}/apk/${extension.apk}`} class="btn btn-primary btn-sm"> Download </a>
+ </td>
+</tr>
diff --git a/src/lib/components/Footer.svelte b/src/lib/components/Footer.svelte
new file mode 100644
index 0000000..3a228ca
--- /dev/null
+++ b/src/lib/components/Footer.svelte
@@ -0,0 +1,16 @@
+<script lang="ts">
+ interface Props {
+ source: string;
+ commitLink: string;
+ latestCommitHash: string;
+ }
+
+ let { source, commitLink, latestCommitHash }: Props = $props();
+</script>
+
+<footer>
+ Source Code: <a href={source} target="_blank">{source}</a>
+ <div>
+ Commit: <a href={commitLink} target="_blank">{latestCommitHash}</a>
+ </div>
+</footer>
diff --git a/src/lib/components/MirrorSelector.svelte b/src/lib/components/MirrorSelector.svelte
new file mode 100644
index 0000000..7db4885
--- /dev/null
+++ b/src/lib/components/MirrorSelector.svelte
@@ -0,0 +1,26 @@
+<script lang="ts">
+ import { selectedDomain } from '$lib/stores/mirror';
+
+ interface Props {
+ domains: string[];
+ }
+
+ let { domains }: Props = $props();
+
+ function getHostname(url: string) {
+ try {
+ return new URL(url).hostname;
+ } catch {
+ return url;
+ }
+ }
+</script>
+
+<div class="controls">
+ <label for="mirror-select">Select Mirror:&nbsp;</label>
+ <select id="mirror-select" bind:value={$selectedDomain}>
+ {#each domains as domain}
+ <option value={domain}>{getHostname(domain)}</option>
+ {/each}
+ </select>
+</div>
diff --git a/src/lib/search/debounce.ts b/src/lib/search/debounce.ts
new file mode 100644
index 0000000..4b685e7
--- /dev/null
+++ b/src/lib/search/debounce.ts
@@ -0,0 +1,30 @@
+/**
+ * Creates a debounced version of a function that delays execution until after
+ * the specified wait time has elapsed since the last invocation.
+ */
+export function debounce<T extends (...args: any[]) => any>(
+ func: T,
+ wait: number
+): (...args: Parameters<T>) => void {
+ let timeoutId: ReturnType<typeof setTimeout> | null = null;
+
+ return function (...args: Parameters<T>) {
+ if (timeoutId !== null) {
+ clearTimeout(timeoutId);
+ }
+
+ timeoutId = setTimeout(() => {
+ func(...args);
+ timeoutId = null;
+ }, wait);
+ };
+}
+
+/**
+ * Clears a timeout if it exists
+ */
+export function clearDebounce(timeoutId: ReturnType<typeof setTimeout> | null) {
+ if (timeoutId !== null) {
+ clearTimeout(timeoutId);
+ }
+}
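A minimal usage sketch of debounce(), with a hypothetical callback; the 300 ms wait mirrors what the search page later in this diff uses.

import { debounce } from '$lib/search/debounce';

// Hypothetical callback; only the last call in each 300 ms window actually runs.
const runSearch = (query: string) => console.log('searching for', query);
const debouncedSearch = debounce(runSearch, 300);

debouncedSearch('m');
debouncedSearch('mi');
debouncedSearch('mihon'); // only this invocation fires, ~300 ms after the last call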
diff --git a/src/lib/search/meilisearch.ts b/src/lib/search/meilisearch.ts
new file mode 100644
index 0000000..9b4c0d1
--- /dev/null
+++ b/src/lib/search/meilisearch.ts
@@ -0,0 +1,125 @@
+export interface MeilisearchConfig {
+ host: string;
+ apiKey?: string;
+}
+
+export interface SearchFilters {
+ query?: string;
+ source?: string;
+ category?: string;
+ lang?: string;
+ nsfw?: boolean;
+ page?: number;
+ limit?: number;
+}
+
+interface MeilisearchClient {
+ host: string;
+ apiKey: string;
+}
+
+let client: MeilisearchClient | null = null;
+
+export function initMeilisearch(config: MeilisearchConfig) {
+ if (!config.host) {
+ console.warn('Meilisearch not configured');
+ return null;
+ }
+ client = { host: config.host, apiKey: config.apiKey ?? '' };
+ return client;
+}
+
+export function isMeilisearchEnabled(): boolean {
+ return client !== null;
+}
+
+/**
+ * Transforms a Meilisearch hit to EnrichedExtension format
+ */
+export function transformMeilisearchHit(hit: any) {
+ return {
+ name: hit.name,
+ pkg: hit.pkg,
+ apk: hit.apk,
+ lang: hit.lang,
+ code: hit.code,
+ version: hit.version,
+ nsfw: hit.nsfw,
+ repoUrl: hit.repoUrl,
+ sourceName: hit.sourceName,
+ formattedSourceName: hit.formattedSourceName
+ };
+}
+
+export async function searchExtensions(filters: SearchFilters) {
+ if (!client) {
+ throw new Error('Meilisearch client not initialized');
+ }
+
+ const filterConditions: string[] = [];
+
+ if (filters.source && filters.source !== 'all')
+ filterConditions.push(`formattedSourceName = "${filters.source}"`);
+ if (filters.category && filters.category !== 'all')
+ filterConditions.push(`category = "${filters.category}"`);
+ if (filters.lang && filters.lang !== 'all') filterConditions.push(`lang = "${filters.lang}"`);
+ if (filters.nsfw === false) filterConditions.push('nsfw = 0');
+
+ const page = filters.page || 1;
+ const limit = filters.limit || 50;
+ const offset = (page - 1) * limit;
+
+ const body: Record<string, any> = {
+ q: filters.query || '',
+ limit,
+ offset
+ };
+
+ if (filterConditions.length > 0) body.filter = filterConditions;
+
+ const response = await fetch(`${client.host}/indexes/extensions/search`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${client.apiKey}`
+ },
+ body: JSON.stringify(body)
+ });
+
+ if (!response.ok) {
+ throw new Error(`Meilisearch error: ${response.status} ${response.statusText}`);
+ }
+
+ return await response.json();
+}
+
+export async function getFilterOptions() {
+ if (!client) {
+ throw new Error('Meilisearch client not initialized');
+ }
+
+ const response = await fetch(`${client.host}/indexes/extensions/search`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${client.apiKey}`
+ },
+ body: JSON.stringify({
+ q: '',
+ limit: 0,
+ facets: ['formattedSourceName', 'category', 'lang']
+ })
+ });
+
+ if (!response.ok) {
+ throw new Error(`Meilisearch error: ${response.status} ${response.statusText}`);
+ }
+
+ const result = await response.json();
+
+ return {
+ sources: Object.keys(result.facetDistribution?.formattedSourceName || {}),
+ categories: Object.keys(result.facetDistribution?.category || {}),
+ languages: Object.keys(result.facetDistribution?.lang || {})
+ };
+}
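A minimal usage sketch of the search client above, assuming a reachable Meilisearch host and a search-only API key; both values below are placeholders (the app reads them from VITE_MEILISEARCH_* variables).

import { initMeilisearch, searchExtensions, transformMeilisearchHit } from '$lib/search/meilisearch';

// Placeholder host/key for illustration only.
initMeilisearch({ host: 'https://search.example.org', apiKey: 'search-only-key' });

const result = await searchExtensions({ query: 'manga', lang: 'en', nsfw: false, page: 1, limit: 10 });
const extensions = result.hits.map(transformMeilisearchHit);
console.log(result.estimatedTotalHits, extensions[0]?.name);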
diff --git a/src/lib/search/types.ts b/src/lib/search/types.ts
new file mode 100644
index 0000000..cc30ae5
--- /dev/null
+++ b/src/lib/search/types.ts
@@ -0,0 +1,27 @@
+import type { Extension } from '$lib/types';
+
+/**
+ * Extension enriched with repository and source information
+ */
+export interface EnrichedExtension extends Extension {
+ repoUrl: string;
+ sourceName: string;
+ formattedSourceName: string;
+ category: string;
+}
+
+/**
+ * Repository information from data.json
+ */
+export interface RepoInfo {
+ name: string;
+ path: string;
+ commit: string;
+}
+
+/**
+ * Repository data grouped by category
+ */
+export interface RepoData {
+ [category: string]: RepoInfo[];
+}
diff --git a/src/lib/search/utils.ts b/src/lib/search/utils.ts
new file mode 100644
index 0000000..d6b6aa8
--- /dev/null
+++ b/src/lib/search/utils.ts
@@ -0,0 +1,17 @@
+/**
+ * Formats a source name to lowercase with dots instead of spaces
+ */
+export function formatSourceName(sourceName: string): string {
+ return sourceName.toLowerCase().replace(/\s+/g, '.');
+}
+
+/**
+ * Finds a source by its formatted name from available sources
+ */
+export function findSourceByFormattedName(
+ formattedName: string,
+ availableSources: string[]
+): string {
+ if (formattedName === 'all') return 'all';
+ return availableSources.find((source) => formatSourceName(source) === formattedName) ?? 'all';
+}
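A quick round trip through the two helpers above, using hypothetical source names:

import { findSourceByFormattedName, formatSourceName } from '$lib/search/utils';

// Hypothetical source list; formatting lowercases names and swaps spaces for dots.
const sources = ['Example Source', 'Another Source'];

formatSourceName('Example Source');                   // 'example.source'
findSourceByFormattedName('example.source', sources); // 'Example Source'
findSourceByFormattedName('missing.source', sources); // no match, falls back to 'all'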
diff --git a/src/lib/stores/mirror.ts b/src/lib/stores/mirror.ts
new file mode 100644
index 0000000..d80a105
--- /dev/null
+++ b/src/lib/stores/mirror.ts
@@ -0,0 +1,3 @@
+import { writable } from 'svelte/store';
+
+export const selectedDomain = writable<string>('');
diff --git a/src/lib/types.ts b/src/lib/types.ts
new file mode 100644
index 0000000..79783b3
--- /dev/null
+++ b/src/lib/types.ts
@@ -0,0 +1,26 @@
+export interface Extension {
+ pkg: string;
+ name: string;
+ version: string;
+ lang: string;
+ apk: string;
+ nsfw: number;
+ sourceName?: string;
+}
+
+export interface ExtensionRepo {
+ source: string;
+ name: string;
+ path: string;
+ commit: string;
+}
+
+export interface AppData {
+ extensions: {
+ [category: string]: ExtensionRepo[];
+ };
+ domains: string[];
+ source: string;
+ commitLink: string;
+ latestCommitHash: string;
+}
diff --git a/src/routes/+layout.svelte b/src/routes/+layout.svelte
new file mode 100644
index 0000000..cf931d8
--- /dev/null
+++ b/src/routes/+layout.svelte
@@ -0,0 +1,28 @@
+<script lang="ts">
+ import '../app.css';
+
+ import { selectedDomain } from '$lib/stores/mirror';
+ import type { Snippet } from 'svelte';
+ import type { LayoutData } from './$types';
+
+ import Footer from '$lib/components/Footer.svelte';
+
+ interface Props {
+ children: Snippet;
+ data: LayoutData;
+ }
+
+ let { children, data }: Props = $props();
+
+ let { source, commitLink, latestCommitHash, domains } = $derived(data);
+
+ $effect(() => {
+ if (domains && domains.length > 0) {
+ selectedDomain.update((d) => d || domains[0]);
+ }
+ });
+</script>
+
+{@render children()}
+
+<Footer {source} {commitLink} {latestCommitHash} />
diff --git a/src/routes/+layout.ts b/src/routes/+layout.ts
new file mode 100644
index 0000000..fc5ca3f
--- /dev/null
+++ b/src/routes/+layout.ts
@@ -0,0 +1,10 @@
+import type { AppData } from '$lib/types';
+
+export const prerender = true;
+
+export const load = async ({ fetch }) => {
+ const response = await fetch('/data.json');
+ const data = (await response.json()) as AppData;
+
+ return { ...data };
+};
diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte
new file mode 100644
index 0000000..5365d99
--- /dev/null
+++ b/src/routes/+page.svelte
@@ -0,0 +1,21 @@
+<script lang="ts">
+ import ExtensionCategory from '$lib/components/ExtensionCategory.svelte';
+ import MirrorSelector from '$lib/components/MirrorSelector.svelte';
+ import { selectedDomain } from '$lib/stores/mirror';
+
+ let { data } = $props();
+ let { extensions, domains } = $derived(data);
+</script>
+
+<div class="container">
+ <div class="page-header">
+ <h1>Mihon & Aniyomi Extensions</h1>
+ <a href="/search" class="btn btn-secondary header-btn"> Search </a>
+ </div>
+
+ <MirrorSelector {domains} />
+
+ {#each Object.entries(extensions) as [category, repos]}
+ <ExtensionCategory {category} {repos} selectedDomain={$selectedDomain} />
+ {/each}
+</div>
diff --git a/src/routes/search/+page.svelte b/src/routes/search/+page.svelte
new file mode 100644
index 0000000..2df902d
--- /dev/null
+++ b/src/routes/search/+page.svelte
@@ -0,0 +1,322 @@
+<script lang="ts">
+ import { browser } from '$app/environment';
+ import { goto } from '$app/navigation';
+ import { page } from '$app/state';
+ import { onMount } from 'svelte';
+
+ import ExtensionRow from '$lib/components/ExtensionRow.svelte';
+ import { debounce } from '$lib/search/debounce.js';
+ import {
+ getFilterOptions,
+ initMeilisearch,
+ searchExtensions,
+ transformMeilisearchHit
+ } from '$lib/search/meilisearch.js';
+ import type { EnrichedExtension } from '$lib/search/types.js';
+ import { findSourceByFormattedName, formatSourceName } from '$lib/search/utils.js';
+
+ // Component state (must be declared before derived state that uses them)
+ let loading = $state(true);
+ let error = $state<string | null>(null);
+ let results = $state<EnrichedExtension[]>([]);
+ let sources = $state<string[]>(['all']);
+ let categories = $state<string[]>(['all']);
+ let languages = $state<string[]>(['all']);
+ let currentPage = $state(1);
+ let totalPages = $state(1);
+ let totalHits = $state(0);
+ let resultsPerPage = $state(10);
+ let hasSearched = $state(false);
+
+ // Derived state from URL parameters
+ let query = $derived(browser ? (page.url.searchParams.get('q') ?? '') : '');
+ let selectedSource = $derived(
+ browser
+ ? findSourceByFormattedName(page.url.searchParams.get('source') ?? 'all', sources)
+ : 'all'
+ );
+ let selectedCategory = $derived(
+ browser ? (page.url.searchParams.get('category') ?? 'all') : 'all'
+ );
+ let selectedLanguage = $derived(browser ? (page.url.searchParams.get('lang') ?? 'all') : 'all');
+ let showNSFW = $derived(browser ? page.url.searchParams.get('nsfw') !== '0' : true);
+ let pageParam = $derived(browser ? parseInt(page.url.searchParams.get('page') ?? '1') : 1);
+
+ // URL parameter management
+ function updateParams(updates: Record<string, string | null>) {
+ const params = new URLSearchParams(page.url.searchParams);
+
+ // Reset to page 1 if any search filter changed (not page parameter)
+ const filterChanges = Object.keys(updates).filter((key) => key !== 'page');
+ if (filterChanges.length > 0) {
+ params.set('page', '1');
+ }
+
+ for (const [key, value] of Object.entries(updates)) {
+ if (value === null) params.delete(key);
+ else params.set(key, value);
+ }
+ goto(`?${params.toString()}`, { replaceState: true, keepFocus: true, noScroll: true });
+ }
+
+ // Initialize Meilisearch
+ onMount(async () => {
+ try {
+ const meiliConfig = {
+ host: import.meta.env.VITE_MEILISEARCH_HOST || '',
+ apiKey: import.meta.env.VITE_MEILISEARCH_DEFAULT_SEARCH_KEY
+ };
+
+ if (!meiliConfig.host) {
+ error = 'Meilisearch is not configured.';
+ return;
+ }
+
+ initMeilisearch(meiliConfig);
+ } catch (e) {
+ console.error(e);
+ error = 'Failed to initialize Meilisearch.';
+ } finally {
+ loading = false;
+ }
+ });
+
+ // Debounced search with 300ms delay
+ const debouncedSearch = debounce(
+ (
+ query: string,
+ source: string,
+ category: string,
+ lang: string,
+ nsfw: boolean,
+ page: number
+ ) => {
+ searchExtensions({
+ query: query || undefined,
+ source: source !== 'all' ? formatSourceName(source) : undefined,
+ category: category !== 'all' ? category : undefined,
+ lang: lang !== 'all' ? lang : undefined,
+ nsfw: nsfw,
+ page,
+ limit: resultsPerPage
+ })
+ .then((searchResults) => {
+ results = searchResults.hits.map(transformMeilisearchHit);
+ totalHits = searchResults.estimatedTotalHits || searchResults.hits.length;
+ totalPages = Math.ceil(totalHits / resultsPerPage);
+ currentPage = page;
+ hasSearched = true;
+ })
+ .catch((err) => {
+ console.error('Meilisearch error:', err);
+ error = 'Search failed. Please try again.';
+ hasSearched = true;
+ })
+ .finally(() => {
+ loading = false;
+ });
+ },
+ 300
+ );
+
+ // Reactive search effect
+ $effect(() => {
+ if (!browser) return;
+
+ currentPage = pageParam;
+
+ // Set loading immediately, then execute debounced search
+ loading = true;
+ debouncedSearch(
+ query,
+ selectedSource,
+ selectedCategory,
+ selectedLanguage,
+ showNSFW,
+ pageParam
+ );
+ });
+
+ // Load filter options from Meilisearch
+ $effect(() => {
+ if (!browser) return;
+ getFilterOptions()
+ .then((options) => {
+ sources = [...new Set(['all', ...options.sources.sort()])];
+ categories = [...new Set(['all', ...options.categories.sort()])];
+ languages = [...new Set(['all', ...options.languages.sort()])];
+ })
+ .catch((err) => {
+ console.error('Failed to load filter options:', err);
+ });
+ });
+</script>
+
+<div class="container">
+ <div class="page-header">
+ <h1>Search Extensions</h1>
+ <a href="/" class="btn btn-secondary header-btn"> Home </a>
+ </div>
+ <div class="search-container">
+ <input
+ type="text"
+ class="search-input"
+ placeholder="Search by name or package..."
+ value={query}
+ oninput={(e) => updateParams({ q: e.currentTarget.value || null })}
+ />
+ </div>
+ <div class="filter-bar">
+ <div class="filter-group">
+ <label for="category-filter">Category:</label>
+ <select
+ id="category-filter"
+ value={selectedCategory}
+ onchange={(e) =>
+ updateParams({
+ category: e.currentTarget.value === 'all' ? null : e.currentTarget.value
+ })}
+ >
+ {#each categories as category (category)}
+ <option value={category}>
+ {category}
+ </option>
+ {/each}
+ </select>
+ </div>
+ <div class="filter-group">
+ <label for="source-filter">Source:</label>
+ <select
+ id="source-filter"
+ value={selectedSource}
+ onchange={(e) => {
+ const val = formatSourceName(e.currentTarget.value);
+ updateParams({ source: val === 'all' ? null : val });
+ }}
+ >
+ {#each sources as source (source)}
+ <option value={source}>
+ {source}
+ </option>
+ {/each}
+ </select>
+ </div>
+ <div class="filter-group">
+ <label for="language-filter">Language:</label>
+ <select
+ id="language-filter"
+ value={selectedLanguage}
+ onchange={(e) =>
+ updateParams({
+ lang: e.currentTarget.value === 'all' ? null : e.currentTarget.value
+ })}
+ >
+ {#each languages as lang (lang)}
+ <option value={lang}>
+ {lang}
+ </option>
+ {/each}
+ </select>
+ </div>
+ <div class="filter-group filter-checkbox">
+ <label>
+ <input
+ type="checkbox"
+ checked={showNSFW}
+ onchange={(e) => updateParams({ nsfw: e.currentTarget.checked ? null : '0' })}
+ />
+ <span>Show NSFW</span>
+ </label>
+ </div>
+ </div>
+ <div class="table-container">
+ <table class="extensions-table">
+ <thead>
+ <tr>
+ <th style="width: 60px;">Icon</th>
+ <th>Name / Package</th>
+ <th>Version / Lang</th>
+ <th style="width: 100px;">Action</th>
+ </tr>
+ </thead>
+ <tbody>
+ {#each results as ext (ext.formattedSourceName + ';' + ext.pkg)}
+ <ExtensionRow extension={ext} repoUrl={ext.repoUrl} />
+ {/each}
+ </tbody>
+ </table>
+ </div>
+ {#if totalPages > 1}
+ {@const startPage = Math.max(1, currentPage - 2)}
+ {@const endPage = Math.min(totalPages, startPage + 4)}
+ <div class="pagination-container">
+ <div class="pagination-info">
+ Showing {Math.min((currentPage - 1) * resultsPerPage + 1, totalHits)} to {Math.min(
+ currentPage * resultsPerPage,
+ totalHits
+ )} of {totalHits} results
+ </div>
+ <div class="pagination-controls">
+ <button
+ class="btn btn-secondary btn-sm"
+ disabled={currentPage === 1}
+ onclick={() => updateParams({ page: '1' })}
+ title="First page"
+ >
+ First
+ </button>
+
+ <button
+ class="btn btn-secondary btn-sm"
+ disabled={currentPage === 1}
+ onclick={() =>
+ updateParams({
+ page: currentPage === 1 ? null : (currentPage - 1).toString()
+ })}
+ >
+ Previous
+ </button>
+
+ <div class="page-numbers">
+ {#each Array.from({ length: endPage - startPage + 1 }, (_, i) => startPage + i) as pageNum}
+ <button
+ class="btn btn-sm {pageNum === currentPage
+ ? 'btn-primary'
+ : 'btn-secondary'}"
+ onclick={() =>
+ updateParams({ page: pageNum === 1 ? null : pageNum.toString() })}
+ >
+ {pageNum}
+ </button>
+ {/each}
+ </div>
+
+ <button
+ class="btn btn-secondary btn-sm"
+ disabled={currentPage === totalPages}
+ onclick={() => updateParams({ page: (currentPage + 1).toString() })}
+ >
+ Next
+ </button>
+
+ <button
+ class="btn btn-secondary btn-sm"
+ disabled={currentPage === totalPages}
+ onclick={() => updateParams({ page: totalPages.toString() })}
+ title="Last page"
+ >
+ Last
+ </button>
+ </div>
+ </div>
+ {/if}
+
+ {#if loading}
+ <div style="text-align: center; padding: 20px;">Loading extensions...</div>
+ {:else if results.length === 0 && hasSearched}
+ <div style="text-align: center; padding: 20px;">No results found.</div>
+ {/if}
+ {#if error}
+ <div style="text-align: center; margin-top: 50px; color: red;">{error}</div>
+ {/if}
+</div>
diff --git a/static/favicon.ico b/static/favicon.ico
new file mode 100644
index 0000000..3169e90
--- /dev/null
+++ b/static/favicon.ico
Binary files differ
diff --git a/static/robots.txt b/static/robots.txt
new file mode 100644
index 0000000..b6dd667
--- /dev/null
+++ b/static/robots.txt
@@ -0,0 +1,3 @@
+# allow crawling everything by default
+User-agent: *
+Disallow:
diff --git a/svelte.config.js b/svelte.config.js
new file mode 100644
index 0000000..ced8939
--- /dev/null
+++ b/svelte.config.js
@@ -0,0 +1,15 @@
+import adapter from '@sveltejs/adapter-static';
+import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
+
+/** @type {import('@sveltejs/kit').Config} */
+const config = {
+ preprocess: vitePreprocess(),
+ kit: {
+ adapter: adapter({
+ pages: 'dist',
+ assets: 'dist'
+ })
+ }
+};
+
+export default config;
diff --git a/tests/cache-files.test.ts b/tests/cache-files.test.ts
new file mode 100644
index 0000000..0dc740a
--- /dev/null
+++ b/tests/cache-files.test.ts
@@ -0,0 +1,100 @@
+import { test, expect, beforeEach, afterEach } from 'bun:test';
+import { calculateFileChecksum, ensureDir, cleanupDir } from '../scripts/cache/files';
+import { mkdir, rm, writeFile } from 'fs/promises';
+import { join } from 'path';
+import { tmpdir } from 'os';
+
+const testDir = join(tmpdir(), 'bun-test-cache');
+
+beforeEach(async () => {
+ await mkdir(testDir, { recursive: true });
+});
+
+afterEach(async () => {
+ await rm(testDir, { recursive: true, force: true });
+});
+
+test('calculateFileChecksum returns consistent hash for same content', async () => {
+ const filePath = join(testDir, 'test.txt');
+ await writeFile(filePath, 'Hello, World!');
+
+ const hash1 = await calculateFileChecksum(filePath);
+ const hash2 = await calculateFileChecksum(filePath);
+
+ expect(hash1).toBe(hash2);
+ expect(hash1).toHaveLength(64); // SHA256 produces 64 hex characters
+});
+
+test('calculateFileChecksum returns different hashes for different content', async () => {
+ const filePath1 = join(testDir, 'test1.txt');
+ const filePath2 = join(testDir, 'test2.txt');
+
+ await writeFile(filePath1, 'Hello, World!');
+ await writeFile(filePath2, 'Goodbye, World!');
+
+ const hash1 = await calculateFileChecksum(filePath1);
+ const hash2 = await calculateFileChecksum(filePath2);
+
+ expect(hash1).not.toBe(hash2);
+});
+
+test('calculateFileChecksum handles empty file', async () => {
+ const filePath = join(testDir, 'empty.txt');
+ await writeFile(filePath, '');
+
+ const hash = await calculateFileChecksum(filePath);
+ expect(hash).toHaveLength(64);
+});
+
+test('calculateFileChecksum handles larger file', async () => {
+ const filePath = join(testDir, 'large.txt');
+ const content = 'x'.repeat(11 * 1024 * 1024); // ~11MB (over 10MB threshold)
+ await writeFile(filePath, content);
+
+ const hash = await calculateFileChecksum(filePath);
+ expect(hash).toHaveLength(64);
+});
+
+test('ensureDir creates directory if it does not exist', async () => {
+ const newDir = join(testDir, 'new-directory');
+
+ await ensureDir(newDir);
+
+ // Check if directory exists by trying to create a file in it
+ await writeFile(join(newDir, 'test.txt'), 'test');
+ expect(true).toBe(true); // If we get here, directory exists
+});
+
+test('ensureDir does not error if directory already exists', async () => {
+ await ensureDir(testDir);
+ await ensureDir(testDir); // Should not throw
+
+ expect(true).toBe(true);
+});
+
+test('cleanupDir removes directory and all contents', async () => {
+ const subDir = join(testDir, 'subdir');
+ await mkdir(subDir);
+ await writeFile(join(subDir, 'file.txt'), 'content');
+ await writeFile(join(testDir, 'file2.txt'), 'content2');
+
+ await cleanupDir(testDir);
+
+ // Directory should be gone
+ let exists = true;
+ try {
+ await writeFile(join(testDir, 'test.txt'), 'test');
+ } catch {
+ exists = false;
+ }
+ expect(exists).toBe(false);
+});
+
+test('cleanupDir handles non-existent directory', async () => {
+ const nonExistent = join(testDir, 'does-not-exist');
+
+ // Should not throw
+ await cleanupDir(nonExistent);
+
+ expect(true).toBe(true);
+});
diff --git a/tests/cache-format.test.ts b/tests/cache-format.test.ts
new file mode 100644
index 0000000..33eb89b
--- /dev/null
+++ b/tests/cache-format.test.ts
@@ -0,0 +1,27 @@
+import { test, expect } from 'bun:test';
+
+// formatBytes is an internal function in cache.ts
+// This test verifies the expected behavior
+function formatBytes(bytes: number): string {
+ return (bytes / (1024 * 1024)).toFixed(2);
+}
+
+test('formatBytes converts bytes to MB', () => {
+ expect(formatBytes(1024 * 1024)).toBe('1.00');
+ expect(formatBytes(2 * 1024 * 1024)).toBe('2.00');
+ expect(formatBytes(10.5 * 1024 * 1024)).toBe('10.50');
+});
+
+test('formatBytes rounds to 2 decimal places', () => {
+ expect(formatBytes(1024 * 1024 + 1)).toBe('1.00');
+ expect(formatBytes(1.234 * 1024 * 1024)).toBe('1.23');
+});
+
+test('formatBytes handles zero', () => {
+ expect(formatBytes(0)).toBe('0.00');
+});
+
+test('formatBytes handles small values', () => {
+ expect(formatBytes(512 * 1024)).toBe('0.50');
+ expect(formatBytes(1024)).toBe('0.00');
+});
diff --git a/tests/cache-lock.test.ts b/tests/cache-lock.test.ts
new file mode 100644
index 0000000..c67ba01
--- /dev/null
+++ b/tests/cache-lock.test.ts
@@ -0,0 +1,141 @@
+import { test, expect } from 'bun:test';
+import { generateInstanceId } from '../scripts/cache/lock';
+import type { CacheLock } from '../scripts/cache/utils';
+
+test('generateInstanceId returns non-empty string', () => {
+ const id = generateInstanceId();
+ expect(typeof id).toBe('string');
+ expect(id.length).toBeGreaterThan(0);
+});
+
+test('generateInstanceId includes timestamp', () => {
+ const before = Date.now();
+ const id = generateInstanceId();
+ const after = Date.now();
+
+ const timestampPart = id.split('-')[0];
+ const timestamp = parseInt(timestampPart, 10);
+
+ expect(timestamp).toBeGreaterThanOrEqual(before);
+ expect(timestamp).toBeLessThanOrEqual(after);
+});
+
+test('generateInstanceId includes random component', () => {
+ const id1 = generateInstanceId();
+ const id2 = generateInstanceId();
+
+ expect(id1).not.toBe(id2);
+});
+
+test('generateInstanceId format is timestamp-randomstring', () => {
+ const id = generateInstanceId();
+ const parts = id.split('-');
+ expect(parts.length).toBe(2);
+
+ const [timestamp, random] = parts;
+ expect(timestamp).toMatch(/^\d+$/);
+ expect(random).toMatch(/^[a-z0-9]+$/);
+ expect(random.length).toBeGreaterThan(0);
+ expect(random.length).toBeLessThan(10);
+});
+
+// Helper function to test isLockStale logic (since it's private)
+function isLockStale(lock: CacheLock, currentHostname: string): boolean {
+ const lockAge = Date.now() - lock.timestamp;
+ const timeSinceRenewal = lock.renewedAt ? Date.now() - lock.renewedAt : lockAge;
+
+ // Check 1: Timestamp-based staleness (30 minutes)
+ if (timeSinceRenewal > 30 * 60 * 1000) {
+ return true;
+ }
+
+ // Check 2: Process-based staleness (only on same machine)
+ if (lock.hostname === currentHostname) {
+ // For testing, assume process doesn't exist if pid is -1
+ if (lock.pid === -1) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+test('isLockStale returns true for old lock (over 30 minutes)', () => {
+ const lock: CacheLock = {
+ locked: true,
+ timestamp: Date.now() - 31 * 60 * 1000, // 31 minutes ago
+ instance: 'test-instance',
+ ttl: 30 * 60 * 1000,
+ pid: 12345,
+ hostname: 'test-host'
+ };
+
+ expect(isLockStale(lock, 'test-host')).toBe(true);
+});
+
+test('isLockStale returns false for recent lock (under 30 minutes)', () => {
+ const lock: CacheLock = {
+ locked: true,
+ timestamp: Date.now() - 29 * 60 * 1000, // 29 minutes ago
+ instance: 'test-instance',
+ ttl: 30 * 60 * 1000,
+ pid: 12345,
+ hostname: 'test-host'
+ };
+
+ expect(isLockStale(lock, 'test-host')).toBe(false);
+});
+
+test('isLockStale respects renewedAt timestamp', () => {
+ const now = Date.now();
+ const lock: CacheLock = {
+ locked: true,
+ timestamp: now - 40 * 60 * 1000, // 40 minutes ago
+ instance: 'test-instance',
+ ttl: 30 * 60 * 1000,
+ renewedAt: now - 10 * 60 * 1000, // Renewed 10 minutes ago
+ pid: 12345,
+ hostname: 'test-host'
+ };
+
+ expect(isLockStale(lock, 'test-host')).toBe(false);
+});
+
+test('isLockStale returns true when lock on same host but process dead', () => {
+ const lock: CacheLock = {
+ locked: true,
+ timestamp: Date.now() - 10 * 60 * 1000, // 10 minutes ago
+ instance: 'test-instance',
+ ttl: 30 * 60 * 1000,
+ pid: -1, // Simulate dead process
+ hostname: 'test-host'
+ };
+
+ expect(isLockStale(lock, 'test-host')).toBe(true);
+});
+
+test('isLockStale returns false when lock on different host (even if old process)', () => {
+ const lock: CacheLock = {
+ locked: true,
+ timestamp: Date.now() - 10 * 60 * 1000,
+ instance: 'test-instance',
+ ttl: 30 * 60 * 1000,
+ pid: -1,
+ hostname: 'different-host'
+ };
+
+ expect(isLockStale(lock, 'test-host')).toBe(false);
+});
+
+test('isLockStale handles missing renewedAt', () => {
+ const lock: CacheLock = {
+ locked: true,
+ timestamp: Date.now() - 35 * 60 * 1000, // 35 minutes ago
+ instance: 'test-instance',
+ ttl: 30 * 60 * 1000,
+ pid: 12345,
+ hostname: 'test-host'
+ };
+
+ expect(isLockStale(lock, 'test-host')).toBe(true);
+});
diff --git a/tests/cache-manifest.test.ts b/tests/cache-manifest.test.ts
new file mode 100644
index 0000000..d54cd8b
--- /dev/null
+++ b/tests/cache-manifest.test.ts
@@ -0,0 +1,76 @@
+import { test, expect } from 'bun:test';
+import { findCacheByKey, findCacheByPrefix } from '../scripts/cache/manifest';
+import type { CacheManifest } from '../scripts/cache/utils';
+
+test('findCacheByKey returns null when cache not found', () => {
+ const manifest: CacheManifest = {
+ version: 1,
+ caches: [
+ { key: 'cache1.tzst', hash: 'abc123', timestamp: 1000, lastAccessed: 1000 },
+ { key: 'cache2.tzst', hash: 'def456', timestamp: 2000, lastAccessed: 2000 }
+ ]
+ };
+
+ expect(findCacheByKey(manifest, 'cache3.tzst')).toBeNull();
+});
+
+test('findCacheByKey returns matching cache entry', () => {
+ const manifest: CacheManifest = {
+ version: 1,
+ caches: [
+ { key: 'cache1.tzst', hash: 'abc123', timestamp: 1000, lastAccessed: 1000 },
+ { key: 'cache2.tzst', hash: 'def456', timestamp: 2000, lastAccessed: 2000 }
+ ]
+ };
+
+ const result = findCacheByKey(manifest, 'cache1.tzst');
+ expect(result).not.toBeNull();
+ expect(result?.key).toBe('cache1.tzst');
+ expect(result?.hash).toBe('abc123');
+});
+
+test('findCacheByPrefix returns null when no caches match prefix', () => {
+ const manifest: CacheManifest = {
+ version: 1,
+ caches: [
+ { key: 'cache1.tzst', hash: 'abc123', timestamp: 1000, lastAccessed: 1000 },
+ { key: 'cache2.tzst', hash: 'def456', timestamp: 2000, lastAccessed: 2000 }
+ ]
+ };
+
+ expect(findCacheByPrefix(manifest, 'other-')).toBeNull();
+});
+
+test('findCacheByPrefix returns most recently created cache', () => {
+ const manifest: CacheManifest = {
+ version: 1,
+ caches: [
+ { key: 'extensions-abc123.tzst', hash: 'abc123', timestamp: 1000, lastAccessed: 1000 },
+ { key: 'extensions-def456.tzst', hash: 'def456', timestamp: 3000, lastAccessed: 3000 },
+ { key: 'extensions-ghi789.tzst', hash: 'ghi789', timestamp: 2000, lastAccessed: 2000 }
+ ]
+ };
+
+ const result = findCacheByPrefix(manifest, 'extensions-');
+ expect(result).not.toBeNull();
+ expect(result?.key).toBe('extensions-def456.tzst');
+ expect(result?.timestamp).toBe(3000);
+});
+
+test('findCacheByPrefix handles empty caches array', () => {
+ const manifest: CacheManifest = {
+ version: 1,
+ caches: []
+ };
+
+ expect(findCacheByPrefix(manifest, 'extensions-')).toBeNull();
+});
+
+test('findCacheByKey handles empty caches array', () => {
+ const manifest: CacheManifest = {
+ version: 1,
+ caches: []
+ };
+
+ expect(findCacheByKey(manifest, 'cache1.tzst')).toBeNull();
+});
diff --git a/tests/cache-metadata.test.ts b/tests/cache-metadata.test.ts
new file mode 100644
index 0000000..cf725c6
--- /dev/null
+++ b/tests/cache-metadata.test.ts
@@ -0,0 +1,21 @@
+import { test, expect } from 'bun:test';
+
+// The getMetadataKey function is internal to metadata.ts
+// This test verifies the expected behavior/format
+function getMetadataKey(cacheKey: string): string {
+ return `${cacheKey}.meta.json`;
+}
+
+test('getMetadataKey appends .meta.json to cache key', () => {
+ expect(getMetadataKey('cache.tzst')).toBe('cache.tzst.meta.json');
+ expect(getMetadataKey('extensions-abc123.tzst')).toBe('extensions-abc123.tzst.meta.json');
+ expect(getMetadataKey('my-cache.tar.zst')).toBe('my-cache.tar.zst.meta.json');
+});
+
+test('getMetadataKey handles keys with path', () => {
+ expect(getMetadataKey('path/to/cache.tzst')).toBe('path/to/cache.tzst.meta.json');
+});
+
+test('getMetadataKey handles empty string', () => {
+ expect(getMetadataKey('')).toBe('.meta.json');
+});
diff --git a/tests/cache-utils.test.ts b/tests/cache-utils.test.ts
new file mode 100644
index 0000000..cd3beff
--- /dev/null
+++ b/tests/cache-utils.test.ts
@@ -0,0 +1,23 @@
+import { test, expect } from 'bun:test';
+import { generateCacheKey, CACHE_KEY_PREFIX } from '../scripts/cache/utils';
+
+test('generateCacheKey returns key with correct prefix', async () => {
+ const key = await generateCacheKey();
+
+ expect(key).toStartWith(CACHE_KEY_PREFIX);
+ expect(key).toEndWith('.tzst');
+});
+
+test('generateCacheKey produces consistent hash for same content', async () => {
+ const key1 = await generateCacheKey();
+ const key2 = await generateCacheKey();
+
+ expect(key1).toBe(key2);
+});
+
+test('generateCacheKey produces 64-character hash', async () => {
+ const key = await generateCacheKey();
+ const hashPart = key.replace(CACHE_KEY_PREFIX, '').replace('.tzst', '');
+
+ expect(hashPart).toHaveLength(64);
+});
diff --git a/tests/debounce.test.ts b/tests/debounce.test.ts
new file mode 100644
index 0000000..da029a1
--- /dev/null
+++ b/tests/debounce.test.ts
@@ -0,0 +1,60 @@
+import { expect, test } from 'bun:test';
+import { debounce } from '../src/lib/search/debounce';
+
+test('debounce delays function execution', async () => {
+ const results: number[] = [];
+ const debouncedFn = debounce((value: number) => results.push(value), 100);
+
+ debouncedFn(1);
+ debouncedFn(2);
+ debouncedFn(3);
+
+ expect(results.length).toBe(0);
+
+ await Bun.sleep(150);
+ expect(results.length).toBe(1);
+ expect(results[0]).toBe(3);
+});
+
+test('debounce resets timer on repeated calls', async () => {
+ const results: number[] = [];
+ const debouncedFn = debounce((value: number) => results.push(value), 100);
+
+ debouncedFn(1);
+ await Bun.sleep(50);
+ debouncedFn(2);
+ await Bun.sleep(50);
+ debouncedFn(3);
+ await Bun.sleep(50);
+
+ expect(results.length).toBe(0);
+
+ await Bun.sleep(150);
+ expect(results.length).toBe(1);
+ expect(results[0]).toBe(3);
+});
+
+test('debounce allows multiple executions over time', async () => {
+ const results: number[] = [];
+ const debouncedFn = debounce((value: number) => results.push(value), 50);
+
+ debouncedFn(1);
+ await Bun.sleep(100);
+
+ debouncedFn(2);
+ await Bun.sleep(100);
+
+ expect(results.length).toBe(2);
+ expect(results).toEqual([1, 2]);
+});
+
+test('debounce passes arguments correctly', async () => {
+ const results: [string, number][] = [];
+ const debouncedFn = debounce((text: string, num: number) => results.push([text, num]), 50);
+
+ debouncedFn('hello', 42);
+ await Bun.sleep(100);
+
+ expect(results.length).toBe(1);
+ expect(results[0]).toEqual(['hello', 42]);
+});
diff --git a/tests/logger.test.ts b/tests/logger.test.ts
new file mode 100644
index 0000000..e19f466
--- /dev/null
+++ b/tests/logger.test.ts
@@ -0,0 +1,26 @@
+import { expect, test } from 'bun:test';
+
+// Re-implement the helper function for testing (since it's private in the module)
+function formatTransferStats(bytes: number, elapsedSeconds: number): string {
+ const sizeMB = (bytes / (1024 * 1024)).toFixed(2);
+ const speedMBps = (bytes / (1024 * 1024) / elapsedSeconds).toFixed(2);
+ return `${sizeMB} MB (${speedMBps} MB/s)`;
+}
+
+test('formatTransferStats formats bytes correctly', () => {
+ // 10 MB over 1 second
+ expect(formatTransferStats(10 * 1024 * 1024, 1)).toBe('10.00 MB (10.00 MB/s)');
+
+ // 5.5 MB over 2 seconds (2.75 MB/s)
+ expect(formatTransferStats(5.5 * 1024 * 1024, 2)).toBe('5.50 MB (2.75 MB/s)');
+
+ // 1 KB (0.00 MB)
+ expect(formatTransferStats(1024, 1)).toBe('0.00 MB (0.00 MB/s)');
+});
+
+test('formatTransferStats handles zero elapsed time', () => {
+ // Should handle gracefully (Infinity would be wrong)
+ const bytes = 1024 * 1024; // 1 MB
+ const result = formatTransferStats(bytes, 0);
+ expect(result).toContain('1.00 MB');
+});
diff --git a/tests/search-utils.test.ts b/tests/search-utils.test.ts
new file mode 100644
index 0000000..18d3b45
--- /dev/null
+++ b/tests/search-utils.test.ts
@@ -0,0 +1,24 @@
+import { expect, test } from 'bun:test';
+import { findSourceByFormattedName, formatSourceName } from '../src/lib/search/utils';
+
+test('formatSourceName converts to lowercase and replaces spaces with dots', () => {
+ expect(formatSourceName('Example Source')).toBe('example.source');
+ expect(formatSourceName('Multiple Spaces')).toBe('multiple.spaces');
+ expect(formatSourceName('ALREADY LOWERCASE')).toBe('already.lowercase');
+ expect(formatSourceName('Mixed Case')).toBe('mixed.case');
+});
+
+test('findSourceByFormattedName returns "all" for "all"', () => {
+ expect(findSourceByFormattedName('all', ['Source A', 'Source B'])).toBe('all');
+});
+
+test('findSourceByFormattedName finds matching source', () => {
+ const sources = ['Example Source', 'Another Source', 'Test'];
+ expect(findSourceByFormattedName('example.source', sources)).toBe('Example Source');
+ expect(findSourceByFormattedName('another.source', sources)).toBe('Another Source');
+});
+
+test('findSourceByFormattedName returns "all" when no match found', () => {
+ const sources = ['Example Source', 'Another Source'];
+ expect(findSourceByFormattedName('non.existent', sources)).toBe('all');
+});
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..f371425
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,20 @@
+{
+ "extends": "./.svelte-kit/tsconfig.json",
+ "compilerOptions": {
+ "rewriteRelativeImportExtensions": true,
+ "allowJs": true,
+ "checkJs": true,
+ "esModuleInterop": true,
+ "forceConsistentCasingInFileNames": true,
+ "resolveJsonModule": true,
+ "skipLibCheck": true,
+ "sourceMap": true,
+ "strict": true,
+ "moduleResolution": "bundler"
+ }
+ // Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias
+ // except $lib which is handled by https://svelte.dev/docs/kit/configuration#files
+ //
+ // To make changes to top-level options such as include and exclude, we recommend extending
+ // the generated config; see https://svelte.dev/docs/kit/configuration#typescript
+}
diff --git a/vite.config.ts b/vite.config.ts
new file mode 100644
index 0000000..a3eb481
--- /dev/null
+++ b/vite.config.ts
@@ -0,0 +1,6 @@
+import { sveltekit } from '@sveltejs/kit/vite';
+import { defineConfig } from 'vite';
+
+export default defineConfig({
+ plugins: [sveltekit()]
+});
diff --git a/wrangler.toml b/wrangler.toml
new file mode 100644
index 0000000..003b85f
--- /dev/null
+++ b/wrangler.toml
@@ -0,0 +1,5 @@
+name = "x"
+main = "scripts/worker.ts"
+compatibility_date = "2024-11-23"
+
+assets = { directory = "./dist", binding = "ASSETS" }
\ No newline at end of file