diff --git a/README.md b/README.md index ed9054d..56e081d 100644 --- a/README.md +++ b/README.md @@ -8,16 +8,19 @@ This is a monorepo containing multiple standalone projects. Each project lives i ```plaintext code-samples/ -├── typesense-angular-search-bar/ # Angular + Typesense search implementation -├── typesense-astro-search/ # Astro + Typesense search implementation -├── typesense-gin-full-text-search/ # Go (Gin) + Typesense backend implementation -├── typesense-next-search-bar/ # Next.js + Typesense search implementation -├── typesense-nuxt-search-bar/ # Nuxt.js + Typesense search implementation -├── typesense-qwik-js-search/ # Qwik + Typesense search implementation -├── typesense-react-native-search-bar/ # React Native + Typesense search implementation -├── typesense-solid-js-search/ # SolidJS + Typesense search implementation -├── typesense-vanilla-js-search/ # Vanilla JS + Typesense search implementation -└── README.md # You are here +├── typesense-angular-search-bar/ # Angular + Typesense search implementation +├── typesense-astro-search/ # Astro + Typesense search implementation +├── typesense-gin-full-text-search/ # Go (Gin) + Typesense backend implementation +├── typesense-next-search-bar/ # Next.js + Typesense search implementation +├── typesense-nuxt-search-bar/ # Nuxt.js + Typesense search implementation +├── typesense-qwik-js-search/ # Qwik + Typesense search implementation +├── typesense-react-native-search-bar/ # React Native + Typesense search implementation +├── typesense-solid-js-search/ # SolidJS + Typesense search implementation +├── typesense-springboot-full-text-search/ # Spring Boot + Typesense backend implementation +├── typesense-node-prisma-full-text-search/ # Node.js (Express) + Typesense + Prisma backend implementation +├── typesense-node-sequelize-full-text-search/ # Node.js (Express) + Typesense + Sequelize backend implementation +├── typesense-vanilla-js-search/ # Vanilla JS + Typesense search implementation +└── README.md # You 
are here ``` ## Projects @@ -32,6 +35,9 @@ code-samples/ | [typesense-qwik-js-search](./typesense-qwik-js-search) | Qwik | Resumable search bar with real-time search and modern UI | | [typesense-react-native-search-bar](./typesense-react-native-search-bar) | React Native | A mobile search bar with instant search capabilities | | [typesense-solid-js-search](./typesense-solid-js-search) | SolidJS | A modern search bar with instant search capabilities | +| [typesense-springboot-full-text-search](./typesense-springboot-full-text-search) | Spring Boot | Backend API with full-text search using Typesense | +| [typesense-node-prisma-full-text-search](./typesense-node-prisma-full-text-search) | Node.js (Express) + Typesense + Prisma | Backend API with full-text search using Typesense | +| [typesense-node-sequelize-full-text-search](./typesense-node-sequelize-full-text-search) | Node.js (Express) + Typesense + Sequelize | Backend API with full-text search using Typesense | | [typesense-vanilla-js-search](./typesense-vanilla-js-search) | Vanilla JS | A modern search bar with instant search capabilities | ## Getting Started diff --git a/typesense-node-prisma-full-text-search/.env.example b/typesense-node-prisma-full-text-search/.env.example new file mode 100644 index 0000000..9b6d483 --- /dev/null +++ b/typesense-node-prisma-full-text-search/.env.example @@ -0,0 +1,18 @@ +# Server Configuration +PORT=3000 + +# Database Configuration +DB_HOST=localhost +DB_USER=postgres +DB_PASSWORD=password +DB_NAME=typesense_books +DB_PORT=5432 + +# Prisma Configuration (read by prisma.config.ts and src/config/database.ts) +DATABASE_URL=postgresql://postgres:password@localhost:5432/typesense_books + +# Typesense Configuration +TYPESENSE_HOST=localhost +TYPESENSE_PORT=8108 +TYPESENSE_PROTOCOL=http +TYPESENSE_API_KEY=xyz diff --git a/typesense-node-prisma-full-text-search/.gitignore b/typesense-node-prisma-full-text-search/.gitignore new file mode 100644 index 0000000..126419d --- /dev/null +++ b/typesense-node-prisma-full-text-search/.gitignore @@ -0,0 +1,5 @@ +node_modules +# Keep environment variables out of version control +.env + 
+/src/generated/prisma diff --git a/typesense-node-prisma-full-text-search/README.md b/typesense-node-prisma-full-text-search/README.md new file mode 100644 index 0000000..652bfc0 --- /dev/null +++ b/typesense-node-prisma-full-text-search/README.md @@ -0,0 +1,231 @@ +# Node.js Express Full-Text Search with Typesense + +A production-ready RESTful search API built with Node.js, Express, PostgreSQL (Prisma), and Typesense. Features full-text search, CRUD operations, real-time async indexing, and background sync workers. + +## Tech Stack + +- Node.js +- Express +- PostgreSQL with Prisma ORM +- Typesense +- TypeScript +- Docker + +## Prerequisites + +- Node.js v18+ installed +- Docker (for Typesense and PostgreSQL) +- Basic knowledge of REST APIs and SQL + +## Quick Start + +### 1. Clone the repository + +```bash +git clone https://github.com/typesense/code-samples.git +cd code-samples/typesense-node-prisma-full-text-search +``` + +### 2. Install dependencies + +```bash +npm install +``` + +### 3. Start Typesense and PostgreSQL + +Run Typesense and PostgreSQL using Docker: + +```bash +# Start Typesense (27.1 shown; check https://typesense.org/docs/guide/install-typesense.html for the latest version) +docker run -d \ + -p 8108:8108 \ + -v typesense-data:/data \ + typesense/typesense:27.1 \ + --data-dir /data \ + --api-key=xyz \ + --enable-cors + +# Start PostgreSQL +docker run -d \ + -p 5432:5432 \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=password \ + -e POSTGRES_DB=typesense_books \ + -v postgres-data:/var/lib/postgresql/data \ + postgres:15 +``` + +### 4. Set up environment variables + +Create a `.env` file in the project root by copying `.env.example`: + +```bash +cp .env.example .env +``` + +### 5. 
Project Structure + +```text +├── prisma/ +│ └── schema.prisma # Prisma schema and model definitions +├── src/ +│ ├── config/ +│ │ ├── database.ts # Prisma Client instantiation +│ │ └── env.ts # Environment variable validation +│ ├── routes/ +│ │ ├── books.ts # CRUD endpoints for books +│ │ └── search.ts # Search and sync endpoints +│ ├── search/ +│ │ ├── client.ts # Typesense client initialization +│ │ ├── collections.ts # Typesense collection schema +│ │ ├── sync.ts # Sync logic (incremental, full, soft delete) +│ │ └── worker.ts # Background sync worker +│ └── server.ts # Main application entry point +├── package.json +├── tsconfig.json +└── .env +``` + +### 6. Database Migrations + +**Development Environment:** +When building out your schema or making changes during development, use the `db push` command. It pushes your schema state directly to the database without generating history: +```bash +npx prisma db push +``` + +**Production Environment:** +For production, you should use Prisma Migrate to generate and apply consistent database migrations. +Generate a migration (run this in dev when ready): +```bash +npx prisma migrate dev --name init_books +``` +Apply migrations safely in production (e.g., during your CI/CD pipeline): +```bash +npx prisma migrate deploy +``` + +### 7. Start the development server + +```bash +npm run dev +``` + +The server will automatically restart when you make changes to any TypeScript file. + +Open [http://localhost:3000](http://localhost:3000) in your browser. + +### 8. 
API Endpoints + +#### Search + +```bash +GET /search?q= +``` + +Example: + +```bash +curl "http://localhost:3000/search?q=harry" +``` + +#### CRUD Operations + +**Create a book:** + +```bash +POST /books +Content-Type: application/json + +{ + "title": "The Go Programming Language", + "authors": ["Alan Donovan", "Brian Kernighan"], + "publication_year": 2015, + "average_rating": 4.5, + "image_url": "https://example.com/image.jpg", + "ratings_count": 1000 +} +``` + +**Get a book:** + +```bash +GET /books/:id +``` + +**Get all books (with pagination):** + +```bash +GET /books?page=1&limit=10 +``` + +**Update a book:** + +```bash +PUT /books/:id +Content-Type: application/json + +{ + "title": "Updated Title", + "authors": ["Author Name"], + "publication_year": 2024, + "average_rating": 4.8, + "image_url": "https://example.com/updated.jpg", + "ratings_count": 1500 +} +``` + +**Delete a book (soft delete):** + +```bash +DELETE /books/:id +``` + +#### Sync Operations + +**Trigger manual sync:** + +```bash +POST /sync +``` + +**Check sync status:** + +```bash +GET /sync/status +``` + +### 9. How It Works + +#### Architecture + +```plaintext +User Request + ↓ +Express API (CRUD) + ↓ +PostgreSQL (Source of Truth) + ↓ +Async Sync → Typesense (Search Index) + ↑ +Background Worker (Every 60s) +``` + +#### Sync Strategies + +##### 1. Startup Sync (Smart) + +On every server start, the sync worker checks whether the Typesense collection already has documents. If empty, it seeds `lastSyncTime` to zero and runs a full sync. If it has data, it runs an incremental sync since `MAX(updated_at)` of PostgreSQL books table. + +##### 2. Real-time Sync (Async) + +Triggered on Create, Update, Delete operations in the background. + +##### 3. Background Periodic Sync + +Runs every 60 seconds automatically, doing incremental sync. + +##### 4. 
Manual Sync + +Endpoint: `POST /sync` diff --git a/typesense-node-prisma-full-text-search/package.json b/typesense-node-prisma-full-text-search/package.json new file mode 100644 index 0000000..d7d7c65 --- /dev/null +++ b/typesense-node-prisma-full-text-search/package.json @@ -0,0 +1,30 @@ +{ + "name": "typesense-node-prisma-search-app", + "version": "1.0.0", + "description": "A production-ready RESTful search API built with Node.js, Express, PostgreSQL, and Typesense.", + "main": "dist/server.js", + "scripts": { + "start": "node dist/server.js", + "dev": "ts-node-dev --respawn --transpile-only src/server.ts", + "build": "tsc" + }, + "dependencies": { + "@prisma/adapter-pg": "^7.8.0", + "@prisma/client": "^7.8.0", + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "express": "^4.19.2", + "node-cron": "^3.0.3", + "pg": "^8.20.0", + "typesense": "^1.8.2" + }, + "devDependencies": { + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/node": "^20.12.7", + "@types/node-cron": "^3.0.11", + "prisma": "^7.8.0", + "ts-node-dev": "^2.0.0", + "typescript": "^5.4.5" + } +} diff --git a/typesense-node-prisma-full-text-search/prisma.config.ts b/typesense-node-prisma-full-text-search/prisma.config.ts new file mode 100644 index 0000000..831a20f --- /dev/null +++ b/typesense-node-prisma-full-text-search/prisma.config.ts @@ -0,0 +1,14 @@ +// This file was generated by Prisma, and assumes you have installed the following: +// npm install --save-dev prisma dotenv +import "dotenv/config"; +import { defineConfig } from "prisma/config"; + +export default defineConfig({ + schema: "prisma/schema.prisma", + migrations: { + path: "prisma/migrations", + }, + datasource: { + url: process.env["DATABASE_URL"], + }, +}); diff --git a/typesense-node-prisma-full-text-search/prisma/migrations/20260502073537_init_books/migration.sql b/typesense-node-prisma-full-text-search/prisma/migrations/20260502073537_init_books/migration.sql new file mode 100644 index 0000000..235560e --- /dev/null 
+++ b/typesense-node-prisma-full-text-search/prisma/migrations/20260502073537_init_books/migration.sql @@ -0,0 +1,15 @@ +-- CreateTable +CREATE TABLE "books" ( + "id" SERIAL NOT NULL, + "title" VARCHAR(255) NOT NULL, + "authors" JSONB NOT NULL DEFAULT '[]', + "publication_year" INTEGER, + "average_rating" DECIMAL(3,2), + "image_url" VARCHAR(255), + "ratings_count" INTEGER, + "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" TIMESTAMP(3) NOT NULL, + "deleted_at" TIMESTAMP(3), + + CONSTRAINT "books_pkey" PRIMARY KEY ("id") +); diff --git a/typesense-node-prisma-full-text-search/prisma/migrations/migration_lock.toml b/typesense-node-prisma-full-text-search/prisma/migrations/migration_lock.toml new file mode 100644 index 0000000..044d57c --- /dev/null +++ b/typesense-node-prisma-full-text-search/prisma/migrations/migration_lock.toml @@ -0,0 +1,3 @@ +# Please do not edit this file manually +# It should be added in your version-control system (e.g., Git) +provider = "postgresql" diff --git a/typesense-node-prisma-full-text-search/prisma/schema.prisma b/typesense-node-prisma-full-text-search/prisma/schema.prisma new file mode 100644 index 0000000..9b5e64d --- /dev/null +++ b/typesense-node-prisma-full-text-search/prisma/schema.prisma @@ -0,0 +1,22 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "postgresql" +} + +model Book { + id Int @id @default(autoincrement()) + title String @db.VarChar(255) + authors Json @default("[]") + publication_year Int? + average_rating Decimal? @db.Decimal(3, 2) + image_url String? @db.VarChar(255) + ratings_count Int? + created_at DateTime @default(now()) + updated_at DateTime @updatedAt + deleted_at DateTime? 
+ + @@map("books") +} diff --git a/typesense-node-prisma-full-text-search/src/config/database.ts b/typesense-node-prisma-full-text-search/src/config/database.ts new file mode 100644 index 0000000..3bc2212 --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/config/database.ts @@ -0,0 +1,13 @@ +import { PrismaClient } from '@prisma/client'; +import { PrismaPg } from '@prisma/adapter-pg'; +import { Pool } from 'pg'; + +const connectionString = process.env.DATABASE_URL; + +const pool = new Pool({ connectionString }); +const adapter = new PrismaPg(pool); + +export const prisma = new PrismaClient({ + adapter, + log: ['query', 'info', 'warn', 'error'], +}); diff --git a/typesense-node-prisma-full-text-search/src/config/env.ts b/typesense-node-prisma-full-text-search/src/config/env.ts new file mode 100644 index 0000000..ca37595 --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/config/env.ts @@ -0,0 +1,18 @@ +import dotenv from 'dotenv'; + +dotenv.config(); + +export const env = { + PORT: parseInt(process.env.PORT || '3000', 10), + + DB_HOST: process.env.DB_HOST || 'localhost', + DB_USER: process.env.DB_USER || 'postgres', + DB_PASSWORD: process.env.DB_PASSWORD || 'password', + DB_NAME: process.env.DB_NAME || 'typesense_books', + DB_PORT: parseInt(process.env.DB_PORT || '5432', 10), + + TYPESENSE_HOST: process.env.TYPESENSE_HOST || 'localhost', + TYPESENSE_PORT: parseInt(process.env.TYPESENSE_PORT || '8108', 10), + TYPESENSE_PROTOCOL: process.env.TYPESENSE_PROTOCOL || 'http', + TYPESENSE_API_KEY: process.env.TYPESENSE_API_KEY || 'xyz', +}; diff --git a/typesense-node-prisma-full-text-search/src/routes/books.ts b/typesense-node-prisma-full-text-search/src/routes/books.ts new file mode 100644 index 0000000..5c9702c --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/routes/books.ts @@ -0,0 +1,155 @@ +import { Router, type Request, type Response } from 'express'; +import { prisma } from '../config/database'; +import type { Book } from 
'@prisma/client'; +import { typesenseClient } from '../search/client'; +import { BOOKS_COLLECTION_NAME } from '../search/collections'; + +const router = Router(); + +// Helper for real-time async sync +const syncBookToTypesense = async (book: Book) => { + try { + // Prisma returns JSON as Prisma.JsonValue, we cast to array for typesense + const authorsArray = Array.isArray(book.authors) ? book.authors : [book.authors]; + + const document = { + id: book.id.toString(), + title: book.title, + authors: authorsArray as string[], + publication_year: book.publication_year || 0, + average_rating: book.average_rating ? Number(book.average_rating) : 0, + image_url: book.image_url || '', + ratings_count: book.ratings_count || 0, + }; + + console.log(`Syncing book ${book.id} to Typesense:`, document.title); + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().upsert(document); + console.log(`Successfully synced book ${book.id} to Typesense.`); + } catch (err) { + console.error(`Failed to sync book ${book.id} to Typesense:`, err); + throw err; + } +}; + +const deleteBookFromTypesense = async (id: number) => { + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents(id.toString()).delete(); + } catch (err) { + console.error(`Failed to delete book ${id} from Typesense`, err); + } +}; + +// GET /books - Get all books with pagination +router.get('/', async (req: Request, res: Response) => { + const page = parseInt(req.query.page as string || '1', 10); + const limit = parseInt(req.query.limit as string || '10', 10); + const offset = (page - 1) * limit; + + try { + const [count, rows] = await Promise.all([ + prisma.book.count({ where: { deleted_at: null } }), + prisma.book.findMany({ + where: { deleted_at: null }, + skip: offset, + take: limit, + orderBy: { id: 'asc' } + }) + ]); + + res.json({ + total: count, + page, + limit, + data: rows + }); + } catch (error) { + console.error(error); + res.status(500).json({ error: 'Failed to fetch books' 
}); + } +}); + +// GET /books/:id - Get a book +router.get('/:id', async (req: Request, res: Response) => { + try { + const book = await prisma.book.findUnique({ + where: { + id: parseInt(req.params.id), + deleted_at: null + } + }); + + if (!book) { + return res.status(404).json({ error: 'Book not found' }); + } + res.json(book); + } catch (error) { + res.status(500).json({ error: 'Failed to fetch book' }); + } +}); + +// POST /books - Create a book +router.post('/', async (req: Request, res: Response) => { + try { + const book = await prisma.book.create({ + data: req.body + }); + + // Real-time async sync + await syncBookToTypesense(book); + + res.status(201).json(book); + } catch (error) { + res.status(400).json({ error: (error as Error).message }); + } +}); + +// PUT /books/:id - Update a book +router.put('/:id', async (req: Request, res: Response) => { + try { + const bookId = parseInt(req.params.id); + const existingBook = await prisma.book.findUnique({ where: { id: bookId, deleted_at: null } }); + + if (!existingBook) { + return res.status(404).json({ error: 'Book not found' }); + } + + const updatedBook = await prisma.book.update({ + where: { id: bookId }, + data: req.body + }); + + // Real-time async sync + await syncBookToTypesense(updatedBook); + + res.json(updatedBook); + } catch (error) { + res.status(400).json({ error: (error as Error).message }); + } +}); + +// DELETE /books/:id - Delete a book +router.delete('/:id', async (req: Request, res: Response) => { + try { + const bookId = parseInt(req.params.id); + const existingBook = await prisma.book.findUnique({ where: { id: bookId, deleted_at: null } }); + + if (!existingBook) { + return res.status(404).json({ error: 'Book not found' }); + } + + // Soft delete + await prisma.book.update({ + where: { id: bookId }, + data: { deleted_at: new Date() } + }); + + // Real-time async sync + deleteBookFromTypesense(bookId); + + res.status(204).send(); + } catch (error) { + res.status(500).json({ error: (error as 
Error).message }); + } +}); + +export default router; diff --git a/typesense-node-prisma-full-text-search/src/routes/search.ts b/typesense-node-prisma-full-text-search/src/routes/search.ts new file mode 100644 index 0000000..7daa5eb --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/routes/search.ts @@ -0,0 +1,53 @@ +import { Router, type Request, type Response } from 'express'; +import { typesenseClient } from '../search/client'; +import { BOOKS_COLLECTION_NAME } from '../search/collections'; +import { runFullSync, lastSyncTime } from '../search/sync'; +import { getSyncStatus } from '../search/worker'; + +const router = Router(); + +// GET /search?q= +router.get('/search', async (req: Request, res: Response) => { + const query = req.query.q as string || ''; + + try { + const searchResults = await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().search({ + q: query, + query_by: 'title,authors', + }); + + res.json({ + query, + found: searchResults.found, + results: searchResults.hits, + facet_counts: searchResults.facet_counts || [], + }); + } catch (_error) { + res.status(500).json({ error: 'Failed to fetch books' }); + } +}); + +// POST /sync - Trigger manual sync +router.post('/sync', async (_req: Request, res: Response) => { + try { + // We run full sync here for manual trigger, but you could also run incremental + await runFullSync(); + + res.json({ + message: 'Sync completed', + syncedAt: lastSyncTime.toISOString() + }); + } catch (_error) { + res.status(500).json({ error: 'Failed to sync books' }); + } +}); + +// GET /sync/status - Check sync status +router.get('/sync/status', (_req: Request, res: Response) => { + res.json({ + lastSyncTime: lastSyncTime.toISOString(), + syncWorkerRunning: getSyncStatus().syncWorkerRunning + }); +}); + +export default router; diff --git a/typesense-node-prisma-full-text-search/src/search/client.ts b/typesense-node-prisma-full-text-search/src/search/client.ts new file mode 100644 index 0000000..97ca2dc 
--- /dev/null +++ b/typesense-node-prisma-full-text-search/src/search/client.ts @@ -0,0 +1,14 @@ +import { Client } from 'typesense'; +import { env } from '../config/env'; + +export const typesenseClient = new Client({ + nodes: [ + { + host: env.TYPESENSE_HOST, + port: env.TYPESENSE_PORT, + protocol: env.TYPESENSE_PROTOCOL, + }, + ], + apiKey: env.TYPESENSE_API_KEY, + connectionTimeoutSeconds: 5, +}); diff --git a/typesense-node-prisma-full-text-search/src/search/collections.ts b/typesense-node-prisma-full-text-search/src/search/collections.ts new file mode 100644 index 0000000..28d478d --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/search/collections.ts @@ -0,0 +1,35 @@ +import type { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections'; +import { typesenseClient } from './client'; + +export const BOOKS_COLLECTION_NAME = 'books'; + +export const booksCollectionSchema: CollectionCreateSchema = { + name: BOOKS_COLLECTION_NAME, + fields: [ + { name: 'id', type: 'string' }, + { name: 'title', type: 'string' }, + { name: 'authors', type: 'string[]', facet: true }, + { name: 'publication_year', type: 'int32', facet: true, optional: true }, + { name: 'average_rating', type: 'float', facet: true, optional: true }, + { name: 'image_url', type: 'string', optional: true }, + { name: 'ratings_count', type: 'int32', optional: true }, + ], +}; + +export async function initializeTypesense(): Promise { + try { + const collections = await typesenseClient.collections().retrieve(); + const collectionExists = collections.some((c) => c.name === BOOKS_COLLECTION_NAME); + + if (!collectionExists) { + console.log(`Creating collection ${BOOKS_COLLECTION_NAME}...`); + await typesenseClient.collections().create(booksCollectionSchema); + console.log(`Collection ${BOOKS_COLLECTION_NAME} created successfully.`); + } else { + console.log(`Collection ${BOOKS_COLLECTION_NAME} already exists.`); + } + } catch (error) { + console.error('Error initializing Typesense 
collection:', error); + throw error; + } +} diff --git a/typesense-node-prisma-full-text-search/src/search/sync.ts b/typesense-node-prisma-full-text-search/src/search/sync.ts new file mode 100644 index 0000000..d6173de --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/search/sync.ts @@ -0,0 +1,184 @@ +import { prisma } from '../config/database'; +import { typesenseClient } from './client'; +import { BOOKS_COLLECTION_NAME } from './collections'; +import type { Book } from '@prisma/client'; + +export let lastSyncTime: Date = new Date(0); + +const BATCH_SIZE = 1000; + +const mapBookToTypesense = (b: Book) => ({ + id: b.id.toString(), + title: b.title, + authors: (Array.isArray(b.authors) ? b.authors : [b.authors]) as string[], + publication_year: b.publication_year || 0, + average_rating: b.average_rating ? Number(b.average_rating) : 0, + image_url: b.image_url || '', + ratings_count: b.ratings_count || 0, +}); + +export async function runFullSync() { + console.log('Running full sync...'); + let lastId = 0; + let hasMore = true; + let totalProcessed = 0; + + while (hasMore) { + let books: Book[]; + try { + books = await prisma.book.findMany({ + where: { + id: { gt: lastId }, + deleted_at: null + }, + take: BATCH_SIZE, + orderBy: { id: 'asc' } + }); + } catch (err) { + console.error('Database error during full sync fetching:', err); + break; // Abort this sync run gracefully on DB failure + } + + if (books.length === 0) { + hasMore = false; + break; + } + + lastId = books[books.length - 1].id; + + const documents = books.map(mapBookToTypesense); + + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().import(documents, { action: 'upsert' }); + totalProcessed += documents.length; + console.log(`Full sync: Processed ${totalProcessed} books.`); + } catch (err) { + console.error('Error importing documents during full sync', err); + break; + } + } + + // Update lastSyncTime to now + lastSyncTime = new Date(); + console.log('Full sync 
completed.'); +} + +export async function runIncrementalSync() { + console.log(`Running incremental sync since ${lastSyncTime.toISOString()}...`); + + // 1. Process newly created or updated books in batches + let lastUpsertId = 0; + let hasMoreUpserts = true; + let totalUpserted = 0; + + while (hasMoreUpserts) { + let updatedBooks: Book[]; + try { + updatedBooks = await prisma.book.findMany({ + where: { + updated_at: { gt: lastSyncTime }, + deleted_at: null, + id: { gt: lastUpsertId } + }, + take: BATCH_SIZE, + orderBy: { id: 'asc' } + }); + } catch (err) { + console.error('Database error during incremental sync upsert fetching:', err); + break; + } + + if (updatedBooks.length === 0) { + hasMoreUpserts = false; + break; + } + + lastUpsertId = updatedBooks[updatedBooks.length - 1].id; + const documents = updatedBooks.map(mapBookToTypesense); + + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().import(documents, { action: 'upsert' }); + totalUpserted += documents.length; + } catch (err) { + console.error('Error upserting documents in incremental sync', err); + break; + } + } + + if (totalUpserted > 0) { + console.log(`Incremental sync: Upserted ${totalUpserted} books.`); + } + + // 2. 
Process soft-deleted books in batches + let lastDeleteId = 0; + let hasMoreDeletes = true; + let totalDeleted = 0; + + while (hasMoreDeletes) { + let deletedBooks: Book[]; + try { + deletedBooks = await prisma.book.findMany({ + where: { + deleted_at: { gt: lastSyncTime }, + id: { gt: lastDeleteId } + }, + take: BATCH_SIZE, + orderBy: { id: 'asc' } + }); + } catch (err) { + console.error('Database error during incremental sync delete fetching:', err); + break; + } + + if (deletedBooks.length === 0) { + hasMoreDeletes = false; + break; + } + + lastDeleteId = deletedBooks[deletedBooks.length - 1].id; + const ids = deletedBooks.map(b => b.id.toString()); + + try { + // Bulk delete in Typesense using filter_by + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().delete({ + filter_by: `id:=[${ids.join(',')}]` + }); + totalDeleted += deletedBooks.length; + } catch (err) { + console.error('Error deleting documents in incremental sync', err); + break; + } + } + + if (totalDeleted > 0) { + console.log(`Incremental sync: Deleted ${totalDeleted} books from Typesense.`); + } + + lastSyncTime = new Date(); + console.log('Incremental sync completed.'); +} + +export async function determineAndRunStartupSync() { + try { + const searchStats = await typesenseClient.collections(BOOKS_COLLECTION_NAME).retrieve(); + const docCount = searchStats.num_documents; + + if (docCount === 0) { + // Empty Typesense collection, full sync + await runFullSync(); + } else { + // Typesense has data, get latest updated_at from DB + const latestBook = await prisma.book.findFirst({ + orderBy: { updated_at: 'desc' } + }); + + if (latestBook?.updated_at) { + lastSyncTime = latestBook.updated_at; + } + + await runIncrementalSync(); + } + } catch (error) { + console.error('Error during startup sync:', error); + } +} diff --git a/typesense-node-prisma-full-text-search/src/search/worker.ts b/typesense-node-prisma-full-text-search/src/search/worker.ts new file mode 100644 index 
0000000..775aa48 --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/search/worker.ts @@ -0,0 +1,31 @@ +import cron from 'node-cron'; +import { runIncrementalSync } from './sync'; + +let isSyncRunning = false; + +export function startBackgroundSyncWorker() { + console.log('Starting background periodic sync worker (every 60s)...'); + + // Runs every minute + cron.schedule('* * * * *', async () => { + if (isSyncRunning) { + console.log('Sync already running, skipping this iteration.'); + return; + } + + isSyncRunning = true; + try { + await runIncrementalSync(); + } catch (error) { + console.error('Error in background sync worker:', error); + } finally { + isSyncRunning = false; + } + }); +} + +export function getSyncStatus() { + return { + syncWorkerRunning: isSyncRunning, + }; +} diff --git a/typesense-node-prisma-full-text-search/src/server.ts b/typesense-node-prisma-full-text-search/src/server.ts new file mode 100644 index 0000000..51d78cb --- /dev/null +++ b/typesense-node-prisma-full-text-search/src/server.ts @@ -0,0 +1,49 @@ +import express from 'express'; +import cors from 'cors'; +import { env } from './config/env'; +import { prisma } from './config/database'; +import { initializeTypesense } from './search/collections'; +import { determineAndRunStartupSync } from './search/sync'; +import { startBackgroundSyncWorker } from './search/worker'; + +import booksRouter from './routes/books'; +import searchRouter from './routes/search'; + +const app = express(); + +app.use(cors()); +app.use(express.json()); + +// Routes +app.use('/books', booksRouter); +app.use('/', searchRouter); + +async function startServer() { + try { + // 1. Connect to PostgreSQL + console.log('Connecting to PostgreSQL database...'); + await prisma.$connect(); + console.log('Database connected.'); + + // 2. Initialize Typesense + console.log('Initializing Typesense...'); + await initializeTypesense(); + + // 3. 
Run Startup Sync + console.log('Running startup sync...'); + await determineAndRunStartupSync(); + + // 4. Start Background Worker + startBackgroundSyncWorker(); + + // 5. Start Express API + app.listen(env.PORT, () => { + console.log(`Server is running on http://localhost:${env.PORT}`); + }); + } catch (error) { + console.error('Failed to start server:', error); + process.exit(1); + } +} + +startServer(); diff --git a/typesense-node-prisma-full-text-search/tsconfig.json b/typesense-node-prisma-full-text-search/tsconfig.json new file mode 100644 index 0000000..24cf495 --- /dev/null +++ b/typesense-node-prisma-full-text-search/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "es2022", + "module": "commonjs", + "rootDir": "./src", + "outDir": "./dist", + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true + }, + "include": ["src/**/*"] +} diff --git a/typesense-node-sequelize-full-text-search/.env.example b/typesense-node-sequelize-full-text-search/.env.example new file mode 100644 index 0000000..9b6d483 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/.env.example @@ -0,0 +1,15 @@ +# Server Configuration +PORT=3000 + +# Database Configuration +DB_HOST=localhost +DB_USER=postgres +DB_PASSWORD=password +DB_NAME=typesense_books +DB_PORT=5432 + +# Typesense Configuration +TYPESENSE_HOST=localhost +TYPESENSE_PORT=8108 +TYPESENSE_PROTOCOL=http +TYPESENSE_API_KEY=xyz diff --git a/typesense-node-sequelize-full-text-search/README.md b/typesense-node-sequelize-full-text-search/README.md new file mode 100644 index 0000000..447e8e8 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/README.md @@ -0,0 +1,212 @@ +# Node.js Express Full-Text Search with Typesense + +A production-ready RESTful search API built with Node.js, Express, PostgreSQL (Sequelize), and Typesense. 
Features full-text search, CRUD operations, real-time async indexing, and background sync workers. + +## Tech Stack + +- Node.js +- Express +- PostgreSQL with Sequelize +- Typesense +- TypeScript +- Docker + +## Prerequisites + +- Node.js v18+ installed +- Docker (for Typesense and PostgreSQL) +- Basic knowledge of REST APIs and SQL + +## Quick Start + +### 1. Clone the repository + +```bash +git clone https://github.com/typesense/code-samples.git +cd code-samples/typesense-node-sequelize-full-text-search +``` + +### 2. Install dependencies + +```bash +npm install +``` + +### 3. Start Typesense and PostgreSQL + +Run Typesense and PostgreSQL using Docker: + +```bash +# Start Typesense (27.1 shown; check https://typesense.org/docs/guide/install-typesense.html for the latest version) +docker run -d \ + -p 8108:8108 \ + -v typesense-data:/data \ + typesense/typesense:27.1 \ + --data-dir /data \ + --api-key=xyz \ + --enable-cors + +# Start PostgreSQL +docker run -d \ + -p 5432:5432 \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=password \ + -e POSTGRES_DB=typesense_books \ + -v postgres-data:/var/lib/postgresql/data \ + postgres:15 +``` + +### 4. Set up environment variables + +Create a `.env` file in the project root by copying `.env.example`: + +```bash +cp .env.example .env +``` + +### 5. Project Structure + +```text +├── src/ +│ ├── config/ +│ │ ├── database.ts # Sequelize configuration +│ │ └── env.ts # Environment variable validation +│ ├── models/ +│ │ └── Book.ts # Sequelize Book model +│ ├── routes/ +│ │ ├── books.ts # CRUD endpoints for books +│ │ └── search.ts # Search and sync endpoints +│ ├── search/ +│ │ ├── client.ts # Typesense client initialization +│ │ ├── collections.ts # Typesense collection schema +│ │ ├── sync.ts # Sync logic (incremental, full, soft delete) +│ │ └── worker.ts # Background sync worker +│ └── server.ts # Main application entry point +├── package.json +├── tsconfig.json +└── .env +``` + +### 6. 
Start the development server + +```bash +npm run dev +``` + +The server will automatically restart when you make changes to any TypeScript file. + +Open [http://localhost:3000](http://localhost:3000) in your browser. + +### 7. API Endpoints + +#### Search + +```bash +GET /search?q= +``` + +Example: + +```bash +curl "http://localhost:3000/search?q=harry" +``` + +#### CRUD Operations + +**Create a book:** + +```bash +POST /books +Content-Type: application/json + +{ + "title": "The Go Programming Language", + "authors": ["Alan Donovan", "Brian Kernighan"], + "publication_year": 2015, + "average_rating": 4.5, + "image_url": "https://example.com/image.jpg", + "ratings_count": 1000 +} +``` + +**Get a book:** + +```bash +GET /books/:id +``` + +**Get all books (with pagination):** + +```bash +GET /books?page=1&limit=10 +``` + +**Update a book:** + +```bash +PUT /books/:id +Content-Type: application/json + +{ + "title": "Updated Title", + "authors": ["Author Name"], + "publication_year": 2024, + "average_rating": 4.8, + "image_url": "https://example.com/updated.jpg", + "ratings_count": 1500 +} +``` + +**Delete a book (soft delete):** + +```bash +DELETE /books/:id +``` + +#### Sync Operations + +**Trigger manual sync:** + +```bash +POST /sync +``` + +**Check sync status:** + +```bash +GET /sync/status +``` + +### 8. How It Works + +#### Architecture + +```plaintext +User Request + ↓ +Express API (CRUD) + ↓ +PostgreSQL (Source of Truth) + ↓ +Async Sync → Typesense (Search Index) + ↑ +Background Worker (Every 60s) +``` + +#### Sync Strategies + +##### 1. Startup Sync (Smart) + +On every server start, the sync worker checks whether the Typesense collection already has documents. If empty, it seeds `lastSyncTime` to zero and runs a full sync. If it has data, it runs an incremental sync since `MAX(updated_at)` of PostgreSQL books table. + +##### 2. Real-time Sync (Async) + +Triggered on Create, Update, Delete operations in the background. + +##### 3. 
Background Periodic Sync + +Runs every 60 seconds automatically, doing incremental sync. + +##### 4. Manual Sync + +Endpoint: `POST /sync` diff --git a/typesense-node-sequelize-full-text-search/package.json b/typesense-node-sequelize-full-text-search/package.json new file mode 100644 index 0000000..73d21b2 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/package.json @@ -0,0 +1,29 @@ +{ + "name": "typesense-node-sequelize-search-app", + "version": "1.0.0", + "description": "A production-ready RESTful search API built with Node.js, Express, PostgreSQL, and Typesense.", + "main": "dist/server.js", + "scripts": { + "start": "node dist/server.js", + "dev": "ts-node-dev --respawn --transpile-only src/server.ts", + "build": "tsc" + }, + "dependencies": { + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "express": "^4.19.2", + "node-cron": "^3.0.3", + "pg": "^8.11.5", + "pg-hstore": "^2.3.4", + "sequelize": "^6.37.3", + "typesense": "^1.8.2" + }, + "devDependencies": { + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/node": "^20.12.7", + "@types/node-cron": "^3.0.11", + "ts-node-dev": "^2.0.0", + "typescript": "^5.4.5" + } +} diff --git a/typesense-node-sequelize-full-text-search/src/config/database.ts b/typesense-node-sequelize-full-text-search/src/config/database.ts new file mode 100644 index 0000000..1f22f20 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/config/database.ts @@ -0,0 +1,15 @@ +import { Sequelize } from 'sequelize'; +import { env } from './env'; + +export const sequelize = new Sequelize(env.DB_NAME, env.DB_USER, env.DB_PASSWORD, { + host: env.DB_HOST, + port: env.DB_PORT, + dialect: 'postgres', + logging: console.log, + pool: { + max: 5, + min: 0, + acquire: 30000, + idle: 10000 + } +}); diff --git a/typesense-node-sequelize-full-text-search/src/config/env.ts b/typesense-node-sequelize-full-text-search/src/config/env.ts new file mode 100644 index 0000000..ca37595 --- /dev/null +++ 
b/typesense-node-sequelize-full-text-search/src/config/env.ts @@ -0,0 +1,18 @@ +import dotenv from 'dotenv'; + +dotenv.config(); + +export const env = { + PORT: parseInt(process.env.PORT || '3000', 10), + + DB_HOST: process.env.DB_HOST || 'localhost', + DB_USER: process.env.DB_USER || 'postgres', + DB_PASSWORD: process.env.DB_PASSWORD || 'password', + DB_NAME: process.env.DB_NAME || 'typesense_books', + DB_PORT: parseInt(process.env.DB_PORT || '5432', 10), + + TYPESENSE_HOST: process.env.TYPESENSE_HOST || 'localhost', + TYPESENSE_PORT: parseInt(process.env.TYPESENSE_PORT || '8108', 10), + TYPESENSE_PROTOCOL: process.env.TYPESENSE_PROTOCOL || 'http', + TYPESENSE_API_KEY: process.env.TYPESENSE_API_KEY || 'xyz', +}; diff --git a/typesense-node-sequelize-full-text-search/src/models/Book.ts b/typesense-node-sequelize-full-text-search/src/models/Book.ts new file mode 100644 index 0000000..4ae8f54 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/models/Book.ts @@ -0,0 +1,79 @@ +import { Model, DataTypes, type Optional } from 'sequelize'; +import { sequelize } from '../config/database'; + +export interface BookAttributes { + id: number; + title: string; + authors: string[]; + publication_year: number; + average_rating: number; + image_url: string; + ratings_count: number; + created_at?: Date; + updated_at?: Date; + deleted_at?: Date | null; +} + +export interface BookCreationAttributes extends Optional {} + +export class Book extends Model implements BookAttributes { + declare id: number; + declare title: string; + declare authors: string[]; + declare publication_year: number; + declare average_rating: number; + declare image_url: string; + declare ratings_count: number; + + declare readonly created_at: Date; + declare readonly updated_at: Date; + declare readonly deleted_at: Date | null; +} + +Book.init( + { + id: { + type: DataTypes.INTEGER, + autoIncrement: true, + primaryKey: true, + }, + title: { + type: DataTypes.STRING(255), + allowNull: false, + }, 
+ authors: { + type: DataTypes.JSONB, + allowNull: false, + defaultValue: [], + }, + publication_year: { + type: DataTypes.INTEGER, + allowNull: true, + }, + average_rating: { + type: DataTypes.DECIMAL(3, 2), + allowNull: true, + get() { + const value = this.getDataValue('average_rating'); + return value === null ? null : parseFloat(value as unknown as string); + } + }, + image_url: { + type: DataTypes.STRING(255), + allowNull: true, + }, + ratings_count: { + type: DataTypes.INTEGER, + allowNull: true, + }, + }, + { + sequelize, + tableName: 'books', + timestamps: true, + paranoid: true, // Enables soft deletes (deletedAt) + createdAt: 'created_at', + updatedAt: 'updated_at', + deletedAt: 'deleted_at', + } +); diff --git a/typesense-node-sequelize-full-text-search/src/routes/books.ts b/typesense-node-sequelize-full-text-search/src/routes/books.ts new file mode 100644 index 0000000..17794c0 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/routes/books.ts @@ -0,0 +1,127 @@ +import { Router, type Request, type Response } from 'express'; +import { Book } from '../models/Book'; +import { typesenseClient } from '../search/client'; +import { BOOKS_COLLECTION_NAME } from '../search/collections'; + +const router = Router(); + +// Helper for real-time async sync +const syncBookToTypesense = async (book: Book) => { + try { + const document = { + id: book.id.toString(), + title: book.title, + authors: Array.isArray(book.authors) ? book.authors : [book.authors], + publication_year: book.publication_year || 0, + average_rating: typeof book.average_rating === 'number' ? 
book.average_rating : parseFloat(book.average_rating || '0'), + image_url: book.image_url || '', + ratings_count: book.ratings_count || 0, + }; + + console.log(`Syncing book ${book.id} to Typesense:`, document.title); + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().upsert(document); + console.log(`Successfully synced book ${book.id} to Typesense.`); + } catch (err) { + console.error(`Failed to sync book ${book.id} to Typesense:`, err); + throw err; + } +}; + +const deleteBookFromTypesense = async (id: number) => { + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents(id.toString()).delete(); + } catch (err) { + console.error(`Failed to delete book ${id} from Typesense`, err); + } +}; + +// GET /books - Get all books with pagination +router.get('/', async (req: Request, res: Response) => { + const page = parseInt(req.query.page as string || '1', 10); + const limit = parseInt(req.query.limit as string || '10', 10); + const offset = (page - 1) * limit; + + try { + const { count, rows } = await Book.findAndCountAll({ + limit, + offset, + order: [['id', 'ASC']] + }); + + res.json({ + total: count, + page, + limit, + data: rows + }); + } catch (_error) { + res.status(500).json({ error: 'Failed to fetch books' }); + } +}); + +// GET /books/:id - Get a book +router.get('/:id', async (req: Request, res: Response) => { + try { + const book = await Book.findByPk(req.params.id); + if (!book) { + return res.status(404).json({ error: 'Book not found' }); + } + res.json(book); + } catch (_error) { + res.status(500).json({ error: 'Failed to fetch book' }); + } +}); + +// POST /books - Create a book +router.post('/', async (req: Request, res: Response) => { + try { + const book = await Book.create(req.body); + + // Real-time async sync (now awaited to ensure consistency in tests) + await syncBookToTypesense(book); + + res.status(201).json(book); + } catch (error) { + res.status(400).json({ error: (error as Error).message }); + } +}); + 
+// PUT /books/:id - Update a book +router.put('/:id', async (req: Request, res: Response) => { + try { + const book = await Book.findByPk(req.params.id); + if (!book) { + return res.status(404).json({ error: 'Book not found' }); + } + + await book.update(req.body); + + // Real-time async sync (now awaited to ensure consistency in tests) + await syncBookToTypesense(book); + + res.json(book); + } catch (error) { + res.status(400).json({ error: (error as Error).message }); + } +}); + +// DELETE /books/:id - Delete a book +router.delete('/:id', async (req: Request, res: Response) => { + try { + const book = await Book.findByPk(req.params.id); + if (!book) { + return res.status(404).json({ error: 'Book not found' }); + } + + await book.destroy(); + + // Real-time async sync + deleteBookFromTypesense(book.id); + + res.status(204).send(); + } catch (error) { + res.status(500).json({ error: (error as Error).message }); + } +}); + +export default router; diff --git a/typesense-node-sequelize-full-text-search/src/routes/search.ts b/typesense-node-sequelize-full-text-search/src/routes/search.ts new file mode 100644 index 0000000..7daa5eb --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/routes/search.ts @@ -0,0 +1,53 @@ +import { Router, type Request, type Response } from 'express'; +import { typesenseClient } from '../search/client'; +import { BOOKS_COLLECTION_NAME } from '../search/collections'; +import { runFullSync, lastSyncTime } from '../search/sync'; +import { getSyncStatus } from '../search/worker'; + +const router = Router(); + +// GET /search?q= +router.get('/search', async (req: Request, res: Response) => { + const query = req.query.q as string || ''; + + try { + const searchResults = await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().search({ + q: query, + query_by: 'title,authors', + }); + + res.json({ + query, + found: searchResults.found, + results: searchResults.hits, + facet_counts: searchResults.facet_counts || [], + }); + 
} catch (_error) { + res.status(500).json({ error: 'Failed to fetch books' }); + } +}); + +// POST /sync - Trigger manual sync +router.post('/sync', async (_req: Request, res: Response) => { + try { + // We run full sync here for manual trigger, but you could also run incremental + await runFullSync(); + + res.json({ + message: 'Sync completed', + syncedAt: lastSyncTime.toISOString() + }); + } catch (_error) { + res.status(500).json({ error: 'Failed to sync books' }); + } +}); + +// GET /sync/status - Check sync status +router.get('/sync/status', (_req: Request, res: Response) => { + res.json({ + lastSyncTime: lastSyncTime.toISOString(), + syncWorkerRunning: getSyncStatus().syncWorkerRunning + }); +}); + +export default router; diff --git a/typesense-node-sequelize-full-text-search/src/search/client.ts b/typesense-node-sequelize-full-text-search/src/search/client.ts new file mode 100644 index 0000000..97ca2dc --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/search/client.ts @@ -0,0 +1,14 @@ +import { Client } from 'typesense'; +import { env } from '../config/env'; + +export const typesenseClient = new Client({ + nodes: [ + { + host: env.TYPESENSE_HOST, + port: env.TYPESENSE_PORT, + protocol: env.TYPESENSE_PROTOCOL, + }, + ], + apiKey: env.TYPESENSE_API_KEY, + connectionTimeoutSeconds: 5, +}); diff --git a/typesense-node-sequelize-full-text-search/src/search/collections.ts b/typesense-node-sequelize-full-text-search/src/search/collections.ts new file mode 100644 index 0000000..28d478d --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/search/collections.ts @@ -0,0 +1,35 @@ +import type { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections'; +import { typesenseClient } from './client'; + +export const BOOKS_COLLECTION_NAME = 'books'; + +export const booksCollectionSchema: CollectionCreateSchema = { + name: BOOKS_COLLECTION_NAME, + fields: [ + { name: 'id', type: 'string' }, + { name: 'title', type: 'string' }, + { name: 
'authors', type: 'string[]', facet: true }, + { name: 'publication_year', type: 'int32', facet: true, optional: true }, + { name: 'average_rating', type: 'float', facet: true, optional: true }, + { name: 'image_url', type: 'string', optional: true }, + { name: 'ratings_count', type: 'int32', optional: true }, + ], +}; + +export async function initializeTypesense(): Promise { + try { + const collections = await typesenseClient.collections().retrieve(); + const collectionExists = collections.some((c) => c.name === BOOKS_COLLECTION_NAME); + + if (!collectionExists) { + console.log(`Creating collection ${BOOKS_COLLECTION_NAME}...`); + await typesenseClient.collections().create(booksCollectionSchema); + console.log(`Collection ${BOOKS_COLLECTION_NAME} created successfully.`); + } else { + console.log(`Collection ${BOOKS_COLLECTION_NAME} already exists.`); + } + } catch (error) { + console.error('Error initializing Typesense collection:', error); + throw error; + } +} diff --git a/typesense-node-sequelize-full-text-search/src/search/sync.ts b/typesense-node-sequelize-full-text-search/src/search/sync.ts new file mode 100644 index 0000000..3a809b9 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/search/sync.ts @@ -0,0 +1,148 @@ +import { Op } from 'sequelize'; +import { Book } from '../models/Book'; +import { typesenseClient } from './client'; +import { BOOKS_COLLECTION_NAME } from './collections'; + +export let lastSyncTime: Date = new Date(0); + +const BATCH_SIZE = 1000; + +export async function runFullSync() { + console.log('Running full sync...'); + let lastId = 0; + let hasMore = true; + let totalProcessed = 0; + + while (hasMore) { + let books: Book[]; + try { + books = await Book.findAll({ + where: { id: { [Op.gt]: lastId } }, + limit: BATCH_SIZE, + order: [['id', 'ASC']], + paranoid: true, // Only fetch active records + }); + } catch (err) { + console.error('Database error during full sync fetching:', err); + break; // Abort this sync run 
gracefully on DB failure + } + + if (books.length === 0) { + hasMore = false; + break; + } + + lastId = books[books.length - 1].id; + + const documents = books.map((b) => ({ + id: b.id.toString(), + title: b.title, + authors: b.authors, + publication_year: b.publication_year || 0, + average_rating: b.average_rating || 0.0, + image_url: b.image_url || '', + ratings_count: b.ratings_count || 0, + })); + + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().import(documents, { action: 'upsert' }); + totalProcessed += documents.length; + console.log(`Full sync: Processed ${totalProcessed} books.`); + } catch (err) { + console.error('Error importing documents during full sync', err); + // We can choose to break or continue here; breaking is safer on Typesense errors + break; + } + } + + // Update lastSyncTime to now + lastSyncTime = new Date(); + console.log('Full sync completed.'); +} + +export async function runIncrementalSync() { + console.log(`Running incremental sync since ${lastSyncTime.toISOString()}...`); + + // 1. Find newly created or updated books + const updatedBooks = await Book.findAll({ + where: { + updated_at: { + [Op.gt]: lastSyncTime, + }, + }, + paranoid: true, // Only active + }); + + if (updatedBooks.length > 0) { + const documents = updatedBooks.map((b) => ({ + id: b.id.toString(), + title: b.title, + authors: b.authors, + publication_year: b.publication_year || 0, + average_rating: b.average_rating || 0.0, + image_url: b.image_url || '', + ratings_count: b.ratings_count || 0, + })); + + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents().import(documents, { action: 'upsert' }); + console.log(`Incremental sync: Upserted ${documents.length} books.`); + } catch (err) { + console.error('Error upserting documents in incremental sync', err); + } + } + + // 2. 
Find soft-deleted books + const deletedBooks = await Book.findAll({ + where: { + deleted_at: { + [Op.gt]: lastSyncTime, + }, + }, + paranoid: false, // Include soft-deleted + }); + + if (deletedBooks.length > 0) { + for (const book of deletedBooks) { + try { + await typesenseClient.collections(BOOKS_COLLECTION_NAME).documents(book.id.toString()).delete(); + console.log(`Incremental sync: Deleted book ${book.id} from Typesense.`); + } catch (err) { + // Typesense might return 404 if document doesn't exist, which is fine + const error = err as { httpStatus?: number }; + if (error.httpStatus !== 404) { + console.error(`Error deleting book ${book.id} from Typesense`, err); + } + } + } + } + + lastSyncTime = new Date(); + console.log('Incremental sync completed.'); +} + +export async function determineAndRunStartupSync() { + try { + const searchStats = await typesenseClient.collections(BOOKS_COLLECTION_NAME).retrieve(); + const docCount = searchStats.num_documents; + + if (docCount === 0) { + // Empty Typesense collection, full sync + await runFullSync(); + } else { + // Typesense has data, get latest updated_at from DB + const latestBook = await Book.findOne({ + order: [['updated_at', 'DESC']], + paranoid: false, // Check across all records + }); + + if (latestBook?.updated_at) { + lastSyncTime = latestBook.updated_at; + } + + await runIncrementalSync(); + } + } catch (error) { + console.error('Error during startup sync:', error); + } +} diff --git a/typesense-node-sequelize-full-text-search/src/search/worker.ts b/typesense-node-sequelize-full-text-search/src/search/worker.ts new file mode 100644 index 0000000..775aa48 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/search/worker.ts @@ -0,0 +1,31 @@ +import cron from 'node-cron'; +import { runIncrementalSync } from './sync'; + +let isSyncRunning = false; + +export function startBackgroundSyncWorker() { + console.log('Starting background periodic sync worker (every 60s)...'); + + // Runs every minute + 
cron.schedule('* * * * *', async () => { + if (isSyncRunning) { + console.log('Sync already running, skipping this iteration.'); + return; + } + + isSyncRunning = true; + try { + await runIncrementalSync(); + } catch (error) { + console.error('Error in background sync worker:', error); + } finally { + isSyncRunning = false; + } + }); +} + +export function getSyncStatus() { + return { + syncWorkerRunning: isSyncRunning, + }; +} diff --git a/typesense-node-sequelize-full-text-search/src/server.ts b/typesense-node-sequelize-full-text-search/src/server.ts new file mode 100644 index 0000000..ef8e23c --- /dev/null +++ b/typesense-node-sequelize-full-text-search/src/server.ts @@ -0,0 +1,51 @@ +import express from 'express'; +import cors from 'cors'; +import { env } from './config/env'; +import { sequelize } from './config/database'; +import { initializeTypesense } from './search/collections'; +import { determineAndRunStartupSync } from './search/sync'; +import { startBackgroundSyncWorker } from './search/worker'; + +import booksRouter from './routes/books'; +import searchRouter from './routes/search'; + +const app = express(); + +app.use(cors()); +app.use(express.json()); + +// Routes +app.use('/books', booksRouter); +app.use('/', searchRouter); + +async function startServer() { + try { + // 1. Connect to PostgreSQL + console.log('Connecting to PostgreSQL database...'); + await sequelize.authenticate(); + // In production, use migrations instead of sync() + await sequelize.sync(); + console.log('Database connected and models synced.'); + + // 2. Initialize Typesense + console.log('Initializing Typesense...'); + await initializeTypesense(); + + // 3. Run Startup Sync + console.log('Running startup sync...'); + await determineAndRunStartupSync(); + + // 4. Start Background Worker + startBackgroundSyncWorker(); + + // 5. 
Start Express API + app.listen(env.PORT, () => { + console.log(`Server is running on http://localhost:${env.PORT}`); + }); + } catch (error) { + console.error('Failed to start server:', error); + process.exit(1); + } +} + +startServer(); diff --git a/typesense-node-sequelize-full-text-search/tsconfig.json b/typesense-node-sequelize-full-text-search/tsconfig.json new file mode 100644 index 0000000..24cf495 --- /dev/null +++ b/typesense-node-sequelize-full-text-search/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "es2022", + "module": "commonjs", + "rootDir": "./src", + "outDir": "./dist", + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true + }, + "include": ["src/**/*"] +} diff --git a/typesense-springboot-full-text-search/.env.example b/typesense-springboot-full-text-search/.env.example new file mode 100644 index 0000000..0f84697 --- /dev/null +++ b/typesense-springboot-full-text-search/.env.example @@ -0,0 +1,26 @@ +# Server Configuration +PORT=4000 + +# Database Configuration +DB_HOST=localhost +DB_PORT=5432 +DB_NAME=typesense_books +DB_USER=xxx +DB_PASSWORD=xxx + +# JPA / Hibernate +HIBERNATE_DDL_AUTO=update + +# Typesense Configuration +TYPESENSE_HOST=localhost +TYPESENSE_PORT=8108 +TYPESENSE_PROTOCOL=http +TYPESENSE_API_KEY=xyz +TYPESENSE_COLLECTION_NAME=books +TYPESENSE_CONNECTION_TIMEOUT=2 + +# Sync Configuration +TYPESENSE_SYNC_INTERVAL=60000 +TYPESENSE_SYNC_BATCH_SIZE=1000 +TYPESENSE_SYNC_PAGE_SIZE=1000 +TYPESENSE_SYNC_ENABLE_SOFT_DELETE=true diff --git a/typesense-springboot-full-text-search/.gitattributes b/typesense-springboot-full-text-search/.gitattributes new file mode 100644 index 0000000..3b41682 --- /dev/null +++ b/typesense-springboot-full-text-search/.gitattributes @@ -0,0 +1,2 @@ +/mvnw text eol=lf +*.cmd text eol=crlf diff --git a/typesense-springboot-full-text-search/.gitignore b/typesense-springboot-full-text-search/.gitignore new 
file mode 100644 index 0000000..667aaef --- /dev/null +++ b/typesense-springboot-full-text-search/.gitignore @@ -0,0 +1,33 @@ +HELP.md +target/ +.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ diff --git a/typesense-springboot-full-text-search/.mvn/wrapper/maven-wrapper.properties b/typesense-springboot-full-text-search/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000..c595b00 --- /dev/null +++ b/typesense-springboot-full-text-search/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,3 @@ +wrapperVersion=3.3.4 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.14/apache-maven-3.9.14-bin.zip diff --git a/typesense-springboot-full-text-search/README.md b/typesense-springboot-full-text-search/README.md new file mode 100644 index 0000000..13aa5d2 --- /dev/null +++ b/typesense-springboot-full-text-search/README.md @@ -0,0 +1,241 @@ +# Spring Boot Full-Text Search with Typesense + +A production-ready RESTful search API built with Spring Boot, PostgreSQL, and Typesense. Features full-text search, CRUD operations, real-time async indexing, and scheduled background sync. + +## Tech Stack + +- Java 17+ +- Spring Boot 4.x +- PostgreSQL with Spring Data JPA +- Typesense +- Docker + +## Prerequisites + +- Java 17+ installed +- Maven 3.9+ +- Docker (for Typesense and PostgreSQL) + +## Quick Start + +### 1. Clone the repository + +```bash +git clone https://github.com/typesense/code-samples.git +cd typesense-springboot-full-text-search +``` + +### 2. 
Start Typesense and PostgreSQL + +```bash +# Start Typesense +docker run -d \ + -p 8108:8108 \ + -v typesense-data:/data \ + typesense/typesense:latest \ + --data-dir /data \ + --api-key=xyz \ + --enable-cors + +# Start PostgreSQL +docker run -d \ + -p 5432:5432 \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=password \ + -e POSTGRES_DB=typesense_books \ + -v postgres-data:/var/lib/postgresql/data \ + postgres:15 +``` + +### 3. Set up environment variables + +Copy the example file and adjust as needed: + +```bash +cp .env.example .env +``` + +Or export the variables directly: + +```bash +export DB_HOST=localhost +export DB_PORT=5432 +export DB_USER=postgres +export DB_PASSWORD=password +export DB_NAME=typesense_books +export TYPESENSE_HOST=localhost +export TYPESENSE_PORT=8108 +export TYPESENSE_PROTOCOL=http +export TYPESENSE_API_KEY=xyz +``` + +### 4. Project Structure + +```text +src/main/java/org/typesense/full_text_search/ +├── FullTextSearchApplication.java # Entry point (@EnableScheduling, @EnableAsync) +├── config/ +│ ├── TypesenseConfig.java # Typesense client bean +│ └── AsyncConfig.java # Thread pool for async Typesense operations +├── model/ +│ └── Book.java # JPA entity with soft delete support +├── repository/ +│ └── BookRepository.java # Spring Data JPA repository +├── service/ +│ ├── BookService.java # Book CRUD operations +│ └── TypesenseService.java # Typesense search, sync, collection management +├── scheduler/ +│ └── TypesenseSyncScheduler.java # @Scheduled periodic sync worker +└── controller/ + ├── BookController.java # CRUD endpoints for books + ├── SearchController.java # Search endpoint + └── SyncController.java # Manual sync + status endpoints +``` + +### 5. Start the development server + +```bash +./mvnw spring-boot:run +``` + +Open [http://localhost:4000](http://localhost:4000). + +### 6. 
API Endpoints + +#### Search + +```bash +GET /search?q= +``` + +Example: + +```bash +curl "http://localhost:4000/search?q=harry" +``` + +#### CRUD Operations + +**Create a book:** + +```bash +curl -X POST http://localhost:4000/books \ + -H "Content-Type: application/json" \ + -d '{ + "title": "The Go Programming Language", + "authors": ["Alan Donovan", "Brian Kernighan"], + "publicationYear": 2015, + "averageRating": 4.5, + "imageUrl": "https://example.com/image.jpg", + "ratingsCount": 1000 + }' +``` + +**Get a book:** + +```bash +GET /books/:id +``` + +**Get all books (paginated):** + +```bash +GET /books?page=1&page_size=100 +``` + +**Update a book:** + +```bash +PUT /books/:id +``` + +**Delete a book (soft delete):** + +```bash +DELETE /books/:id +``` + +#### Sync Operations + +**Trigger manual sync:** + +```bash +POST /sync +``` + +**Check sync status:** + +```bash +GET /sync/status +``` + +### 7. How It Works + +#### Architecture + +```plaintext +User Request + ↓ +Spring Boot API (CRUD) + ↓ +PostgreSQL (Source of Truth) + ↓ +Async Sync → Typesense (Search Index) + ↑ +@Scheduled Worker (Every 60s) +``` + +#### Sync Strategies + +##### 1. Startup Sync (Smart) + +On application startup (`ApplicationReadyEvent`), the scheduler checks whether the Typesense collection already has documents: + +- **Typesense is empty**: Seeds `lastSyncTime` to epoch and runs a full sync. +- **Typesense already has data**: Seeds `lastSyncTime` from `MAX(updated_at)` of the books table, then runs an incremental sync. + +##### 2. Real-time Sync (Async) + +- Triggered on: Create, Update, Delete operations +- Non-blocking: API responds immediately +- Runs in a dedicated thread pool (`typesenseAsyncExecutor`) +- If it fails, the background worker catches it within 60 seconds + +##### 3. 
Background Periodic Sync (`@Scheduled`) + +- Runs every 60 seconds (configurable via `typesense.sync.interval-ms`) +- Incremental: Only syncs books with `updated_at > lastSyncTime` +- Handles soft deletes: Removes deleted books from Typesense +- Uses upsert for both inserts and updates + +##### 4. Manual Sync + +- Endpoint: `POST /sync` +- On-demand sync trigger + +#### Configuration + +All sync parameters are configurable in `application.properties`: + +```properties +typesense.sync.interval-ms=60000 +typesense.sync.batch-size=1000 +typesense.sync.page-size=1000 +typesense.sync.enable-soft-delete=true +``` + +### 8. Deployment + +**Environment Variables for Production:** + +```env +DB_HOST=your-postgres-host.com +DB_USER=your-db-user +DB_PASSWORD=your-secure-password +DB_NAME=typesense_books +DB_PORT=5432 +TYPESENSE_HOST=xxx.typesense.net +TYPESENSE_PORT=443 +TYPESENSE_PROTOCOL=https +TYPESENSE_API_KEY=your-production-api-key +``` diff --git a/typesense-springboot-full-text-search/mvnw b/typesense-springboot-full-text-search/mvnw new file mode 100755 index 0000000..bd8896b --- /dev/null +++ b/typesense-springboot-full-text-search/mvnw @@ -0,0 +1,295 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Apache Maven Wrapper startup batch script, version 3.3.4 +# +# Optional ENV vars +# ----------------- +# JAVA_HOME - location of a JDK home dir, required when download maven via java source +# MVNW_REPOURL - repo url base for downloading maven distribution +# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output +# ---------------------------------------------------------------------------- + +set -euf +[ "${MVNW_VERBOSE-}" != debug ] || set -x + +# OS specific support. +native_path() { printf %s\\n "$1"; } +case "$(uname)" in +CYGWIN* | MINGW*) + [ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")" + native_path() { cygpath --path --windows "$1"; } + ;; +esac + +# set JAVACMD and JAVACCMD +set_java_home() { + # For Cygwin and MinGW, ensure paths are in Unix format before anything is touched + if [ -n "${JAVA_HOME-}" ]; then + if [ -x "$JAVA_HOME/jre/sh/java" ]; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACCMD="$JAVA_HOME/jre/sh/javac" + else + JAVACMD="$JAVA_HOME/bin/java" + JAVACCMD="$JAVA_HOME/bin/javac" + + if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then + echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2 + echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2 + return 1 + fi + fi + else + JAVACMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v java + )" || : + JAVACCMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v javac + )" || : + + if [ ! 
-x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then + echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2 + return 1 + fi + fi +} + +# hash string like Java String::hashCode +hash_string() { + str="${1:-}" h=0 + while [ -n "$str" ]; do + char="${str%"${str#?}"}" + h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296)) + str="${str#?}" + done + printf %x\\n $h +} + +verbose() { :; } +[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; } + +die() { + printf %s\\n "$1" >&2 + exit 1 +} + +trim() { + # MWRAPPER-139: + # Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds. + # Needed for removing poorly interpreted newline sequences when running in more + # exotic environments such as mingw bash on Windows. + printf "%s" "${1}" | tr -d '[:space:]' +} + +scriptDir="$(dirname "$0")" +scriptName="$(basename "$0")" + +# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties +while IFS="=" read -r key value; do + case "${key-}" in + distributionUrl) distributionUrl=$(trim "${value-}") ;; + distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; + esac +done <"$scriptDir/.mvn/wrapper/maven-wrapper.properties" +[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" + +case "${distributionUrl##*/}" in +maven-mvnd-*bin.*) + MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ + case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in + *AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;; + :Darwin*x86_64) distributionPlatform=darwin-amd64 ;; + :Darwin*arm64) distributionPlatform=darwin-aarch64 ;; + :Linux*x86_64*) distributionPlatform=linux-amd64 ;; + *) + echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2 + distributionPlatform=linux-amd64 + ;; + esac + 
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" + ;; +maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; +*) MVN_CMD="mvn${scriptName#mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; +esac + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}" +distributionUrlName="${distributionUrl##*/}" +distributionUrlNameMain="${distributionUrlName%.*}" +distributionUrlNameMain="${distributionUrlNameMain%-bin}" +MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}" +MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")" + +exec_maven() { + unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || : + exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD" +} + +if [ -d "$MAVEN_HOME" ]; then + verbose "found existing MAVEN_HOME at $MAVEN_HOME" + exec_maven "$@" +fi + +case "${distributionUrl-}" in +*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;; +*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;; +esac + +# prepare tmp dir +if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then + clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; } + trap clean HUP INT TERM EXIT +else + die "cannot create temp dir" +fi + +mkdir -p -- "${MAVEN_HOME%/*}" + +# Download and Install Apache Maven +verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +verbose "Downloading from: $distributionUrl" +verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +# select .zip or .tar.gz +if ! 
command -v unzip >/dev/null; then + distributionUrl="${distributionUrl%.zip}.tar.gz" + distributionUrlName="${distributionUrl##*/}" +fi + +# verbose opt +__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR='' +[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v + +# normalize http auth +case "${MVNW_PASSWORD:+has-password}" in +'') MVNW_USERNAME='' MVNW_PASSWORD='' ;; +has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;; +esac + +if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then + verbose "Found wget ... using wget" + wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl" +elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then + verbose "Found curl ... using curl" + curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl" +elif set_java_home; then + verbose "Falling back to use Java to download" + javaSource="$TMP_DOWNLOAD_DIR/Downloader.java" + targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName" + cat >"$javaSource" <<-END + public class Downloader extends java.net.Authenticator + { + protected java.net.PasswordAuthentication getPasswordAuthentication() + { + return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() ); + } + public static void main( String[] args ) throws Exception + { + setDefault( new Downloader() ); + java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() ); + } + } + END + # For Cygwin/MinGW, switch paths to Windows format before running javac and java + verbose " - Compiling Downloader.java ..." 
+ "$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java" + verbose " - Running Downloader.java ..." + "$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")" +fi + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +if [ -n "${distributionSha256Sum-}" ]; then + distributionSha256Result=false + if [ "$MVN_CMD" = mvnd.sh ]; then + echo "Checksum validation is not supported for maven-mvnd." >&2 + echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + elif command -v sha256sum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c - >/dev/null 2>&1; then + distributionSha256Result=true + fi + elif command -v shasum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2 + echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + fi + if [ $distributionSha256Result = false ]; then + echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2 + echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
>&2 + exit 1 + fi +fi + +# unzip and move +if command -v unzip >/dev/null; then + unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip" +else + tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" +fi + +# Find the actual extracted directory name (handles snapshots where filename != directory name) +actualDistributionDir="" + +# First try the expected directory name (for regular distributions) +if [ -d "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" ]; then + if [ -f "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/bin/$MVN_CMD" ]; then + actualDistributionDir="$distributionUrlNameMain" + fi +fi + +# If not found, search for any directory with the Maven executable (for snapshots) +if [ -z "$actualDistributionDir" ]; then + # enable globbing to iterate over items + set +f + for dir in "$TMP_DOWNLOAD_DIR"/*; do + if [ -d "$dir" ]; then + if [ -f "$dir/bin/$MVN_CMD" ]; then + actualDistributionDir="$(basename "$dir")" + break + fi + fi + done + set -f +fi + +if [ -z "$actualDistributionDir" ]; then + verbose "Contents of $TMP_DOWNLOAD_DIR:" + verbose "$(ls -la "$TMP_DOWNLOAD_DIR")" + die "Could not find Maven distribution directory in extracted archive" +fi + +verbose "Found extracted Maven distribution directory: $actualDistributionDir" +printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$actualDistributionDir/mvnw.url" +mv -- "$TMP_DOWNLOAD_DIR/$actualDistributionDir" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" + +clean || : +exec_maven "$@" diff --git a/typesense-springboot-full-text-search/mvnw.cmd b/typesense-springboot-full-text-search/mvnw.cmd new file mode 100644 index 0000000..92450f9 --- /dev/null +++ b/typesense-springboot-full-text-search/mvnw.cmd @@ -0,0 +1,189 @@ +<# : batch portion +@REM ---------------------------------------------------------------------------- 
+@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Apache Maven Wrapper startup batch script, version 3.3.4 +@REM +@REM Optional ENV vars +@REM MVNW_REPOURL - repo url base for downloading maven distribution +@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output +@REM ---------------------------------------------------------------------------- + +@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0) +@SET __MVNW_CMD__= +@SET __MVNW_ERROR__= +@SET __MVNW_PSMODULEP_SAVE=%PSModulePath% +@SET PSModulePath= +@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @( + IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B) +) +@SET PSModulePath=%__MVNW_PSMODULEP_SAVE% +@SET __MVNW_PSMODULEP_SAVE= +@SET __MVNW_ARG0_NAME__= +@SET 
MVNW_USERNAME= +@SET MVNW_PASSWORD= +@IF NOT "%__MVNW_CMD__%"=="" ("%__MVNW_CMD__%" %*) +@echo Cannot start maven from wrapper >&2 && exit /b 1 +@GOTO :EOF +: end batch / begin powershell #> + +$ErrorActionPreference = "Stop" +if ($env:MVNW_VERBOSE -eq "true") { + $VerbosePreference = "Continue" +} + +# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties +$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl +if (!$distributionUrl) { + Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" +} + +switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) { + "maven-mvnd-*" { + $USE_MVND = $true + $distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip" + $MVN_CMD = "mvnd.cmd" + break + } + default { + $USE_MVND = $false + $MVN_CMD = $script -replace '^mvnw','mvn' + break + } +} + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +if ($env:MVNW_REPOURL) { + $MVNW_REPO_PATTERN = if ($USE_MVND -eq $False) { "/org/apache/maven/" } else { "/maven/mvnd/" } + $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace "^.*$MVNW_REPO_PATTERN",'')" +} +$distributionUrlName = $distributionUrl -replace '^.*/','' +$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' + +$MAVEN_M2_PATH = "$HOME/.m2" +if ($env:MAVEN_USER_HOME) { + $MAVEN_M2_PATH = "$env:MAVEN_USER_HOME" +} + +if (-not (Test-Path -Path $MAVEN_M2_PATH)) { + New-Item -Path $MAVEN_M2_PATH -ItemType Directory | Out-Null +} + +$MAVEN_WRAPPER_DISTS = $null +if ((Get-Item $MAVEN_M2_PATH).Target[0] -eq $null) { + $MAVEN_WRAPPER_DISTS = "$MAVEN_M2_PATH/wrapper/dists" +} else { + $MAVEN_WRAPPER_DISTS = (Get-Item $MAVEN_M2_PATH).Target[0] + "/wrapper/dists" +} + +$MAVEN_HOME_PARENT = 
"$MAVEN_WRAPPER_DISTS/$distributionUrlNameMain" +$MAVEN_HOME_NAME = ([System.Security.Cryptography.SHA256]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join '' +$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" + +if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { + Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME" + Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" + exit $? +} + +if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) { + Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl" +} + +# prepare tmp dir +$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile +$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir" +$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null +trap { + if ($TMP_DOWNLOAD_DIR.Exists) { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } + } +} + +New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null + +# Download and Install Apache Maven +Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." 
+Write-Verbose "Downloading from: $distributionUrl" +Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +$webclient = New-Object System.Net.WebClient +if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) { + $webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD) +} +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum +if ($distributionSha256Sum) { + if ($USE_MVND) { + Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." + } + Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash + if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) { + Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
+ } +} + +# unzip and move +Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null + +# Find the actual extracted directory name (handles snapshots where filename != directory name) +$actualDistributionDir = "" + +# First try the expected directory name (for regular distributions) +$expectedPath = Join-Path "$TMP_DOWNLOAD_DIR" "$distributionUrlNameMain" +$expectedMvnPath = Join-Path "$expectedPath" "bin/$MVN_CMD" +if ((Test-Path -Path $expectedPath -PathType Container) -and (Test-Path -Path $expectedMvnPath -PathType Leaf)) { + $actualDistributionDir = $distributionUrlNameMain +} + +# If not found, search for any directory with the Maven executable (for snapshots) +if (!$actualDistributionDir) { + Get-ChildItem -Path "$TMP_DOWNLOAD_DIR" -Directory | ForEach-Object { + $testPath = Join-Path $_.FullName "bin/$MVN_CMD" + if (Test-Path -Path $testPath -PathType Leaf) { + $actualDistributionDir = $_.Name + } + } +} + +if (!$actualDistributionDir) { + Write-Error "Could not find Maven distribution directory in extracted archive" +} + +Write-Verbose "Found extracted Maven distribution directory: $actualDistributionDir" +Rename-Item -Path "$TMP_DOWNLOAD_DIR/$actualDistributionDir" -NewName $MAVEN_HOME_NAME | Out-Null +try { + Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null +} catch { + if (! 
(Test-Path -Path "$MAVEN_HOME" -PathType Container)) { + Write-Error "fail to move MAVEN_HOME" + } +} finally { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } +} + +Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" diff --git a/typesense-springboot-full-text-search/pom.xml b/typesense-springboot-full-text-search/pom.xml new file mode 100644 index 0000000..98c1710 --- /dev/null +++ b/typesense-springboot-full-text-search/pom.xml @@ -0,0 +1,127 @@ + + + 4.0.0 + + org.springframework.boot + spring-boot-starter-parent + 4.0.5 + + + org.typesense + full-text-search + 0.0.1-SNAPSHOT + + + + + + + + + + + + + + + + + 17 + + + + org.springframework.boot + spring-boot-starter-data-jpa + + + org.springframework.boot + spring-boot-starter-webmvc + + + + org.typesense + typesense-java + 1.3.0 + + + org.postgresql + postgresql + runtime + + + org.projectlombok + lombok + true + + + com.h2database + h2 + test + + + org.springframework.boot + spring-boot-starter-test + test + + + io.github.cdimascio + dotenv-java + 3.0.0 + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + org.projectlombok + lombok + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + default-compile + compile + + compile + + + + + org.projectlombok + lombok + + + + + + default-testCompile + test-compile + + testCompile + + + + + org.projectlombok + lombok + + + + + + + + + + diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/FullTextSearchApplication.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/FullTextSearchApplication.java new file mode 100644 index 0000000..1a63fda --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/FullTextSearchApplication.java @@ -0,0 +1,25 @@ +package org.typesense.full_text_search; + +import org.springframework.boot.SpringApplication; 
+import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.scheduling.annotation.EnableAsync; +import org.springframework.scheduling.annotation.EnableScheduling; +import org.typesense.full_text_search.config.DatabaseInitializer; + +import io.github.cdimascio.dotenv.Dotenv; + +@SpringBootApplication +@EnableScheduling +@EnableAsync +public class FullTextSearchApplication { + + public static void main(String[] args) { + // Load .env variables into system properties for Spring Boot to use + Dotenv dotenv = Dotenv.configure().ignoreIfMissing().load(); + dotenv.entries().forEach(entry -> System.setProperty(entry.getKey(), entry.getValue())); + + DatabaseInitializer.ensureDatabaseExists(); + SpringApplication.run(FullTextSearchApplication.class, args); + } + +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/AsyncConfig.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/AsyncConfig.java new file mode 100644 index 0000000..e6b8ee7 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/AsyncConfig.java @@ -0,0 +1,22 @@ +package org.typesense.full_text_search.config; + +import java.util.concurrent.Executor; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; + +@Configuration +public class AsyncConfig { + + @Bean(name = "typesenseAsyncExecutor") + public Executor typesenseAsyncExecutor() { + ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); + executor.setCorePoolSize(2); + executor.setMaxPoolSize(4); + executor.setQueueCapacity(100); + executor.setThreadNamePrefix("typesense-async-"); + executor.initialize(); + return executor; + } +} diff --git 
a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/DatabaseInitializer.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/DatabaseInitializer.java new file mode 100644 index 0000000..6a571cf --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/DatabaseInitializer.java @@ -0,0 +1,55 @@ +package org.typesense.full_text_search.config; + +import java.io.InputStream; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.Properties; + +public class DatabaseInitializer { + + private DatabaseInitializer() { + } + + public static void ensureDatabaseExists() { + Properties props = new Properties(); + try (InputStream is = DatabaseInitializer.class.getClassLoader() + .getResourceAsStream("application.properties")) { + if (is != null) { + props.load(is); + } + } catch (Exception e) { + System.err.println("Could not load application.properties: " + e.getMessage()); + return; + } + + String url = props.getProperty("spring.datasource.url"); + String username = props.getProperty("spring.datasource.username"); + String password = props.getProperty("spring.datasource.password"); + + if (url == null || !url.contains("postgresql")) return; + + String dbName = extractDatabaseName(url); + String baseUrl = url.substring(0, url.lastIndexOf('/')) + "/postgres"; + + try (Connection conn = DriverManager.getConnection(baseUrl, username, password); + Statement stmt = conn.createStatement()) { + + ResultSet rs = stmt.executeQuery( + "SELECT 1 FROM pg_database WHERE datname = '" + dbName + "'"); + + if (!rs.next()) { + stmt.execute("CREATE DATABASE " + dbName); + System.out.println("Database '" + dbName + "' created successfully"); + } + } catch (Exception e) { + System.err.println("Failed to create database '" + dbName + "': " + e.getMessage()); + } + } + + private static 
String extractDatabaseName(String url) { + String withoutParams = url.contains("?") ? url.substring(0, url.indexOf('?')) : url; + return withoutParams.substring(withoutParams.lastIndexOf('/') + 1); + } +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/TypesenseConfig.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/TypesenseConfig.java new file mode 100644 index 0000000..0a9db11 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/TypesenseConfig.java @@ -0,0 +1,40 @@ +package org.typesense.full_text_search.config; + +import java.time.Duration; +import java.util.List; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.typesense.api.Client; +import org.typesense.resources.Node; + +@Configuration +public class TypesenseConfig { + + @Value("${typesense.protocol}") + private String protocol; + + @Value("${typesense.host}") + private String host; + + @Value("${typesense.port}") + private String port; + + @Value("${typesense.api-key}") + private String apiKey; + + @Value("${typesense.connection-timeout-seconds}") + private int connectionTimeoutSeconds; + + @Bean + public Client typesenseClient() { + Node node = new Node(protocol, host, port); + org.typesense.api.Configuration configuration = new org.typesense.api.Configuration( + List.of(node), + Duration.ofSeconds(connectionTimeoutSeconds), + apiKey + ); + return new Client(configuration); + } +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/WebConfig.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/WebConfig.java new file mode 100644 index 0000000..2f8d595 --- /dev/null +++ 
b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/config/WebConfig.java @@ -0,0 +1,18 @@ +package org.typesense.full_text_search.config; + +import org.springframework.context.annotation.Configuration; +import org.springframework.web.servlet.config.annotation.CorsRegistry; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; + +@Configuration +public class WebConfig implements WebMvcConfigurer { + + @Override + public void addCorsMappings(CorsRegistry registry) { + registry.addMapping("/**") + .allowedOrigins("*") + .allowedMethods("GET", "POST", "PUT", "DELETE", "OPTIONS") + .allowedHeaders("Origin", "Content-Type", "Accept", "Authorization") + .exposedHeaders("Content-Length"); + } +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/BookController.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/BookController.java new file mode 100644 index 0000000..56f2225 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/BookController.java @@ -0,0 +1,99 @@ +package org.typesense.full_text_search.controller; + +import java.util.Map; + +import org.springframework.data.domain.Page; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import 
org.typesense.full_text_search.model.Book; +import org.typesense.full_text_search.service.BookService; +import org.typesense.full_text_search.service.TypesenseService; + +@RestController +@RequestMapping("/books") +public class BookController { + + private final BookService bookService; + private final TypesenseService typesenseService; + + public BookController(BookService bookService, TypesenseService typesenseService) { + this.bookService = bookService; + this.typesenseService = typesenseService; + } + + @PostMapping + public ResponseEntity> createBook(@RequestBody Book book) { + Book saved = bookService.save(book); + typesenseService.syncBookAsync(saved); + return ResponseEntity.status(HttpStatus.CREATED).body(Map.of( + "message", "Book created successfully", + "book", saved + )); + } + + @GetMapping("/{id}") + public ResponseEntity> getBook(@PathVariable Long id) { + return bookService.findById(id) + .map(book -> ResponseEntity.ok(Map.of("book", book))) + .orElse(ResponseEntity.status(HttpStatus.NOT_FOUND) + .body(Map.of("error", "Book not found"))); + } + + @GetMapping + public ResponseEntity> getAllBooks( + @RequestParam(defaultValue = "1") int page, + @RequestParam(name = "page_size", defaultValue = "100") int pageSize) { + + Page books = bookService.findAll(page, pageSize); + return ResponseEntity.ok(Map.of( + "count", books.getNumberOfElements(), + "total", books.getTotalElements(), + "page", page, + "page_size", pageSize, + "books", books.getContent() + )); + } + + @PutMapping("/{id}") + public ResponseEntity> updateBook(@PathVariable Long id, @RequestBody Book updates) { + return bookService.findById(id) + .map(existing -> { + if (updates.getTitle() != null) existing.setTitle(updates.getTitle()); + if (updates.getAuthors() != null) existing.setAuthors(updates.getAuthors()); + if (updates.getPublicationYear() != null) existing.setPublicationYear(updates.getPublicationYear()); + if (updates.getAverageRating() != null) 
existing.setAverageRating(updates.getAverageRating()); + if (updates.getImageUrl() != null) existing.setImageUrl(updates.getImageUrl()); + if (updates.getRatingsCount() != null) existing.setRatingsCount(updates.getRatingsCount()); + + Book saved = bookService.save(existing); + typesenseService.syncBookAsync(saved); + return ResponseEntity.ok(Map.of( + "message", "Book updated successfully", + "book", saved + )); + }) + .orElse(ResponseEntity.status(HttpStatus.NOT_FOUND) + .body(Map.of("error", "Book not found"))); + } + + @DeleteMapping("/{id}") + public ResponseEntity> deleteBook(@PathVariable Long id) { + return bookService.findById(id) + .map(book -> { + bookService.deleteById(id); + typesenseService.deleteBookAsync(id); + return ResponseEntity.ok(Map.of("message", "Book deleted successfully")); + }) + .orElse(ResponseEntity.status(HttpStatus.NOT_FOUND) + .body(Map.of("error", "Book not found"))); + } +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/HealthController.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/HealthController.java new file mode 100644 index 0000000..e6f001e --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/HealthController.java @@ -0,0 +1,16 @@ +package org.typesense.full_text_search.controller; + +import java.util.Map; + +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class HealthController { + + @GetMapping("/ping") + public ResponseEntity> ping() { + return ResponseEntity.ok(Map.of("message", "pong")); + } +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/SearchController.java 
b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/SearchController.java new file mode 100644 index 0000000..51d5274 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/SearchController.java @@ -0,0 +1,42 @@
package org.typesense.full_text_search.controller;

import java.util.Map;

import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.typesense.full_text_search.service.TypesenseService;
import org.typesense.model.SearchResult;

/** Exposes GET /search?q=... backed by TypesenseService#search. */
@RestController
public class SearchController {

    private final TypesenseService typesenseService;

    public SearchController(TypesenseService typesenseService) {
        this.typesenseService = typesenseService;
    }

    /**
     * Full-text search endpoint. Returns 400 when 'q' is missing/blank and
     * 500 with the underlying message when the Typesense call fails.
     */
    @GetMapping("/search")
    public ResponseEntity<Map<String, Object>> search(@RequestParam("q") String query) {
        if (query == null || query.isBlank()) {
            return ResponseEntity.badRequest().body(Map.of("error", "Search query parameter 'q' is required"));
        }

        try {
            SearchResult result = typesenseService.search(query);
            // Null-guard every value: Map.of rejects nulls at runtime.
            return ResponseEntity.ok(Map.of(
                    "query", query,
                    "results", result.getHits() != null ? result.getHits() : java.util.List.of(),
                    "found", result.getFound() != null ? result.getFound() : 0,
                    "took", result.getSearchTimeMs() != null ? result.getSearchTimeMs() : 0,
                    "facet_counts", result.getFacetCounts() != null ? result.getFacetCounts() : java.util.List.of()
            ));
        } catch (Exception e) {
            return ResponseEntity.internalServerError().body(Map.of(
                    "error", "Search failed: " + e.getMessage()
            ));
        }
    }
}
diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/SyncController.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/SyncController.java new file mode 100644 index 0000000..4f8d500 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/controller/SyncController.java @@ -0,0 +1,53 @@
package org.typesense.full_text_search.controller;

import java.time.Instant;
import java.util.Map;

import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.typesense.full_text_search.service.TypesenseService;

/** Manual trigger and status endpoints for the DB → Typesense sync. */
@RestController
@RequestMapping("/sync")
public class SyncController {

    private final TypesenseService typesenseService;

    public SyncController(TypesenseService typesenseService) {
        this.typesenseService = typesenseService;
    }

    /**
     * Runs one incremental sync pass (upserts then soft-delete cleanup) from the
     * last recorded sync time, and advances the watermark only on success.
     */
    @PostMapping
    public ResponseEntity<Map<String, Object>> triggerSync() {
        Instant lastSyncTime = typesenseService.getLastSyncTime();

        try {
            Instant newSyncTime = typesenseService.syncBooksToTypesense(lastSyncTime);
            int deletedCount = typesenseService.syncSoftDeletesToTypesense(lastSyncTime);
            typesenseService.setLastSyncTime(newSyncTime);

            return ResponseEntity.ok(Map.of(
                    "message", "Sync completed",
                    "newSyncTime", newSyncTime.toString(),
                    "syncedAt", Instant.now().toString(),
                    "deletedBooks", deletedCount
            ));
        } catch (Exception e) {
            return ResponseEntity.internalServerError().body(Map.of(
                    "error", "Sync failed",
                    "message", e.getMessage()
            ));
        }
    }

    /** Reports the current sync watermark and whether the background worker is active. */
    @GetMapping("/status")
    public ResponseEntity<Map<String, Object>> getSyncStatus() {
        return ResponseEntity.ok(Map.of(
                "lastSyncTime", typesenseService.getLastSyncTime().toString(),
                "syncWorkerRunning", typesenseService.isSyncWorkerRunning()
        ));
    }
}
diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/model/Book.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/model/Book.java new file mode 100644 index 0000000..e29247a --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/model/Book.java @@ -0,0 +1,77 @@
package org.typesense.full_text_search.model;

import java.time.Instant;
import java.util.List;

import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.SQLRestriction;

import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.PreUpdate;
import jakarta.persistence.Table;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * Book entity with soft-delete semantics: DELETE statements are rewritten to
 * stamp deleted_at, and @SQLRestriction hides soft-deleted rows from all
 * mapped (non-native) queries.
 */
@Entity
@Table(name = "books")
@SQLDelete(sql = "UPDATE books SET deleted_at = NOW(), updated_at = NOW() WHERE id = ?")
@SQLRestriction("deleted_at IS NULL")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class Book {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    private String title;

    // Author names persisted as a JSON array column.
    @Column(columnDefinition = "jsonb")
    @org.hibernate.annotations.JdbcTypeCode(org.hibernate.type.SqlTypes.JSON)
    private List<String> authors;

    @Column(name = "publication_year")
    private Integer publicationYear;

    @Column(name = "average_rating")
    private Double averageRating;

    @Column(name = "image_url")
    private String imageUrl;

    @Column(name = "ratings_count")
    private Integer ratingsCount;

    @Column(name = "created_at", updatable = false)
    private Instant createdAt;

    @Column(name = "updated_at")
    private Instant updatedAt;

    @Column(name = "deleted_at")
    private Instant deletedAt;

    // Timestamps are managed by JPA lifecycle callbacks, not the database.
    @jakarta.persistence.PrePersist
    protected void onCreate() {
        Instant now = Instant.now();
        this.createdAt = now;
        this.updatedAt = now;
    }

    @PreUpdate
    protected void onUpdate() {
        this.updatedAt = Instant.now();
    }

    /** Document id used in the Typesense collection ("book_&lt;dbId&gt;"). */
    public String getTypesenseId() {
        return "book_" + id;
    }
}
diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/repository/BookRepository.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/repository/BookRepository.java new file mode 100644 index 0000000..6fbe8f9 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/repository/BookRepository.java @@ -0,0 +1,26 @@
package org.typesense.full_text_search.repository;

import java.time.Instant;
import java.util.List;
import java.util.Optional;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.typesense.full_text_search.model.Book;

public interface BookRepository extends JpaRepository<Book, Long> {

    /** Pages of non-deleted books changed strictly after {@code since}, oldest change first. */
    Page<Book> findByUpdatedAtAfterOrderByUpdatedAtAsc(Instant since, Pageable pageable);

    long countByUpdatedAtAfter(Instant since);

    @Query("SELECT MAX(b.updatedAt) FROM Book b")
    Optional<Instant> findLatestUpdatedAt();

    // Native query on purpose: it bypasses @SQLRestriction so soft-deleted rows are visible.
    @Query(value = "SELECT * FROM books WHERE deleted_at IS NOT NULL AND updated_at > :since",
            nativeQuery = true)
    List<Book> findDeletedBooksSince(@Param("since") Instant since);
}
diff --git
a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/scheduler/TypesenseSyncScheduler.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/scheduler/TypesenseSyncScheduler.java new file mode 100644 index 0000000..6645d4e --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/scheduler/TypesenseSyncScheduler.java @@ -0,0 +1,82 @@
package org.typesense.full_text_search.scheduler;

import java.time.Instant;
import java.util.concurrent.atomic.AtomicBoolean;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.typesense.full_text_search.service.BookService;
import org.typesense.full_text_search.service.TypesenseService;

/**
 * Drives the background DB → Typesense sync: one bootstrap pass at startup,
 * then a fixed-delay incremental pass.
 */
@Component
public class TypesenseSyncScheduler {

    private static final Logger log = LoggerFactory.getLogger(TypesenseSyncScheduler.class);

    private final TypesenseService typesenseService;
    private final BookService bookService;

    // Gate: periodic passes are no-ops until the bootstrap pass has run.
    private final AtomicBoolean initialSyncDone = new AtomicBoolean(false);

    public TypesenseSyncScheduler(TypesenseService typesenseService, BookService bookService) {
        this.typesenseService = typesenseService;
        this.bookService = bookService;
    }

    /**
     * Bootstrap pass. Ensures the collection exists (aborting the worker on
     * failure), seeds the sync watermark from the DB when Typesense already
     * holds documents, then runs one incremental sync. The gate is opened even
     * if that sync fails, so periodic passes can retry.
     */
    @EventListener(ApplicationReadyEvent.class)
    public void onApplicationReady() {
        try {
            typesenseService.initializeCollection();
        } catch (Exception e) {
            log.error("Failed to initialize Typesense collection: {}", e.getMessage());
            return;
        }

        typesenseService.setSyncWorkerRunning(true);

        try {
            boolean alreadyPopulated = typesenseService.collectionDocumentCount() > 0;
            if (alreadyPopulated) {
                bookService.findLatestUpdatedAt().ifPresent(latest -> {
                    typesenseService.setLastSyncTime(latest);
                    log.info("Typesense already populated, seeding sync time from DB: {}", latest);
                });
            } else {
                log.info("Typesense collection is empty, will run full sync");
            }

            Instant since = typesenseService.getLastSyncTime();
            Instant completedAt = typesenseService.syncBooksToTypesense(since);
            typesenseService.setLastSyncTime(completedAt);
            log.info("Initial sync completed at {}", completedAt);
        } catch (Exception e) {
            log.error("Initial sync failed: {}", e.getMessage());
        }

        initialSyncDone.set(true);
    }

    /**
     * Fixed-delay incremental pass: upserts first, then soft-delete cleanup,
     * each guarded independently so one failure does not block the other.
     */
    @Scheduled(fixedDelayString = "${typesense.sync.interval-ms}")
    public void periodicSync() {
        if (!initialSyncDone.get()) {
            return;
        }

        log.info("Running periodic sync...");
        Instant since = typesenseService.getLastSyncTime();

        try {
            typesenseService.setLastSyncTime(typesenseService.syncBooksToTypesense(since));
        } catch (Exception e) {
            log.error("Periodic sync failed: {}", e.getMessage());
        }

        try {
            typesenseService.syncSoftDeletesToTypesense(since);
        } catch (Exception e) {
            log.error("Soft delete sync failed: {}", e.getMessage());
        }
    }
}
diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/service/BookService.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/service/BookService.java new file mode 100644 index 0000000..80d36d8 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/service/BookService.java @@ -0,0 +1,70 @@
package org.typesense.full_text_search.service;

import java.time.Instant;
import java.util.List;
import java.util.Optional;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import
org.typesense.full_text_search.model.Book; +import org.typesense.full_text_search.repository.BookRepository; + +@Service +public class BookService { + + private final BookRepository bookRepository; + + public BookService(BookRepository bookRepository) { + this.bookRepository = bookRepository; + } + + @Transactional + public Book save(Book book) { + return bookRepository.save(book); + } + + @Transactional(readOnly = true) + public Optional findById(Long id) { + return bookRepository.findById(id); + } + + @Transactional(readOnly = true) + public Page findAll(int page, int pageSize) { + return bookRepository.findAll( + PageRequest.of(page - 1, pageSize, Sort.by("id").ascending())); + } + + @Transactional(readOnly = true) + public long count() { + return bookRepository.count(); + } + + @Transactional + public void deleteById(Long id) { + bookRepository.deleteById(id); + } + + @Transactional(readOnly = true) + public Page findUpdatedSince(Instant since, int page, int pageSize) { + return bookRepository.findByUpdatedAtAfterOrderByUpdatedAtAsc( + since, PageRequest.of(page - 1, pageSize)); + } + + @Transactional(readOnly = true) + public long countUpdatedSince(Instant since) { + return bookRepository.countByUpdatedAtAfter(since); + } + + @Transactional(readOnly = true) + public Optional findLatestUpdatedAt() { + return bookRepository.findLatestUpdatedAt(); + } + + @Transactional(readOnly = true) + public List findDeletedSince(Instant since) { + return bookRepository.findDeletedBooksSince(since); + } +} diff --git a/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/service/TypesenseService.java b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/service/TypesenseService.java new file mode 100644 index 0000000..ed226fa --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/java/org/typesense/full_text_search/service/TypesenseService.java @@ -0,0 +1,266 @@ +package 
org.typesense.full_text_search.service; + +import java.time.Instant; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.data.domain.Page; +import org.springframework.scheduling.annotation.Async; +import org.springframework.stereotype.Service; +import org.typesense.api.Client; +import org.typesense.api.FieldTypes; +import org.typesense.model.CollectionResponse; +import org.typesense.model.CollectionSchema; +import org.typesense.model.DeleteDocumentsParameters; +import org.typesense.model.Field; +import org.typesense.model.ImportDocumentsParameters; +import org.typesense.model.IndexAction; +import org.typesense.model.SearchParameters; +import org.typesense.model.SearchResult; +import org.typesense.full_text_search.model.Book; + + +@Service +public class TypesenseService { + + private static final Logger log = LoggerFactory.getLogger(TypesenseService.class); + + private final Client client; + private final BookService bookService; + + @Value("${typesense.collection-name}") + private String collectionName; + + @Value("${typesense.sync.batch-size}") + private int batchSize; + + @Value("${typesense.sync.page-size}") + private int pageSize; + + @Value("${typesense.sync.enable-soft-delete}") + private boolean enableSoftDelete; + + private final AtomicReference lastSyncTime = new AtomicReference<>(Instant.EPOCH); + private final AtomicBoolean syncWorkerRunning = new AtomicBoolean(false); + + public TypesenseService(Client client, BookService bookService) { + this.client = client; + this.bookService = bookService; + } + + // --- Sync state accessors (thread-safe) --- + + public Instant getLastSyncTime() { + return lastSyncTime.get(); + } + + public void 
setLastSyncTime(Instant time) { + lastSyncTime.set(time); + } + + public boolean isSyncWorkerRunning() { + return syncWorkerRunning.get(); + } + + public void setSyncWorkerRunning(boolean running) { + syncWorkerRunning.set(running); + } + + // --- Collection management --- + + public void initializeCollection() throws Exception { + log.info("Initializing Typesense collection '{}'...", collectionName); + try { + client.collections(collectionName).retrieve(); + log.info("Collection '{}' already exists, skipping creation", collectionName); + } catch (Exception e) { + log.info("Collection '{}' not found, creating...", collectionName); + CollectionSchema schema = new CollectionSchema(); + schema.name(collectionName) + .fields(List.of( + new Field().name("title").type(FieldTypes.STRING).facet(false), + new Field().name("authors").type(FieldTypes.STRING_ARRAY).facet(true), + new Field().name("publication_year").type(FieldTypes.INT32).facet(true), + new Field().name("average_rating").type(FieldTypes.FLOAT).facet(true), + new Field().name("image_url").type(FieldTypes.STRING).facet(false), + new Field().name("ratings_count").type(FieldTypes.INT32).facet(true).sort(true) + )) + .defaultSortingField("ratings_count"); + client.collections().create(schema); + log.info("Collection '{}' created successfully", collectionName); + } + } + + public long collectionDocumentCount() { + try { + CollectionResponse response = client.collections(collectionName).retrieve(); + return response.getNumDocuments() != null ? 
response.getNumDocuments() : 0; + } catch (Exception e) { + return 0; + } + } + + // --- Search --- + + public SearchResult search(String query) throws Exception { + SearchParameters params = new SearchParameters() + .q(query) + .queryBy("title,authors") + .queryByWeights("2,1") + .facetBy("authors,publication_year,average_rating"); + return client.collections(collectionName).documents().search(params); + } + + // --- Incremental sync --- + + public Instant syncBooksToTypesense(Instant since) throws Exception { + log.info("Starting incremental sync since {}", since); + + long updatedCount = bookService.countUpdatedSince(since); + if (updatedCount == 0) { + log.info("No changes to sync"); + return Instant.now(); + } + + int totalPages = (int) Math.ceil((double) updatedCount / pageSize); + log.info("Found {} books to sync ({} pages)", updatedCount, totalPages); + + int totalSuccess = 0; + int totalFailure = 0; + + for (int page = 1; page <= totalPages; page++) { + Page books = bookService.findUpdatedSince(since, page, pageSize); + if (!books.hasContent()) break; + + log.info("Processing page {}/{} ({} books)", page, totalPages, books.getNumberOfElements()); + + String jsonl = booksToJsonl(books.getContent()); + ImportDocumentsParameters importParams = new ImportDocumentsParameters(); + importParams.action(IndexAction.UPSERT); + + String response = client.collections(collectionName).documents().import_(jsonl, importParams); + int[] counts = countImportResults(response); + totalSuccess += counts[0]; + totalFailure += counts[1]; + + log.info("Page {}/{}: {} succeeded, {} failed", page, totalPages, counts[0], counts[1]); + } + + Instant newSyncTime = Instant.now(); + log.info("Incremental sync completed: {} upserted, {} failed out of {} total", + totalSuccess, totalFailure, updatedCount); + return newSyncTime; + } + + // --- Soft delete sync --- + + public int syncSoftDeletesToTypesense(Instant since) throws Exception { + List deletedBooks = 
bookService.findDeletedSince(since); + if (deletedBooks.isEmpty()) return 0; + + String idFilter = deletedBooks.stream() + .map(Book::getTypesenseId) + .collect(Collectors.joining(",")); + String filterBy = "id:[" + idFilter + "]"; + + log.info("Deleting {} documents from Typesense", deletedBooks.size()); + + DeleteDocumentsParameters params = new DeleteDocumentsParameters(); + params.filterBy(filterBy); + client.collections(collectionName).documents().delete(params); + + log.info("Successfully deleted {} documents from Typesense", deletedBooks.size()); + return deletedBooks.size(); + } + + // --- Single document sync (for real-time CRUD operations) --- + + @Async("typesenseAsyncExecutor") + public void syncBookAsync(Book book) { + try { + client.collections(collectionName).documents().upsert(bookToDocument(book)); + setLastSyncTime(Instant.now()); + log.info("Synced book to Typesense: id={}, title={}", book.getId(), book.getTitle()); + } catch (Exception e) { + log.error("Async Typesense sync failed for book {}: {}", book.getId(), e.getMessage()); + } + } + + @Async("typesenseAsyncExecutor") + public void deleteBookAsync(Long bookId) { + try { + String documentId = "book_" + bookId; + client.collections(collectionName).documents(documentId).delete(); + setLastSyncTime(Instant.now()); + log.info("Deleted book from Typesense: id={}", bookId); + } catch (Exception e) { + log.error("Async Typesense deletion failed for book {}: {}", bookId, e.getMessage()); + } + } + + // --- Helpers --- + + private Map bookToDocument(Book book) { + Map doc = new HashMap<>(); + doc.put("id", book.getTypesenseId()); + doc.put("title", book.getTitle()); + doc.put("authors", book.getAuthors() != null ? book.getAuthors() : List.of()); + doc.put("publication_year", book.getPublicationYear() != null ? book.getPublicationYear() : 0); + doc.put("average_rating", book.getAverageRating() != null ? book.getAverageRating() : 0.0); + doc.put("image_url", book.getImageUrl() != null ? 
book.getImageUrl() : ""); + doc.put("ratings_count", book.getRatingsCount() != null ? book.getRatingsCount() : 0); + return doc; + } + + private String booksToJsonl(List books) { + return books.stream() + .map(this::bookToJsonLine) + .collect(Collectors.joining("\n")); + } + + private String bookToJsonLine(Book book) { + String authors = "[]"; + if (book.getAuthors() != null && !book.getAuthors().isEmpty()) { + authors = "[" + book.getAuthors().stream() + .map(a -> "\"" + escapeJson(a) + "\"") + .collect(Collectors.joining(",")) + "]"; + } + return "{" + + "\"id\":\"" + escapeJson(book.getTypesenseId()) + "\"," + + "\"title\":\"" + escapeJson(book.getTitle() != null ? book.getTitle() : "") + "\"," + + "\"authors\":" + authors + "," + + "\"publication_year\":" + (book.getPublicationYear() != null ? book.getPublicationYear() : 0) + "," + + "\"average_rating\":" + (book.getAverageRating() != null ? book.getAverageRating() : 0.0) + "," + + "\"image_url\":\"" + escapeJson(book.getImageUrl() != null ? book.getImageUrl() : "") + "\"," + + "\"ratings_count\":" + (book.getRatingsCount() != null ? 
book.getRatingsCount() : 0) + + "}"; + } + + private static String escapeJson(String value) { + if (value == null) return ""; + return value.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n", "\\n").replace("\r", "\\r"); + } + + private int[] countImportResults(String response) { + int success = 0, failure = 0; + if (response == null || response.isBlank()) return new int[]{success, failure}; + for (String line : response.split("\n")) { + if (line.contains("\"success\":true")) { + success++; + } else { + failure++; + if (failure <= 5) { + log.warn("Import error: {}", line); + } + } + } + return new int[]{success, failure}; + } +} diff --git a/typesense-springboot-full-text-search/src/main/resources/application.properties b/typesense-springboot-full-text-search/src/main/resources/application.properties new file mode 100644 index 0000000..4ba09e0 --- /dev/null +++ b/typesense-springboot-full-text-search/src/main/resources/application.properties @@ -0,0 +1,27 @@ +spring.application.name=full-text-search +server.port=${PORT:4000} + +# Database Configuration +spring.datasource.url=jdbc:postgresql://${DB_HOST}:${DB_PORT}/${DB_NAME} +spring.datasource.username=${DB_USER} +spring.datasource.password=${DB_PASSWORD} +spring.datasource.driver-class-name=org.postgresql.Driver + +# JPA / Hibernate +spring.jpa.hibernate.ddl-auto=${HIBERNATE_DDL_AUTO:update} +spring.jpa.open-in-view=false +spring.jpa.properties.hibernate.jdbc.time_zone=UTC + +# Typesense Configuration +typesense.host=${TYPESENSE_HOST} +typesense.port=${TYPESENSE_PORT} +typesense.protocol=${TYPESENSE_PROTOCOL} +typesense.api-key=${TYPESENSE_API_KEY} +typesense.collection-name=${TYPESENSE_COLLECTION_NAME} +typesense.connection-timeout-seconds=${TYPESENSE_CONNECTION_TIMEOUT} + +# Sync Configuration +typesense.sync.interval-ms=${TYPESENSE_SYNC_INTERVAL:60000} +typesense.sync.batch-size=${TYPESENSE_SYNC_BATCH_SIZE:1000} +typesense.sync.page-size=${TYPESENSE_SYNC_PAGE_SIZE:1000} 
typesense.sync.enable-soft-delete=${TYPESENSE_SYNC_ENABLE_SOFT_DELETE:true}
diff --git a/typesense-springboot-full-text-search/src/test/java/org/typesense/full_text_search/FullTextSearchApplicationTests.java b/typesense-springboot-full-text-search/src/test/java/org/typesense/full_text_search/FullTextSearchApplicationTests.java new file mode 100644 index 0000000..31059e2 --- /dev/null +++ b/typesense-springboot-full-text-search/src/test/java/org/typesense/full_text_search/FullTextSearchApplicationTests.java @@ -0,0 +1,14 @@
package org.typesense.full_text_search;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

/**
 * Smoke test: verifies the application class is on the classpath without
 * booting a Spring context (no DB or Typesense needed in CI).
 */
class FullTextSearchApplicationTests {

    @Test
    void applicationClassExists() {
        Assertions.assertNotNull(FullTextSearchApplication.class);
    }
}