commit a70e9b7a79f9daf07b64a256f95a5e52dd7bb502 Author: Andrewkydev Date: Thu Jan 15 22:38:46 2026 +0300 first commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a446190 --- /dev/null +++ b/.gitignore @@ -0,0 +1,43 @@ +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + + + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* + +.gradle +build/ + +# Ignore Gradle GUI config +gradle-app.setting + +# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) +!gradle-wrapper.jar + +# Cache of project +.gradletasknamecache + +# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 +# gradle/wrapper/gradle-wrapper.properties + +# End of https://mrkandreev.name/snippets/gitignore-generator/#Java,Gradle +/examples/ diff --git a/README.md b/README.md new file mode 100644 index 0000000..0bf3706 --- /dev/null +++ b/README.md @@ -0,0 +1,14 @@ +# Database Plugin (Lumi) + +SQL-oriented database plugin for Lumi with MySQL/PostgreSQL support, HikariCP pooling, +schema helpers, and a lightweight annotation-based ORM layer. 
+ +## Docs + +- `docs/overview.md` +- `docs/config.md` +- `docs/api-schema.md` +- `docs/api-query.md` +- `docs/orm.md` +- `docs/orm-query.md` +- `docs/orm-schema.md` diff --git a/build.gradle.kts b/build.gradle.kts new file mode 100644 index 0000000..6d2d7b5 --- /dev/null +++ b/build.gradle.kts @@ -0,0 +1,132 @@ +import java.io.BufferedReader +import java.io.InputStreamReader + + +plugins { + id("java") + id("java-library") + id("com.github.johnrengelman.shadow") version "8.1.1" + id("maven-publish") +} + +group = "com.andrewkydev" +version = "1.0-SNAPSHOT" + +val copyTo = "../server/plugins" +val copyFrom = "build/libs/${project.name}-${project.version}.jar" + +repositories { + mavenCentral() + + maven { + name = "luminiadevRepositorySnapshots" + url = uri("https://repo.luminiadev.com/snapshots") + } +} + +dependencies { + compileOnly("com.koshakmine:Lumi:1.4.0-SNAPSHOT") + implementation("com.zaxxer:HikariCP:5.1.0") + implementation("com.mysql:mysql-connector-j:8.3.0") + implementation("org.postgresql:postgresql:42.7.2") + implementation("com.google.code.gson:gson:2.11.0") + +} + + +tasks.build { +// finalizedBy(tasks.shadowJar) +} + +tasks.shadowJar { +// finalizedBy(tasks.named("copyToPath")) +} + + +tasks { + shadowJar { + + setProperty("zip64", true) + mergeServiceFiles() + archiveClassifier.set("") + archiveFileName.set("${project.name}-${project.version}.jar") + + dependencies { + exclude( + "**/**.properties", + "**/*.swp", + "addons/*", + "creativeitems*.json", + "*.dat", + "*.dat", + "*.xsd", + "structures/**", + "RuntimeBlockStatesExtras/**", + "recipes/**", + "cn/nukkit/**", + "co/aikar/timings/**", + "it/uni mi/dsi/fastutil/**", + "assets/org/apache/commons/math3/random/**", + ) + } + mergeServiceFiles() + } +} + +//tasks.register("copyToPath") { +// doLast { +// copy { +// from(copyFrom) +// into(copyTo) +// } +// } +// dependsOn(tasks.named("shadowJar")) +// finalizedBy(tasks.named("reloadServer")) +//} + +tasks.register("reloadServer") { + + doLast 
{ + val commands = listOf( + listOf("../server/mcrcon.exe", "-H", "127.0.0.1", "-p", "5MDRlOTFk1", "-w", "1", "fr"), + listOf("../server/mcrcon.exe", "-H", "127.0.0.1", "-p", "5MDRlOTFk1", "-w", "1", "reload again") + ) + + fun executeCommand(command: List) { + val processBuilder = ProcessBuilder(command) + val process = processBuilder.start() + val reader = + BufferedReader(InputStreamReader(process.inputStream)) + var line: String? + + while (reader.readLine().also { line = it } != null) { + println(line) + } + + process.waitFor() + } + + executeCommand(commands[0]) + + executeCommand(commands[1]) + executeCommand(commands[1]) + } + dependsOn(tasks.named("copyToPath")) +} + +publishing { + publications { + create("mavenJava") { + groupId = project.group.toString() + artifactId = project.name + version = project.version.toString() + + artifact(tasks.shadowJar.get()) { + classifier = null + } + } + } + repositories { + mavenLocal() + } +} diff --git a/docs/api-query.md b/docs/api-query.md new file mode 100644 index 0000000..f429cd9 --- /dev/null +++ b/docs/api-query.md @@ -0,0 +1,33 @@ +# Query API + +Raw SQL is available via `api.query()` with parameter binding. 
+ +## Execute + +```java +int rows = api.query().execute( + "UPDATE players SET level = level + 1 WHERE id = ?", + java.util.Collections.singletonList(1) +); +``` + +## Query + +```java +List names = api.query().query( + "SELECT name FROM players WHERE level >= ?", + java.util.Collections.singletonList(10), + rs -> rs.getString("name") +); +``` + +## Transactions + +```java +try (Transaction tx = api.beginTransaction()) { + tx.execute("UPDATE players SET level = level + 1 WHERE id = ?", java.util.Collections.singletonList(1)); + tx.commit(); +} catch (Exception ex) { + // rollback on error if needed +} +``` diff --git a/docs/api-schema.md b/docs/api-schema.md new file mode 100644 index 0000000..ddb3d94 --- /dev/null +++ b/docs/api-schema.md @@ -0,0 +1,35 @@ +# Schema API + +Schema helpers let you create and update tables programmatically. + +## Example: create table + +```java +import com.andrewkydev.database.schema.ColumnSpec; +import com.andrewkydev.database.schema.IndexSpec; +import com.andrewkydev.database.schema.TableSpec; + +TableSpec table = TableSpec.builder("players") + .column(ColumnSpec.builder("id", "BIGINT").primaryKey(true).autoIncrement(true).nullable(false).build()) + .column(ColumnSpec.builder("name", "VARCHAR(32)").nullable(false).build()) + .index(new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false)) + .build(); + +api.schema().createTable(table); +``` + +## API Methods + +```java +api.schema().createDatabase("primalix"); +api.schema().dropDatabase("primalix"); +api.schema().createTable(spec); +api.schema().dropTable("players"); +api.schema().addColumn("players", ColumnSpec.builder("level", "INT").build()); +api.schema().updateColumn("players", ColumnSpec.builder("level", "INT").nullable(false).build()); +api.schema().dropColumn("players", "level"); +api.schema().addIndex("players", new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false)); +api.schema().dropIndex("players", "players_name_idx"); +``` + +All 
methods also have async variants returning `CompletableFuture`. diff --git a/docs/config.md b/docs/config.md new file mode 100644 index 0000000..c900522 --- /dev/null +++ b/docs/config.md @@ -0,0 +1,40 @@ +# Configuration + +The default config file is `config.yml`: + +```yaml +driver: "mysql" +host: "localhost" +port: 3306 +database: "primalix" +username: "root" +password: "" +adminDatabase: "postgres" +autoTransactions: true + +pool: + maxPoolSize: 10 + minIdle: 2 + connectionTimeoutMs: 30000 + idleTimeoutMs: 600000 + maxLifetimeMs: 1800000 +``` + +## Fields + +- `driver`: `mysql` or `postgres`. +- `host`: database host. +- `port`: database port. +- `database`: default database name for the pool. +- `username`: login user. +- `password`: login password. +- `adminDatabase`: PostgreSQL admin database used for create/drop database. +- `autoTransactions`: wrap schema operations in a transaction when possible. + +## Pool Options + +- `maxPoolSize`: maximum connections in pool. +- `minIdle`: minimum idle connections. +- `connectionTimeoutMs`: connection timeout. +- `idleTimeoutMs`: idle timeout. +- `maxLifetimeMs`: maximum connection lifetime. 
diff --git a/docs/examples.md b/docs/examples.md new file mode 100644 index 0000000..2926ea2 --- /dev/null +++ b/docs/examples.md @@ -0,0 +1,91 @@ +# Feature Examples + +## 1) Auto ID after insert + +```java +PlayerModel player = new PlayerModel(); +player.setName("Steve"); +api.orm().insert(player); +long id = player.getId(); +``` + +## 2) snake_case default + +```java +class PlayerStats { + private int totalKills; +} +// table: player_stats, column: total_kills +``` + +## 3) findOneWhere & deleteWhere + +```java +PlayerModel one = api.orm().findOneWhere( + PlayerModel.class, + "level >= ?", + java.util.Collections.singletonList(10) +); + +int deleted = api.orm().deleteWhere( + PlayerModel.class, + "level < ?", + java.util.Collections.singletonList(1) +); +``` + +## 4) @DbColumn(length/unique/nullable) + +```java +@DbColumn(length = 32, unique = true, nullable = false) +private String username; +``` + +## 5) findWhere with order/limit/offset + +```java +List page = api.orm().findWhere( + PlayerModel.class, + "level >= ?", + java.util.Collections.singletonList(10), + "level DESC", + 10, + 0 +); +``` + +## 6) Query Builder + +```java +List top = api.orm().query(PlayerModel.class) + .where("level >= ?", 10) + .orderBy("level DESC") + .limit(10) + .list(); +``` + +## 7) Query Builder + Fluent Conditions + Join + +```java +import static com.andrewkydev.database.orm.Conditions.*; + +List rows = api.orm().query(PlayerModel.class) + .select("players.*") + .join("LEFT JOIN clans ON clans.id = players.clan_id") + .where(eq("players.status", "ACTIVE").and(gt("players.level", 10))) + .groupBy("players.id") + .having("COUNT(clans.id) > ?", 0) + .orderBy("players.level DESC") + .limit(10, 0) + .list(); +``` + +## 8) Custom types (UUID/JSON) + +```java +@DbJson +private java.util.Map metadata; + +// UUID is built-in. Custom type example: +api.orm().registerAdapter(MyType.class, new TypeAdapter() { ... 
}); +``` diff --git a/docs/orm-query.md b/docs/orm-query.md new file mode 100644 index 0000000..1f6af84 --- /dev/null +++ b/docs/orm-query.md @@ -0,0 +1,43 @@ +# ORM Query Builder + +Fluent query builder on top of the ORM. + +## Basic + +```java +List top = api.orm().query(PlayerModel.class) + .where("level >= ?", 10) + .orderBy("level DESC") + .limit(10) + .list(); +``` + +## Fluent Conditions + +```java +import static com.andrewkydev.database.orm.Conditions.*; + +List players = api.orm().query(PlayerModel.class) + .where(eq("status", "ACTIVE").and(gt("level", 10))) + .orderBy("level DESC") + .list(); +``` + +## Joins + Group By + Having + +```java +List rows = api.orm().query(PlayerModel.class) + .select("players.*") + .join("LEFT JOIN clans ON clans.id = players.clan_id") + .where("players.level >= ?", 10) + .groupBy("players.id") + .having("COUNT(clans.id) > ?", 0) + .orderBy("players.level DESC") + .limit(10, 0) + .list(); +``` + +## Select Columns + +If you select a subset of columns, only those are mapped. +Missing fields keep default values. diff --git a/docs/orm-schema.md b/docs/orm-schema.md new file mode 100644 index 0000000..3653970 --- /dev/null +++ b/docs/orm-schema.md @@ -0,0 +1,30 @@ +# ORM Schema Generation + +`OrmSchema` builds a `TableSpec` from annotated entities. 
+ +## Example + +```java +import com.andrewkydev.database.orm.OrmSchema; +import com.andrewkydev.database.schema.SqlDialect; +import com.andrewkydev.database.schema.TableSpec; + +TableSpec spec = OrmSchema.fromEntity(PlayerModel.class, SqlDialect.MYSQL); +api.schema().createTable(spec); +``` + +## Type Mapping + +Default mapping (when `@DbColumn(type = "...")` is not specified): + +- `String` -> `VARCHAR(length)` +- `int`/`Integer` -> `INT` +- `long`/`Long` -> `BIGINT` +- `short`/`Short` -> `SMALLINT` +- `boolean`/`Boolean` -> `BOOLEAN` (PostgreSQL) or `TINYINT(1)` (MySQL) +- `float`/`Float` -> `FLOAT` +- `double`/`Double` -> `DOUBLE` +- `UUID` -> `UUID` (PostgreSQL) or `CHAR(36)` (MySQL) +- `@DbJson` -> `JSON` (MySQL) or `JSONB` (PostgreSQL) + +`@DbColumn(length, unique, nullable)` is also applied. diff --git a/docs/orm.md b/docs/orm.md new file mode 100644 index 0000000..4ff2045 --- /dev/null +++ b/docs/orm.md @@ -0,0 +1,119 @@ +# ORM + +The ORM is lightweight: it maps fields to columns and does not manage complex +relationships. It supports sync and async operations. + +## Annotations + +- `@DbEntity(table = "players")` sets table name. Default: snake_case class name. +- `@DbColumn(name = "username")` overrides column name. +- `@DbColumn(type = "VARCHAR(32)")` overrides SQL type for schema generation. +- `@DbColumn(length = 32, unique = true, nullable = false)` influences schema generation. +- `@DbId(autoIncrement = true)` marks the primary key. +- `@DbJson` stores the field as JSON. +- `@DbTransient` ignores the field. 
+ +## Example entity + +```java +@DbEntity(table = "players") +public class PlayerModel { + @DbId(autoIncrement = true) + private long id; + + @DbColumn(name = "username", nullable = false, unique = true, length = 32) + private String name; + + @DbColumn + private int level; + + @DbJson + private java.util.Map metadata; + + @DbTransient + private String temp; + + public PlayerModel() { + } +} +``` + +## CRUD + +```java +EntityManager orm = api.orm(); + +orm.insert(entity); +orm.update(entity); +orm.delete(entity); +PlayerModel player = orm.findById(PlayerModel.class, 1L); +List all = orm.findAll(PlayerModel.class); +PlayerModel one = orm.findOneWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10)); +List many = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10)); +List paged = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10), "level DESC", 10, 0); +long count = orm.count(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10)); +boolean exists = orm.exists(PlayerModel.class, "username = ?", java.util.Collections.singletonList("Steve")); +int deleted = orm.deleteWhere(PlayerModel.class, "level < ?", java.util.Collections.singletonList(1)); +``` + +## Query Builder + +```java +EntityManager orm = api.orm(); + +List top = orm.query(PlayerModel.class) + .where("level >= ?", 10) + .orderBy("level DESC") + .limit(10) + .list(); + +boolean exists = orm.query(PlayerModel.class) + .where("username = ?", "Steve") + .exists(); +``` + +## Fluent Conditions + +```java +import static com.andrewkydev.database.orm.Conditions.*; + +List players = orm.query(PlayerModel.class) + .where(eq("status", "ACTIVE").and(gt("level", 10))) + .list(); +``` + +## Joins, Group By, Having + +```java +List rows = orm.query(PlayerModel.class) + .select("players.*") + .join("LEFT JOIN clans ON clans.id = players.clan_id") + .groupBy("players.id") + .having("COUNT(clans.id) > ?", 0) + 
.list(); +``` + +Selecting a subset of columns maps only those fields; missing fields keep default values. + +## Auto ID on insert + +If `@DbId(autoIncrement = true)` is used and the id is empty (0 or null), +`insert()` will read generated keys and set the id back on the entity. + +## Custom Type Adapters + +UUID mapping is built-in. For other types: + +```java +orm.registerAdapter(SomeType.class, new TypeAdapter() { + @Override + public Object toDatabase(SomeType value) { + return value == null ? null : value.toString(); + } + + @Override + public SomeType fromDatabase(Object value) { + return value == null ? null : SomeType.parse(value.toString()); + } +}); +``` diff --git a/docs/overview.md b/docs/overview.md new file mode 100644 index 0000000..9cc9268 --- /dev/null +++ b/docs/overview.md @@ -0,0 +1,40 @@ +# Overview + +Database is a Lumi plugin that provides: + +- HikariCP-backed MySQL/PostgreSQL connections. +- Schema helpers for creating/updating tables and indexes. +- Raw SQL queries with parameter binding. +- Lightweight ORM with annotations, fluent query builder, and async support. + +## Install + +1. Build the jar with Gradle. +2. Put the jar into your server plugins folder. +3. Start the server once to generate `config.yml`. + +## Accessing the API + +```java +import com.andrewkydev.database.DatabaseProvider; +import com.andrewkydev.database.DatabaseApi; + +DatabaseApi api = DatabaseProvider.get(); +``` + +## Feature Highlights + +### Auto ID on insert + +If an entity uses `@DbId(autoIncrement = true)` and the id value is empty (0 or null), +`insert()` will fetch generated keys and assign the id back to the object. + +### Snake case by default + +If `@DbEntity` and `@DbColumn` names are not set, class and field names are converted to +snake_case. + +### Query helpers + +`findOneWhere`, `deleteWhere`, and `findWhere` with sorting/limit/offset are available +in the ORM. 
diff --git a/docs/ru/api-query.md b/docs/ru/api-query.md new file mode 100644 index 0000000..3b2fcdb --- /dev/null +++ b/docs/ru/api-query.md @@ -0,0 +1,33 @@ +# Query API (RU) + +Raw SQL через `api.query()` с биндингом параметров. + +## Execute + +```java +int rows = api.query().execute( + "UPDATE players SET level = level + 1 WHERE id = ?", + java.util.Collections.singletonList(1) +); +``` + +## Query + +```java +List names = api.query().query( + "SELECT name FROM players WHERE level >= ?", + java.util.Collections.singletonList(10), + rs -> rs.getString("name") +); +``` + +## Транзакции + +```java +try (Transaction tx = api.beginTransaction()) { + tx.execute("UPDATE players SET level = level + 1 WHERE id = ?", java.util.Collections.singletonList(1)); + tx.commit(); +} catch (Exception ex) { + // rollback при ошибке +} +``` diff --git a/docs/ru/api-schema.md b/docs/ru/api-schema.md new file mode 100644 index 0000000..e59c0dd --- /dev/null +++ b/docs/ru/api-schema.md @@ -0,0 +1,35 @@ +# Schema API (RU) + +Помощники для создания и обновления таблиц. 
+ +## Пример: create table + +```java +import com.andrewkydev.database.schema.ColumnSpec; +import com.andrewkydev.database.schema.IndexSpec; +import com.andrewkydev.database.schema.TableSpec; + +TableSpec table = TableSpec.builder("players") + .column(ColumnSpec.builder("id", "BIGINT").primaryKey(true).autoIncrement(true).nullable(false).build()) + .column(ColumnSpec.builder("name", "VARCHAR(32)").nullable(false).build()) + .index(new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false)) + .build(); + +api.schema().createTable(table); +``` + +## Методы + +```java +api.schema().createDatabase("primalix"); +api.schema().dropDatabase("primalix"); +api.schema().createTable(spec); +api.schema().dropTable("players"); +api.schema().addColumn("players", ColumnSpec.builder("level", "INT").build()); +api.schema().updateColumn("players", ColumnSpec.builder("level", "INT").nullable(false).build()); +api.schema().dropColumn("players", "level"); +api.schema().addIndex("players", new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false)); +api.schema().dropIndex("players", "players_name_idx"); +``` + +У всех методов есть async версии с `CompletableFuture`. diff --git a/docs/ru/config.md b/docs/ru/config.md new file mode 100644 index 0000000..edf23d7 --- /dev/null +++ b/docs/ru/config.md @@ -0,0 +1,40 @@ +# Конфигурация + +Файл `config.yml`: + +```yaml +driver: "mysql" +host: "localhost" +port: 3306 +database: "primalix" +username: "root" +password: "" +adminDatabase: "postgres" +autoTransactions: true + +pool: + maxPoolSize: 10 + minIdle: 2 + connectionTimeoutMs: 30000 + idleTimeoutMs: 600000 + maxLifetimeMs: 1800000 +``` + +## Поля + +- `driver`: `mysql` или `postgres`. +- `host`: адрес БД. +- `port`: порт БД. +- `database`: основная база. +- `username`: логин. +- `password`: пароль. +- `adminDatabase`: PostgreSQL админ‑база для create/drop database. +- `autoTransactions`: оборачивать schema операции в транзакцию. 
+ +## Pool + +- `maxPoolSize`: максимум соединений. +- `minIdle`: минимум idle. +- `connectionTimeoutMs`: таймаут подключения. +- `idleTimeoutMs`: таймаут простоя. +- `maxLifetimeMs`: максимальная жизнь соединения. diff --git a/docs/ru/orm-query.md b/docs/ru/orm-query.md new file mode 100644 index 0000000..9acc2f6 --- /dev/null +++ b/docs/ru/orm-query.md @@ -0,0 +1,43 @@ +# ORM Query Builder (RU) + +Fluent builder над ORM. + +## База + +```java +List top = api.orm().query(PlayerModel.class) + .where("level >= ?", 10) + .orderBy("level DESC") + .limit(10) + .list(); +``` + +## Fluent Conditions + +```java +import static com.andrewkydev.database.orm.Conditions.*; + +List players = api.orm().query(PlayerModel.class) + .where(eq("status", "ACTIVE").and(gt("level", 10))) + .orderBy("level DESC") + .list(); +``` + +## Join + Group By + Having + +```java +List rows = api.orm().query(PlayerModel.class) + .select("players.*") + .join("LEFT JOIN clans ON clans.id = players.clan_id") + .where("players.level >= ?", 10) + .groupBy("players.id") + .having("COUNT(clans.id) > ?", 0) + .orderBy("players.level DESC") + .limit(10, 0) + .list(); +``` + +## Select Columns + +Если выбираете часть колонок, маппятся только они. +Остальные поля остаются дефолтными. diff --git a/docs/ru/orm-schema.md b/docs/ru/orm-schema.md new file mode 100644 index 0000000..f92d7fb --- /dev/null +++ b/docs/ru/orm-schema.md @@ -0,0 +1,30 @@ +# ORM Schema (RU) + +`OrmSchema` строит `TableSpec` на основе аннотаций. 
+ +## Пример + +```java +import com.andrewkydev.database.orm.OrmSchema; +import com.andrewkydev.database.schema.SqlDialect; +import com.andrewkydev.database.schema.TableSpec; + +TableSpec spec = OrmSchema.fromEntity(PlayerModel.class, SqlDialect.MYSQL); +api.schema().createTable(spec); +``` + +## Маппинг типов + +По умолчанию: + +- `String` -> `VARCHAR(length)` +- `int`/`Integer` -> `INT` +- `long`/`Long` -> `BIGINT` +- `short`/`Short` -> `SMALLINT` +- `boolean`/`Boolean` -> `BOOLEAN` (PostgreSQL) или `TINYINT(1)` (MySQL) +- `float`/`Float` -> `FLOAT` +- `double`/`Double` -> `DOUBLE` +- `UUID` -> `UUID` (PostgreSQL) или `CHAR(36)` (MySQL) +- `@DbJson` -> `JSON` (MySQL) или `JSONB` (PostgreSQL) + +`@DbColumn(length, unique, nullable)` применяется автоматически. diff --git a/docs/ru/orm.md b/docs/ru/orm.md new file mode 100644 index 0000000..27cccc6 --- /dev/null +++ b/docs/ru/orm.md @@ -0,0 +1,119 @@ +# ORM (RU) + +Легковесный ORM: поля мапятся в колонки, без сложных связей. +Есть sync и async версии. + +## Аннотации + +- `@DbEntity(table = "players")` - имя таблицы. По умолчанию snake_case. +- `@DbColumn(name = "username")` - имя колонки. +- `@DbColumn(type = "VARCHAR(32)")` - SQL тип для генерации схемы. +- `@DbColumn(length = 32, unique = true, nullable = false)` - влияет на schema генерацию. +- `@DbId(autoIncrement = true)` - primary key. +- `@DbJson` - JSON поле. +- `@DbTransient` - игнорировать поле. 
+ +## Пример сущности + +```java +@DbEntity(table = "players") +public class PlayerModel { + @DbId(autoIncrement = true) + private long id; + + @DbColumn(name = "username", nullable = false, unique = true, length = 32) + private String name; + + @DbColumn + private int level; + + @DbJson + private java.util.Map metadata; + + @DbTransient + private String temp; + + public PlayerModel() { + } +} +``` + +## CRUD + +```java +EntityManager orm = api.orm(); + +orm.insert(entity); +orm.update(entity); +orm.delete(entity); +PlayerModel player = orm.findById(PlayerModel.class, 1L); +List all = orm.findAll(PlayerModel.class); +PlayerModel one = orm.findOneWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10)); +List many = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10)); +List paged = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10), "level DESC", 10, 0); +long count = orm.count(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10)); +boolean exists = orm.exists(PlayerModel.class, "username = ?", java.util.Collections.singletonList("Steve")); +int deleted = orm.deleteWhere(PlayerModel.class, "level < ?", java.util.Collections.singletonList(1)); +``` + +## Query Builder + +```java +EntityManager orm = api.orm(); + +List top = orm.query(PlayerModel.class) + .where("level >= ?", 10) + .orderBy("level DESC") + .limit(10) + .list(); + +boolean exists = orm.query(PlayerModel.class) + .where("username = ?", "Steve") + .exists(); +``` + +## Fluent Conditions + +```java +import static com.andrewkydev.database.orm.Conditions.*; + +List players = orm.query(PlayerModel.class) + .where(eq("status", "ACTIVE").and(gt("level", 10))) + .list(); +``` + +## Join, Group By, Having + +```java +List rows = orm.query(PlayerModel.class) + .select("players.*") + .join("LEFT JOIN clans ON clans.id = players.clan_id") + .groupBy("players.id") + .having("COUNT(clans.id) > ?", 0) + 
.list(); +``` + +Если выбрать часть колонок, маппятся только они, остальные остаются дефолтными. + +## Auto ID + +Если `@DbId(autoIncrement = true)` и id пустой (0/null), +`insert()` получает generated keys и прописывает id в объект. + +## Type Adapters + +UUID уже поддержан. Для других типов: + +```java +orm.registerAdapter(SomeType.class, new TypeAdapter() { + @Override + public Object toDatabase(SomeType value) { + return value == null ? null : value.toString(); + } + + @Override + public SomeType fromDatabase(Object value) { + return value == null ? null : SomeType.parse(value.toString()); + } +}); +``` diff --git a/docs/ru/overview.md b/docs/ru/overview.md new file mode 100644 index 0000000..ec42ff6 --- /dev/null +++ b/docs/ru/overview.md @@ -0,0 +1,34 @@ +# Overview (RU) + +Database - SQL plugin for Lumi with MySQL/PostgreSQL, HikariCP pooling, +schema helpers, raw SQL, and a lightweight ORM with fluent query builder. + +## Установка + +1. Соберите jar через Gradle. +2. Поместите jar в папку plugins сервера. +3. Запустите сервер один раз для генерации `config.yml`. + +## Доступ к API + +```java +import com.andrewkydev.database.DatabaseProvider; +import com.andrewkydev.database.DatabaseApi; + +DatabaseApi api = DatabaseProvider.get(); +``` + +## Основные возможности + +### Auto ID после insert + +Если у поля есть `@DbId(autoIncrement = true)` и id пустой (0 или null), +`insert()` получает generated keys и прописывает id в объект. + +### Snake case по умолчанию + +Если не задано `@DbEntity`/`@DbColumn`, имена берутся в snake_case. + +### Удобные ORM методы + +Доступны `findOneWhere`, `deleteWhere` и `findWhere` с сортировкой/лимитом/offset. 
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..1b33c55 Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..ca025c8 --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,7 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-bin.zip +networkTimeout=10000 +validateDistributionUrl=true +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew new file mode 100644 index 0000000..23d15a9 --- /dev/null +++ b/gradlew @@ -0,0 +1,251 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. 
+while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH="\\\"\\\"" + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 
+ # shellcheck disable=SC2039,SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..db3a6ac --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,94 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. 
+@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 
1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH= + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/settings.gradle.kts b/settings.gradle.kts new file mode 100644 index 0000000..5522c4c --- /dev/null +++ b/settings.gradle.kts @@ -0,0 +1 @@ +rootProject.name = "Database" \ No newline at end of file diff --git a/src/main/java/com/andrewkydev/Loader.java b/src/main/java/com/andrewkydev/Loader.java new file mode 100644 index 0000000..de0316f --- /dev/null +++ b/src/main/java/com/andrewkydev/Loader.java @@ -0,0 +1,62 @@ +package com.andrewkydev; + +import cn.nukkit.plugin.PluginBase; +import com.andrewkydev.database.DatabaseApi; +import com.andrewkydev.database.DatabaseProvider; +import com.andrewkydev.database.config.DatabaseConfig; +import com.andrewkydev.database.config.DatabaseConfigLoader; +import com.andrewkydev.database.internal.DatabaseImpl; +import com.andrewkydev.database.internal.JdbcUrlBuilder; +import com.andrewkydev.database.schema.SqlDialect; +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; + +public class Loader extends PluginBase { + private DatabaseApi databaseApi; + private HikariDataSource dataSource; + + @Override + public void onLoad() { + super.onLoad(); + } + + @Override + 
public void onEnable() { + super.onEnable(); + saveDefaultConfig(); + reloadConfig(); + + DatabaseConfig config = DatabaseConfigLoader.load(this); + HikariConfig hikariConfig = new HikariConfig(); + hikariConfig.setJdbcUrl(JdbcUrlBuilder.build(config.dialect(), config.host(), config.port(), config.database())); + hikariConfig.setUsername(config.username()); + hikariConfig.setPassword(config.password()); + hikariConfig.setDriverClassName(driverClassName(config.dialect())); + hikariConfig.setMaximumPoolSize(config.pool().maxPoolSize()); + hikariConfig.setMinimumIdle(config.pool().minIdle()); + hikariConfig.setConnectionTimeout(config.pool().connectionTimeoutMs()); + hikariConfig.setIdleTimeout(config.pool().idleTimeoutMs()); + hikariConfig.setMaxLifetime(config.pool().maxLifetimeMs()); + + dataSource = new HikariDataSource(hikariConfig); + databaseApi = new DatabaseImpl(dataSource, config); + DatabaseProvider.set(databaseApi); + } + + @Override + public void onDisable() { + super.onDisable(); + if (databaseApi != null) { + databaseApi.close(); + databaseApi = null; + } + dataSource = null; + DatabaseProvider.set(null); + } + + private String driverClassName(SqlDialect dialect) { + return dialect == SqlDialect.POSTGRESQL + ? 
"org.postgresql.Driver" + : "com.mysql.cj.jdbc.Driver"; + } +} diff --git a/src/main/java/com/andrewkydev/database/DatabaseApi.java b/src/main/java/com/andrewkydev/database/DatabaseApi.java new file mode 100644 index 0000000..874d5bf --- /dev/null +++ b/src/main/java/com/andrewkydev/database/DatabaseApi.java @@ -0,0 +1,23 @@ +package com.andrewkydev.database; + +import com.andrewkydev.database.query.QueryRunner; +import com.andrewkydev.database.query.Transaction; +import com.andrewkydev.database.schema.Schema; +import com.andrewkydev.database.orm.EntityManager; +import java.util.concurrent.CompletableFuture; + +public interface DatabaseApi extends AutoCloseable { + + Schema schema(); + + QueryRunner query(); + + EntityManager orm(); + + Transaction beginTransaction(); + + CompletableFuture beginTransactionAsync(); + + @Override + void close(); +} diff --git a/src/main/java/com/andrewkydev/database/DatabaseProvider.java b/src/main/java/com/andrewkydev/database/DatabaseProvider.java new file mode 100644 index 0000000..438568d --- /dev/null +++ b/src/main/java/com/andrewkydev/database/DatabaseProvider.java @@ -0,0 +1,20 @@ +package com.andrewkydev.database; + +public final class DatabaseProvider { + private static volatile DatabaseApi api; + + private DatabaseProvider() { + } + + public static void set(DatabaseApi api) { + DatabaseProvider.api = api; + } + + public static DatabaseApi get() { + DatabaseApi current = api; + if (current == null) { + throw new IllegalStateException("DatabaseApi is not initialized"); + } + return current; + } +} diff --git a/src/main/java/com/andrewkydev/database/config/DatabaseConfig.java b/src/main/java/com/andrewkydev/database/config/DatabaseConfig.java new file mode 100644 index 0000000..4798257 --- /dev/null +++ b/src/main/java/com/andrewkydev/database/config/DatabaseConfig.java @@ -0,0 +1,115 @@ +package com.andrewkydev.database.config; + +import com.andrewkydev.database.schema.SqlDialect; + +public final class DatabaseConfig { + 
private final SqlDialect dialect; + private final String host; + private final int port; + private final String database; + private final String username; + private final String password; + private final String adminDatabase; + private final PoolConfig pool; + private final boolean autoTransactions; + + public DatabaseConfig( + SqlDialect dialect, + String host, + int port, + String database, + String username, + String password, + String adminDatabase, + PoolConfig pool, + boolean autoTransactions + ) { + this.dialect = dialect; + this.host = host; + this.port = port; + this.database = database; + this.username = username; + this.password = password; + this.adminDatabase = adminDatabase; + this.pool = pool; + this.autoTransactions = autoTransactions; + } + + public SqlDialect dialect() { + return dialect; + } + + public String host() { + return host; + } + + public int port() { + return port; + } + + public String database() { + return database; + } + + public String username() { + return username; + } + + public String password() { + return password; + } + + public String adminDatabase() { + return adminDatabase; + } + + public PoolConfig pool() { + return pool; + } + + public boolean autoTransactions() { + return autoTransactions; + } + + public static final class PoolConfig { + private final int maxPoolSize; + private final int minIdle; + private final long connectionTimeoutMs; + private final long idleTimeoutMs; + private final long maxLifetimeMs; + + public PoolConfig( + int maxPoolSize, + int minIdle, + long connectionTimeoutMs, + long idleTimeoutMs, + long maxLifetimeMs + ) { + this.maxPoolSize = maxPoolSize; + this.minIdle = minIdle; + this.connectionTimeoutMs = connectionTimeoutMs; + this.idleTimeoutMs = idleTimeoutMs; + this.maxLifetimeMs = maxLifetimeMs; + } + + public int maxPoolSize() { + return maxPoolSize; + } + + public int minIdle() { + return minIdle; + } + + public long connectionTimeoutMs() { + return connectionTimeoutMs; + } + + public long 
idleTimeoutMs() { + return idleTimeoutMs; + } + + public long maxLifetimeMs() { + return maxLifetimeMs; + } + } +} diff --git a/src/main/java/com/andrewkydev/database/config/DatabaseConfigLoader.java b/src/main/java/com/andrewkydev/database/config/DatabaseConfigLoader.java new file mode 100644 index 0000000..96d45e9 --- /dev/null +++ b/src/main/java/com/andrewkydev/database/config/DatabaseConfigLoader.java @@ -0,0 +1,56 @@ +package com.andrewkydev.database.config; + +import cn.nukkit.plugin.PluginBase; +import cn.nukkit.utils.Config; +import cn.nukkit.utils.ConfigSection; +import com.andrewkydev.database.schema.SqlDialect; + +public final class DatabaseConfigLoader { + + private DatabaseConfigLoader() { + } + + public static DatabaseConfig load(PluginBase plugin) { + Config config = plugin.getConfig(); + + String dialectValue = config.getString("driver", "mysql"); + SqlDialect dialect = "postgres".equalsIgnoreCase(dialectValue) + ? SqlDialect.POSTGRESQL + : SqlDialect.MYSQL; + + String host = config.getString("host", "localhost"); + int port = config.getInt("port", dialect == SqlDialect.POSTGRESQL ? 
5432 : 3306); + String database = config.getString("database", "primalix"); + String username = config.getString("username", "root"); + String password = config.getString("password", ""); + String adminDatabase = config.getString("adminDatabase", "postgres"); + boolean autoTransactions = config.getBoolean("autoTransactions", true); + + ConfigSection poolConfig = config.getSection("pool"); + int maxPoolSize = poolConfig.getInt("maxPoolSize", 10); + int minIdle = poolConfig.getInt("minIdle", 2); + long connectionTimeoutMs = poolConfig.getLong("connectionTimeoutMs", 30_000); + long idleTimeoutMs = poolConfig.getLong("idleTimeoutMs", 600_000); + long maxLifetimeMs = poolConfig.getLong("maxLifetimeMs", 1_800_000); + + DatabaseConfig.PoolConfig pool = new DatabaseConfig.PoolConfig( + maxPoolSize, + minIdle, + connectionTimeoutMs, + idleTimeoutMs, + maxLifetimeMs + ); + + return new DatabaseConfig( + dialect, + host, + port, + database, + username, + password, + adminDatabase, + pool, + autoTransactions + ); + } +} diff --git a/src/main/java/com/andrewkydev/database/internal/DatabaseImpl.java b/src/main/java/com/andrewkydev/database/internal/DatabaseImpl.java new file mode 100644 index 0000000..f99e41d --- /dev/null +++ b/src/main/java/com/andrewkydev/database/internal/DatabaseImpl.java @@ -0,0 +1,75 @@ +package com.andrewkydev.database.internal; + +import com.andrewkydev.database.DatabaseApi; +import com.andrewkydev.database.config.DatabaseConfig; +import com.andrewkydev.database.query.QueryRunner; +import com.andrewkydev.database.query.Transaction; +import com.andrewkydev.database.schema.Schema; +import com.andrewkydev.database.orm.EntityManager; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import javax.sql.DataSource; + +public final class DatabaseImpl implements DatabaseApi { + private final DataSource dataSource; + 
private final DatabaseConfig config; + private final ExecutorService executor; + private final Schema schema; + private final QueryRunner queryRunner; + private final EntityManager entityManager; + + public DatabaseImpl(DataSource dataSource, DatabaseConfig config) { + this.dataSource = dataSource; + this.config = config; + this.executor = Executors.newFixedThreadPool(Math.max(2, config.pool().maxPoolSize() / 2)); + this.schema = new SchemaImpl(dataSource, config, executor); + this.queryRunner = new JdbcQueryRunner(dataSource, executor); + this.entityManager = new EntityManagerImpl(dataSource, executor); + } + + @Override + public Schema schema() { + return schema; + } + + @Override + public QueryRunner query() { + return queryRunner; + } + + @Override + public EntityManager orm() { + return entityManager; + } + + @Override + public Transaction beginTransaction() { + try { + Connection connection = dataSource.getConnection(); + connection.setAutoCommit(false); + return new JdbcTransaction(connection, executor); + } catch (SQLException ex) { + throw new IllegalStateException("Failed to start transaction", ex); + } + } + + @Override + public CompletableFuture beginTransactionAsync() { + return CompletableFuture.supplyAsync(this::beginTransaction, executor); + } + + @Override + public void close() { + executor.shutdown(); + if (dataSource instanceof AutoCloseable) { + try { + ((AutoCloseable) dataSource).close(); + } catch (Exception ex) { + throw new IllegalStateException("Failed to close datasource", ex); + } + } + } +} diff --git a/src/main/java/com/andrewkydev/database/internal/EntityManagerImpl.java b/src/main/java/com/andrewkydev/database/internal/EntityManagerImpl.java new file mode 100644 index 0000000..176bb6c --- /dev/null +++ b/src/main/java/com/andrewkydev/database/internal/EntityManagerImpl.java @@ -0,0 +1,539 @@ +package com.andrewkydev.database.internal; + +import com.andrewkydev.database.orm.EntityManager; +import 
com.andrewkydev.database.orm.EntityQuery; +import com.andrewkydev.database.orm.TypeAdapter; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import java.lang.reflect.Constructor; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.UUID; +import javax.sql.DataSource; + +public final class EntityManagerImpl implements EntityManager { + private final DataSource dataSource; + private final Executor executor; + private final Map, EntityMetadata> metadataCache = new ConcurrentHashMap<>(); + private final Map, TypeAdapter> adapters = new ConcurrentHashMap<>(); + private final Gson gson = new GsonBuilder().create(); + + public EntityManagerImpl(DataSource dataSource, Executor executor) { + this.dataSource = dataSource; + this.executor = executor; + registerDefaults(); + } + + @Override + public void insert(T entity) { + EntityMetadata metadata = metadata(entity.getClass()); + List columns = new ArrayList<>(); + List values = new ArrayList<>(); + EntityMetadata.FieldMapping idField = metadata.idFieldOrNull(); + boolean generateId = false; + + for (EntityMetadata.FieldMapping mapping : metadata.fields()) { + Object value = getValue(mapping, entity); + if (mapping.isId() && mapping.autoIncrement() && !hasExplicitValue(value)) { + generateId = true; + continue; + } + columns.add(mapping); + values.add(value); + } + + StringBuilder sql = new StringBuilder(); + sql.append("INSERT INTO ").append(metadata.table()).append(" ("); + for (int i = 0; i < columns.size(); i++) { + if (i > 0) { + sql.append(", "); + } + sql.append(columns.get(i).column()); + } + 
sql.append(") VALUES ("); + for (int i = 0; i < columns.size(); i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + if (generateId && idField != null) { + Object generatedId = executeInsert(sql.toString(), values); + if (generatedId != null) { + setValue(idField, entity, generatedId); + } + } else { + executeUpdate(sql.toString(), values); + } + } + + @Override + public void update(T entity) { + EntityMetadata metadata = metadata(entity.getClass()); + EntityMetadata.FieldMapping idField = metadata.idField(); + Object idValue = getValue(idField, entity); + if (!hasExplicitValue(idValue)) { + throw new IllegalStateException("Entity id is required for update"); + } + + List params = new ArrayList<>(); + StringBuilder sql = new StringBuilder(); + sql.append("UPDATE ").append(metadata.table()).append(" SET "); + + boolean first = true; + for (EntityMetadata.FieldMapping mapping : metadata.fields()) { + if (mapping.isId()) { + continue; + } + if (!first) { + sql.append(", "); + } + first = false; + sql.append(mapping.column()).append("=?"); + params.add(getValue(mapping, entity)); + } + sql.append(" WHERE ").append(idField.column()).append("=?"); + params.add(idValue); + + executeUpdate(sql.toString(), params); + } + + @Override + public void delete(T entity) { + EntityMetadata metadata = metadata(entity.getClass()); + EntityMetadata.FieldMapping idField = metadata.idField(); + Object idValue = getValue(idField, entity); + if (!hasExplicitValue(idValue)) { + throw new IllegalStateException("Entity id is required for delete"); + } + String sql = "DELETE FROM " + metadata.table() + " WHERE " + idField.column() + "=?"; + executeUpdate(sql, Collections.singletonList(idValue)); + } + + @Override + public T findById(Class type, Object id) { + EntityMetadata metadata = metadata(type); + String sql = "SELECT * FROM " + metadata.table() + " WHERE " + metadata.idField().column() + "=?"; + List result = query(sql, 
Collections.singletonList(id), type); + return result.isEmpty() ? null : result.get(0); + } + + @Override + public List findAll(Class type) { + EntityMetadata metadata = metadata(type); + String sql = "SELECT * FROM " + metadata.table(); + return query(sql, Collections.emptyList(), type); + } + + @Override + public T findOneWhere(Class type, String where, List params) { + List results = findWhere(type, where, params, null, 1, null); + return results.isEmpty() ? null : results.get(0); + } + + @Override + public List findWhere(Class type, String where, List params) { + EntityMetadata metadata = metadata(type); + String sql = "SELECT * FROM " + metadata.table() + whereClause(where); + return query(sql, normalizeParams(params), type); + } + + @Override + public long count(Class type) { + return count(type, "", Collections.emptyList()); + } + + @Override + public long count(Class type, String where, List params) { + EntityMetadata metadata = metadata(type); + String sql = "SELECT COUNT(*) FROM " + metadata.table() + whereClause(where); + return queryCount(sql, normalizeParams(params)); + } + + @Override + public boolean exists(Class type, String where, List params) { + EntityMetadata metadata = metadata(type); + String sql = "SELECT 1 FROM " + metadata.table() + whereClause(where) + " LIMIT 1"; + List result = queryScalar(sql, normalizeParams(params)); + return !result.isEmpty(); + } + + @Override + public int deleteWhere(Class type, String where, List params) { + if (where == null || where.trim().isEmpty()) { + throw new IllegalStateException("deleteWhere requires a WHERE clause"); + } + EntityMetadata metadata = metadata(type); + String sql = "DELETE FROM " + metadata.table() + whereClause(where); + return executeUpdate(sql, normalizeParams(params)); + } + + @Override + public List findWhere( + Class type, + String where, + List params, + String orderBy, + Integer limit, + Integer offset + ) { + EntityMetadata metadata = metadata(type); + String sql = "SELECT * FROM " 
+ metadata.table() + + whereClause(where) + + orderByClause(orderBy) + + limitClause(limit, offset); + return query(sql, normalizeParams(params), type); + } + + @Override + public CompletableFuture> findWhereAsync( + Class type, + String where, + List params, + String orderBy, + Integer limit, + Integer offset + ) { + return CompletableFuture.supplyAsync( + () -> findWhere(type, where, params, orderBy, limit, offset), + executor + ); + } + + @Override + public void registerAdapter(Class type, TypeAdapter adapter) { + adapters.put(type, adapter); + } + + @Override + public EntityQuery query(Class type) { + return new EntityQueryImpl<>(this, type); + } + + @Override + public CompletableFuture insertAsync(T entity) { + return CompletableFuture.runAsync(() -> insert(entity), executor); + } + + @Override + public CompletableFuture updateAsync(T entity) { + return CompletableFuture.runAsync(() -> update(entity), executor); + } + + @Override + public CompletableFuture deleteAsync(T entity) { + return CompletableFuture.runAsync(() -> delete(entity), executor); + } + + @Override + public CompletableFuture findByIdAsync(Class type, Object id) { + return CompletableFuture.supplyAsync(() -> findById(type, id), executor); + } + + @Override + public CompletableFuture> findAllAsync(Class type) { + return CompletableFuture.supplyAsync(() -> findAll(type), executor); + } + + @Override + public CompletableFuture findOneWhereAsync(Class type, String where, List params) { + return CompletableFuture.supplyAsync(() -> findOneWhere(type, where, params), executor); + } + + @Override + public CompletableFuture> findWhereAsync(Class type, String where, List params) { + return CompletableFuture.supplyAsync(() -> findWhere(type, where, params), executor); + } + + @Override + public CompletableFuture countAsync(Class type) { + return CompletableFuture.supplyAsync(() -> count(type), executor); + } + + @Override + public CompletableFuture countAsync(Class type, String where, List params) { + 
return CompletableFuture.supplyAsync(() -> count(type, where, params), executor); + } + + @Override + public CompletableFuture existsAsync(Class type, String where, List params) { + return CompletableFuture.supplyAsync(() -> exists(type, where, params), executor); + } + + @Override + public CompletableFuture deleteWhereAsync(Class type, String where, List params) { + return CompletableFuture.supplyAsync(() -> deleteWhere(type, where, params), executor); + } + + private EntityMetadata metadata(Class type) { + return metadataCache.computeIfAbsent(type, EntityMetadata::resolve); + } + + private List query(String sql, List params, Class type) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + JdbcSupport.bindParams(statement, params); + try (ResultSet resultSet = statement.executeQuery()) { + Set columns = resolveColumns(resultSet); + List results = new ArrayList<>(); + while (resultSet.next()) { + results.add(mapRow(resultSet, type, columns)); + } + return results; + } + } catch (SQLException ex) { + throw new IllegalStateException("Failed to query SQL: " + sql, ex); + } + } + + List queryCustom(String sql, List params, Class type) { + return query(sql, normalizeParams(params), type); + } + + long queryCountCustom(String sql, List params) { + return queryCount(sql, normalizeParams(params)); + } + + boolean queryExistsCustom(String sql, List params) { + List result = queryScalar(sql, normalizeParams(params)); + return !result.isEmpty(); + } + + Executor executor() { + return executor; + } + + String tableFor(Class type) { + return metadata(type).table(); + } + + private long queryCount(String sql, List params) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + JdbcSupport.bindParams(statement, params); + try (ResultSet resultSet = statement.executeQuery()) { + return resultSet.next() ? 
resultSet.getLong(1) : 0L; + } + } catch (SQLException ex) { + throw new IllegalStateException("Failed to query SQL: " + sql, ex); + } + } + + private List queryScalar(String sql, List params) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + JdbcSupport.bindParams(statement, params); + try (ResultSet resultSet = statement.executeQuery()) { + List results = new ArrayList<>(); + while (resultSet.next()) { + results.add(resultSet.getInt(1)); + } + return results; + } + } catch (SQLException ex) { + throw new IllegalStateException("Failed to query SQL: " + sql, ex); + } + } + + private int executeUpdate(String sql, List params) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + JdbcSupport.bindParams(statement, params); + return statement.executeUpdate(); + } catch (SQLException ex) { + throw new IllegalStateException("Failed to execute SQL: " + sql, ex); + } + } + + private Object executeInsert(String sql, List params) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) { + JdbcSupport.bindParams(statement, params); + statement.executeUpdate(); + try (ResultSet resultSet = statement.getGeneratedKeys()) { + if (resultSet.next()) { + return resultSet.getObject(1); + } + } + return null; + } catch (SQLException ex) { + throw new IllegalStateException("Failed to execute SQL: " + sql, ex); + } + } + + private String whereClause(String where) { + if (where == null || where.trim().isEmpty()) { + return ""; + } + return " WHERE " + where; + } + + private String orderByClause(String orderBy) { + if (orderBy == null || orderBy.trim().isEmpty()) { + return ""; + } + return " ORDER BY " + orderBy; + } + + private String limitClause(Integer limit, Integer offset) { + if (limit == null) { + return ""; + } + if 
(offset == null) { + return " LIMIT " + limit; + } + return " LIMIT " + limit + " OFFSET " + offset; + } + + private List normalizeParams(List params) { + if (params == null) { + return Collections.emptyList(); + } + return params; + } + + private boolean hasExplicitValue(Object value) { + if (value == null) { + return false; + } + if (value instanceof Number) { + return ((Number) value).longValue() != 0L; + } + return true; + } + + private Object getValue(EntityMetadata.FieldMapping mapping, T entity) { + try { + Object value = mapping.field().get(entity); + if (value == null) { + return null; + } + if (mapping.json()) { + return gson.toJson(value, mapping.field().getGenericType()); + } + TypeAdapter adapter = adapterFor(mapping.field().getType()); + if (adapter != null) { + return adapter.toDatabase(value); + } + return value; + } catch (IllegalAccessException ex) { + throw new IllegalStateException("Failed to access field " + mapping.field().getName(), ex); + } + } + + private void setValue(EntityMetadata.FieldMapping mapping, T entity, Object value) { + try { + Class targetType = mapping.field().getType(); + Object coerced; + if (mapping.json()) { + coerced = gson.fromJson(value == null ? "null" : value.toString(), mapping.field().getGenericType()); + } else { + TypeAdapter adapter = adapterFor(targetType); + coerced = adapter == null ? 
coerceValue(targetType, value) : adapter.fromDatabase(value); + } + mapping.field().set(entity, coerced); + } catch (IllegalAccessException ex) { + throw new IllegalStateException("Failed to set field " + mapping.field().getName(), ex); + } + } + + private Object coerceValue(Class targetType, Object value) { + if (value == null) { + return null; + } + if (targetType.isAssignableFrom(value.getClass())) { + return value; + } + if (targetType == long.class || targetType == Long.class) { + return ((Number) value).longValue(); + } + if (targetType == int.class || targetType == Integer.class) { + return ((Number) value).intValue(); + } + if (targetType == short.class || targetType == Short.class) { + return ((Number) value).shortValue(); + } + if (targetType == String.class) { + return value.toString(); + } + return value; + } + + @SuppressWarnings("unchecked") + private TypeAdapter adapterFor(Class type) { + return (TypeAdapter) adapters.get(type); + } + + private T mapRow(ResultSet resultSet, Class type, Set columns) { + EntityMetadata metadata = metadata(type); + T instance = instantiate(type); + for (EntityMetadata.FieldMapping mapping : metadata.fields()) { + if (!columns.contains(mapping.column().toLowerCase())) { + continue; + } + try { + Object value = resultSet.getObject(mapping.column()); + if (value == null && mapping.field().getType().isPrimitive()) { + continue; + } + setValue(mapping, instance, value); + } catch (SQLException ex) { + throw new IllegalStateException("Failed to read column " + mapping.column(), ex); + } + } + return instance; + } + + private Set resolveColumns(ResultSet resultSet) throws SQLException { + Set columns = new HashSet<>(); + int count = resultSet.getMetaData().getColumnCount(); + for (int i = 1; i <= count; i++) { + String label = resultSet.getMetaData().getColumnLabel(i); + if (label != null) { + columns.add(label.toLowerCase()); + } + } + return columns; + } + + private T instantiate(Class type) { + try { + Constructor 
/**
 * Immutable, reflectively-resolved mapping between an entity class and its
 * database table: the table name, every persistable field, and the optional
 * {@code @DbId} field.
 *
 * <p>Field order is declaration order with subclass fields first, then each
 * superclass up to (but excluding) {@code Object}. NOTE(review): SQL built from
 * {@link #fields()} presumably depends on this ordering — confirm before
 * changing the traversal.
 */
final class EntityMetadata {
    // Resolved table name: explicit @DbEntity.table() or snake_case class name.
    private final String table;
    // All persistable fields (the id field included), in traversal order.
    private final List fields;
    // The single @DbId field, or null when the entity declares none.
    private final FieldMapping idField;

    private EntityMetadata(String table, List fields, FieldMapping idField) {
        this.table = table;
        this.fields = fields;
        this.idField = idField;
    }

    /**
     * Resolves metadata for {@code type} from its ORM annotations.
     *
     * <p>Static fields and fields annotated {@code @DbTransient} are skipped.
     *
     * @throws IllegalStateException if more than one field carries {@code @DbId},
     *         or if no mappable fields are found at all
     */
    static EntityMetadata resolve(Class type) {
        String table = resolveTable(type);
        List fields = new ArrayList<>();
        FieldMapping idField = null;

        // Walk the class hierarchy so inherited fields are mapped as well.
        Class current = type;
        while (current != null && current != Object.class) {
            for (Field field : current.getDeclaredFields()) {
                if (Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                if (field.isAnnotationPresent(DbTransient.class)) {
                    continue;
                }
                String columnName = resolveColumnName(field);
                DbId id = field.getAnnotation(DbId.class);
                DbJson json = field.getAnnotation(DbJson.class);
                boolean isId = id != null;
                boolean autoIncrement = id != null && id.autoIncrement();
                boolean jsonField = json != null;
                FieldMapping mapping = new FieldMapping(field, columnName, isId, autoIncrement, jsonField);
                fields.add(mapping);
                if (isId) {
                    // Exactly one @DbId is allowed across the whole hierarchy.
                    if (idField != null) {
                        throw new IllegalStateException("Multiple @DbId fields found for " + type.getName());
                    }
                    idField = mapping;
                }
            }
            current = current.getSuperclass();
        }

        if (fields.isEmpty()) {
            throw new IllegalStateException("No mappable fields found for " + type.getName());
        }
        return new EntityMetadata(table, fields, idField);
    }

    /** Resolved table name. */
    String table() {
        return table;
    }

    /** All mapped fields, id included, in traversal order. */
    List fields() {
        return fields;
    }

    /**
     * The {@code @DbId} field.
     *
     * @throws IllegalStateException if the entity declares no id field
     */
    FieldMapping idField() {
        if (idField == null) {
            throw new IllegalStateException("No @DbId field defined for entity table " + table);
        }
        return idField;
    }

    /** The {@code @DbId} field, or {@code null} if none is declared. */
    FieldMapping idFieldOrNull() {
        return idField;
    }

    // Explicit @DbEntity.table() wins; otherwise snake_case of the simple class name.
    private static String resolveTable(Class type) {
        DbEntity entity = type.getAnnotation(DbEntity.class);
        if (entity != null && !entity.table().isEmpty()) {
            return entity.table();
        }
        return toSnakeCase(type.getSimpleName());
    }

    // Explicit @DbColumn.name() wins; otherwise snake_case of the field name.
    private static String resolveColumnName(Field field) {
        DbColumn column = field.getAnnotation(DbColumn.class);
        if (column != null && !column.name().isEmpty()) {
            return column.name();
        }
        return toSnakeCase(field.getName());
    }

    // camelCase -> snake_case: each upper-case char is lowered and, except at
    // index 0, preceded by an underscore.
    private static String toSnakeCase(String value) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (Character.isUpperCase(c)) {
                if (i > 0) {
                    builder.append('_');
                }
                builder.append(Character.toLowerCase(c));
            } else {
                builder.append(c);
            }
        }
        return builder.toString();
    }

    /** One entity field bound to one column, with id/auto-increment/JSON flags. */
    static final class FieldMapping {
        private final Field field;
        private final String column;
        private final boolean id;
        private final boolean autoIncrement;
        private final boolean json;

        FieldMapping(Field field, String column, boolean id, boolean autoIncrement, boolean json) {
            this.field = field;
            this.column = column;
            this.id = id;
            this.autoIncrement = autoIncrement;
            this.json = json;
            // Permit reflective reads/writes of private entity fields.
            this.field.setAccessible(true);
        }

        Field field() {
            return field;
        }

        String column() {
            return column;
        }

        boolean isId() {
            return id;
        }

        boolean autoIncrement() {
            return autoIncrement;
        }

        boolean json() {
            return json;
        }
    }
}
/**
 * Fluent SQL query builder for a single entity type.
 *
 * <p>Accumulates SELECT/JOIN/WHERE/GROUP BY/HAVING/ORDER BY/LIMIT/OFFSET
 * fragments as raw SQL snippets and delegates execution to
 * {@link EntityManagerImpl}. Clause text is appended verbatim, so callers are
 * responsible for safe identifiers; values must always go through {@code ?}
 * placeholders.
 */
final class EntityQueryImpl implements EntityQuery {
    private final EntityManagerImpl manager;
    private final Class type;
    // Null means SELECT *.
    private List selectColumns;
    // Raw join clause, appended as-is after the table name.
    private String joinSql;
    private String where;
    // Positional parameters for WHERE; bound before havingParams (clause order).
    private List params;
    private String orderBy;
    private String groupBy;
    private String having;
    private List havingParams;
    private Integer limit;
    private Integer offset;

    EntityQueryImpl(EntityManagerImpl manager, Class type) {
        this.manager = manager;
        this.type = type;
    }

    /** Sets the projection; null or empty resets back to {@code SELECT *}. */
    @Override
    public EntityQuery select(String... columns) {
        if (columns == null || columns.length == 0) {
            this.selectColumns = null;
            return this;
        }
        this.selectColumns = new ArrayList<>(Arrays.asList(columns));
        return this;
    }

    /** Sets a raw join clause, e.g. {@code "JOIN other o ON o.id = t.other_id"}. */
    @Override
    public EntityQuery join(String joinSql) {
        this.joinSql = joinSql;
        return this;
    }

    /** Replaces the WHERE clause; the parameter list is defensively copied. */
    @Override
    public EntityQuery where(String where, List params) {
        this.where = where;
        this.params = params == null ? null : new ArrayList<>(params);
        return this;
    }

    /** Replaces the WHERE clause with varargs parameters. */
    @Override
    public EntityQuery where(String where, Object... params) {
        this.where = where;
        if (params == null || params.length == 0) {
            this.params = null;
        } else {
            this.params = new ArrayList<>(Arrays.asList(params));
        }
        return this;
    }

    /** Replaces the WHERE clause from a composable {@link Condition}; null clears it. */
    @Override
    public EntityQuery where(Condition condition) {
        if (condition == null) {
            this.where = null;
            this.params = null;
            return this;
        }
        this.where = condition.sql();
        this.params = new ArrayList<>(condition.params());
        return this;
    }

    /**
     * ANDs the condition onto the existing WHERE (parenthesizing the result);
     * acts as {@link #where(Condition)} when no WHERE is set yet.
     */
    @Override
    public EntityQuery and(Condition condition) {
        if (condition == null) {
            return this;
        }
        if (this.where == null || this.where.trim().isEmpty()) {
            return where(condition);
        }
        this.where = "(" + this.where + " AND " + condition.sql() + ")";
        if (this.params == null) {
            this.params = new ArrayList<>();
        }
        this.params.addAll(condition.params());
        return this;
    }

    /**
     * ORs the condition onto the existing WHERE (parenthesizing the result);
     * acts as {@link #where(Condition)} when no WHERE is set yet.
     */
    @Override
    public EntityQuery or(Condition condition) {
        if (condition == null) {
            return this;
        }
        if (this.where == null || this.where.trim().isEmpty()) {
            return where(condition);
        }
        this.where = "(" + this.where + " OR " + condition.sql() + ")";
        if (this.params == null) {
            this.params = new ArrayList<>();
        }
        this.params.addAll(condition.params());
        return this;
    }

    @Override
    public EntityQuery orderBy(String orderBy) {
        this.orderBy = orderBy;
        return this;
    }

    @Override
    public EntityQuery groupBy(String groupBy) {
        this.groupBy = groupBy;
        return this;
    }

    /** Replaces the HAVING clause; the parameter list is defensively copied. */
    @Override
    public EntityQuery having(String having, List params) {
        this.having = having;
        this.havingParams = params == null ? null : new ArrayList<>(params);
        return this;
    }

    /** Replaces the HAVING clause with varargs parameters. */
    @Override
    public EntityQuery having(String having, Object... params) {
        this.having = having;
        if (params == null || params.length == 0) {
            this.havingParams = null;
        } else {
            this.havingParams = new ArrayList<>(Arrays.asList(params));
        }
        return this;
    }

    /** Replaces the HAVING clause from a {@link Condition}; null clears it. */
    @Override
    public EntityQuery having(Condition condition) {
        if (condition == null) {
            this.having = null;
            this.havingParams = null;
            return this;
        }
        this.having = condition.sql();
        this.havingParams = new ArrayList<>(condition.params());
        return this;
    }

    @Override
    public EntityQuery limit(int limit) {
        this.limit = limit;
        return this;
    }

    @Override
    public EntityQuery offset(int offset) {
        this.offset = offset;
        return this;
    }

    /** Convenience for setting LIMIT and OFFSET together. */
    @Override
    public EntityQuery limit(int limit, int offset) {
        this.limit = limit;
        this.offset = offset;
        return this;
    }

    /** Executes the query and maps every row to an entity. */
    @Override
    public List list() {
        return manager.queryCustom(
                buildSelectSql(),
                mergeParams(),
                type
        );
    }

    /** Executes with a forced {@code LIMIT 1}; returns the row or null. */
    @Override
    public T one() {
        List results = manager.queryCustom(
                buildSelectSqlWithLimit(1),
                mergeParams(),
                type
        );
        return results.isEmpty() ? null : results.get(0);
    }

    @Override
    public long count() {
        return manager.queryCountCustom(buildCountSql(), mergeParams());
    }

    @Override
    public boolean exists() {
        return manager.queryExistsCustom(buildExistsSql(), mergeParams());
    }

    /**
     * Deletes rows matching the current WHERE clause.
     *
     * @throws IllegalStateException if join/groupBy/having were set — DELETE
     *         cannot carry those clauses here
     */
    @Override
    public int delete() {
        if (joinSql != null || groupBy != null || having != null) {
            throw new IllegalStateException("delete does not support join/group/having");
        }
        return manager.deleteWhere(type, where, params);
    }

    @Override
    public CompletableFuture> listAsync() {
        return CompletableFuture.supplyAsync(this::list, manager.executor());
    }

    @Override
    public CompletableFuture oneAsync() {
        return CompletableFuture.supplyAsync(this::one, manager.executor());
    }

    @Override
    public CompletableFuture countAsync() {
        return CompletableFuture.supplyAsync(this::count, manager.executor());
    }

    @Override
    public CompletableFuture existsAsync() {
        return CompletableFuture.supplyAsync(this::exists, manager.executor());
    }

    @Override
    public CompletableFuture deleteAsync() {
        return CompletableFuture.supplyAsync(this::delete, manager.executor());
    }

    private String buildSelectSql() {
        return buildSelectSqlWithLimit(null);
    }

    // Assembles the SELECT in standard clause order. overrideLimit (used by
    // one()) takes precedence over the configured limit.
    private String buildSelectSqlWithLimit(Integer overrideLimit) {
        String select = selectColumns == null || selectColumns.isEmpty()
                ? "*"
                : String.join(", ", selectColumns);
        StringBuilder sql = new StringBuilder("SELECT ");
        sql.append(select).append(" FROM ").append(manager.tableFor(type));
        if (joinSql != null && !joinSql.trim().isEmpty()) {
            sql.append(" ").append(joinSql);
        }
        if (where != null && !where.trim().isEmpty()) {
            sql.append(" WHERE ").append(where);
        }
        if (groupBy != null && !groupBy.trim().isEmpty()) {
            sql.append(" GROUP BY ").append(groupBy);
        }
        if (having != null && !having.trim().isEmpty()) {
            sql.append(" HAVING ").append(having);
        }
        if (orderBy != null && !orderBy.trim().isEmpty()) {
            sql.append(" ORDER BY ").append(orderBy);
        }
        Integer effectiveLimit = overrideLimit == null ? limit : overrideLimit;
        if (effectiveLimit != null) {
            sql.append(" LIMIT ").append(effectiveLimit);
        }
        if (offset != null) {
            if (effectiveLimit == null) {
                // OFFSET without LIMIT: pad with Integer.MAX_VALUE. NOTE(review):
                // presumably the MySQL "huge limit" idiom — confirm it is also
                // intended for the PostgreSQL dialect.
                sql.append(" LIMIT 2147483647");
            }
            sql.append(" OFFSET ").append(offset);
        }
        return sql.toString();
    }

    // COUNT query. Without GROUP BY it counts rows directly; with GROUP BY it
    // wraps a SELECT 1 per group in a subquery (alias "t") to count groups.
    private String buildCountSql() {
        StringBuilder sql = new StringBuilder();
        if (groupBy == null || groupBy.trim().isEmpty()) {
            sql.append("SELECT COUNT(*) FROM ").append(manager.tableFor(type));
            if (joinSql != null && !joinSql.trim().isEmpty()) {
                sql.append(" ").append(joinSql);
            }
            if (where != null && !where.trim().isEmpty()) {
                sql.append(" WHERE ").append(where);
            }
            // NOTE(review): HAVING without GROUP BY is emitted here; strict SQL
            // only allows aggregate expressions in that case — confirm intent.
            if (having != null && !having.trim().isEmpty()) {
                sql.append(" HAVING ").append(having);
            }
            return sql.toString();
        }

        sql.append("SELECT COUNT(*) FROM (");
        sql.append("SELECT 1 FROM ").append(manager.tableFor(type));
        if (joinSql != null && !joinSql.trim().isEmpty()) {
            sql.append(" ").append(joinSql);
        }
        if (where != null && !where.trim().isEmpty()) {
            sql.append(" WHERE ").append(where);
        }
        sql.append(" GROUP BY ").append(groupBy);
        if (having != null && !having.trim().isEmpty()) {
            sql.append(" HAVING ").append(having);
        }
        sql.append(") t");
        return sql.toString();
    }

    // Existence probe: SELECT 1 ... LIMIT 1 with all configured clauses.
    private String buildExistsSql() {
        StringBuilder sql = new StringBuilder("SELECT 1 FROM ");
        sql.append(manager.tableFor(type));
        if (joinSql != null && !joinSql.trim().isEmpty()) {
            sql.append(" ").append(joinSql);
        }
        if (where != null && !where.trim().isEmpty()) {
            sql.append(" WHERE ").append(where);
        }
        if (groupBy != null && !groupBy.trim().isEmpty()) {
            sql.append(" GROUP BY ").append(groupBy);
        }
        if (having != null && !having.trim().isEmpty()) {
            sql.append(" HAVING ").append(having);
        }
        sql.append(" LIMIT 1");
        return sql.toString();
    }

    // WHERE parameters first, then HAVING parameters — mirrors the order the
    // placeholders appear in the generated SQL.
    private List mergeParams() {
        List merged = new ArrayList<>();
        if (params != null) {
            merged.addAll(params);
        }
        if (havingParams != null) {
            merged.addAll(havingParams);
        }
        return merged;
    }
}
connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + JdbcSupport.bindParams(statement, params); + return statement.executeUpdate(); + } catch (SQLException ex) { + throw new IllegalStateException("Failed to execute SQL: " + sql, ex); + } + } + + @Override + public List query(String sql, RowMapper mapper) { + return query(sql, Collections.emptyList(), mapper); + } + + @Override + public List query(String sql, List params, RowMapper mapper) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + JdbcSupport.bindParams(statement, params); + try (ResultSet resultSet = statement.executeQuery()) { + List results = new ArrayList<>(); + while (resultSet.next()) { + results.add(mapper.map(resultSet)); + } + return results; + } + } catch (SQLException ex) { + throw new IllegalStateException("Failed to query SQL: " + sql, ex); + } + } + + @Override + public CompletableFuture executeAsync(String sql) { + return CompletableFuture.supplyAsync(() -> execute(sql), executor); + } + + @Override + public CompletableFuture executeAsync(String sql, List params) { + return CompletableFuture.supplyAsync(() -> execute(sql, params), executor); + } + + @Override + public CompletableFuture> queryAsync(String sql, RowMapper mapper) { + return CompletableFuture.supplyAsync(() -> query(sql, mapper), executor); + } + + @Override + public CompletableFuture> queryAsync(String sql, List params, RowMapper mapper) { + return CompletableFuture.supplyAsync(() -> query(sql, params, mapper), executor); + } +} diff --git a/src/main/java/com/andrewkydev/database/internal/JdbcSupport.java b/src/main/java/com/andrewkydev/database/internal/JdbcSupport.java new file mode 100644 index 0000000..eeeae6d --- /dev/null +++ b/src/main/java/com/andrewkydev/database/internal/JdbcSupport.java @@ -0,0 +1,19 @@ +package com.andrewkydev.database.internal; + +import 
/**
 * Small JDBC helpers shared by the runner and transaction implementations.
 */
final class JdbcSupport {
    private JdbcSupport() {
        // Static utility holder; never instantiated.
    }

    /**
     * Binds {@code params} to the statement as positional parameters.
     * A null or empty list is a no-op (the statement is not touched).
     *
     * @throws SQLException if the driver rejects a value
     */
    static void bindParams(PreparedStatement statement, List params) throws SQLException {
        if (params == null || params.isEmpty()) {
            return;
        }
        // JDBC parameter indexes are 1-based.
        int index = 1;
        for (Object param : params) {
            statement.setObject(index++, param);
        }
    }
}
/**
 * {@link Transaction} implementation bound to one JDBC {@link Connection}.
 *
 * <p>NOTE(review): this class never toggles auto-commit itself, so the
 * connection is presumably handed over with auto-commit already disabled by
 * whatever opens the transaction — confirm against the factory. Not
 * thread-safe; async variants merely run the same calls on the executor.
 */
public final class JdbcTransaction implements Transaction {
    private final Connection connection;
    private final Executor executor;

    public JdbcTransaction(Connection connection, Executor executor) {
        this.connection = connection;
        this.executor = executor;
    }

    /** Executes a parameterless update inside this transaction. */
    @Override
    public int execute(String sql) {
        return execute(sql, Collections.emptyList());
    }

    /** Executes a parameterized update inside this transaction. */
    @Override
    public int execute(String sql, List params) {
        try (PreparedStatement statement = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(statement, params);
            return statement.executeUpdate();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL: " + sql, ex);
        }
    }

    /** Runs a parameterless query inside this transaction. */
    @Override
    public List query(String sql, RowMapper mapper) {
        return query(sql, Collections.emptyList(), mapper);
    }

    /** Runs a parameterized query inside this transaction, mapping each row. */
    @Override
    public List query(String sql, List params, RowMapper mapper) {
        try (PreparedStatement statement = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(statement, params);
            try (ResultSet resultSet = statement.executeQuery()) {
                List results = new ArrayList<>();
                while (resultSet.next()) {
                    results.add(mapper.map(resultSet));
                }
                return results;
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to query SQL: " + sql, ex);
        }
    }

    @Override
    public CompletableFuture executeAsync(String sql) {
        return CompletableFuture.supplyAsync(() -> execute(sql), executor);
    }

    @Override
    public CompletableFuture executeAsync(String sql, List params) {
        return CompletableFuture.supplyAsync(() -> execute(sql, params), executor);
    }

    @Override
    public CompletableFuture> queryAsync(String sql, RowMapper mapper) {
        return CompletableFuture.supplyAsync(() -> query(sql, mapper), executor);
    }

    @Override
    public CompletableFuture> queryAsync(String sql, List params, RowMapper mapper) {
        return CompletableFuture.supplyAsync(() -> query(sql, params, mapper), executor);
    }

    /** Commits the transaction; the connection remains open for further work. */
    @Override
    public void commit() {
        try {
            connection.commit();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to commit transaction", ex);
        }
    }

    /** Rolls back the transaction; the connection remains open for further work. */
    @Override
    public void rollback() {
        try {
            connection.rollback();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to rollback transaction", ex);
        }
    }

    @Override
    public CompletableFuture commitAsync() {
        return CompletableFuture.runAsync(this::commit, executor);
    }

    @Override
    public CompletableFuture rollbackAsync() {
        return CompletableFuture.runAsync(this::rollback, executor);
    }

    /**
     * Closes the underlying connection.
     *
     * <p>NOTE(review): uncommitted work is neither committed nor explicitly
     * rolled back here — the database discards it when the connection closes.
     * Auto-commit is also not restored before close; if this connection comes
     * from a generic pool, verify the pool resets it on check-in.
     */
    @Override
    public void close() {
        try {
            connection.close();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to close transaction connection", ex);
        }
    }
}
a/src/main/java/com/andrewkydev/database/internal/JdbcUrlBuilder.java b/src/main/java/com/andrewkydev/database/internal/JdbcUrlBuilder.java new file mode 100644 index 0000000..6f3f45d --- /dev/null +++ b/src/main/java/com/andrewkydev/database/internal/JdbcUrlBuilder.java @@ -0,0 +1,27 @@ +package com.andrewkydev.database.internal; + +import com.andrewkydev.database.schema.SqlDialect; + +public final class JdbcUrlBuilder { + private JdbcUrlBuilder() { + } + + public static String build(SqlDialect dialect, String host, int port, String database) { + String dbSegment = database == null ? "" : database; + return switch (dialect) { + case POSTGRESQL -> { + if (dbSegment.isEmpty()) { + dbSegment = "postgres"; + } + yield "jdbc:postgresql://" + host + ":" + port + "/" + dbSegment; + } + default -> { + if (dbSegment.isEmpty()) { + yield "jdbc:mysql://" + host + ":" + port + "/"; + } + yield "jdbc:mysql://" + host + ":" + port + "/" + dbSegment + + "?useSSL=false&allowPublicKeyRetrieval=true"; + } + }; + } +} diff --git a/src/main/java/com/andrewkydev/database/internal/SchemaImpl.java b/src/main/java/com/andrewkydev/database/internal/SchemaImpl.java new file mode 100644 index 0000000..533f380 --- /dev/null +++ b/src/main/java/com/andrewkydev/database/internal/SchemaImpl.java @@ -0,0 +1,271 @@ +package com.andrewkydev.database.internal; + +import com.andrewkydev.database.config.DatabaseConfig; +import com.andrewkydev.database.schema.ColumnSpec; +import com.andrewkydev.database.schema.IndexSpec; +import com.andrewkydev.database.schema.Schema; +import com.andrewkydev.database.schema.SqlDialect; +import com.andrewkydev.database.schema.TableSpec; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.StringJoiner; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executor; +import javax.sql.DataSource; + 
+public final class SchemaImpl implements Schema { + private final DataSource dataSource; + private final DatabaseConfig config; + private final SqlDialect dialect; + private final Executor executor; + + public SchemaImpl(DataSource dataSource, DatabaseConfig config, Executor executor) { + this.dataSource = dataSource; + this.config = config; + this.dialect = config.dialect(); + this.executor = executor; + } + + @Override + public void createDatabase(String name) { + String sql = "CREATE DATABASE " + name; + executeAdmin(sql); + } + + @Override + public void dropDatabase(String name) { + String sql = "DROP DATABASE " + name; + executeAdmin(sql); + } + + @Override + public void createTable(TableSpec spec) { + List statements = new ArrayList<>(); + statements.add(buildCreateTable(spec)); + for (IndexSpec index : spec.indexes()) { + statements.add(buildCreateIndex(spec.name(), index)); + } + executeStatements(statements, true); + } + + @Override + public void dropTable(String table) { + executeStatements(singletonList("DROP TABLE " + table), true); + } + + @Override + public void addColumn(String table, ColumnSpec column) { + executeStatements(singletonList("ALTER TABLE " + table + " ADD COLUMN " + columnDefinition(column)), true); + } + + @Override + public void updateColumn(String table, ColumnSpec column) { + executeStatements(buildUpdateColumnStatements(table, column), true); + } + + @Override + public void dropColumn(String table, String column) { + executeStatements(singletonList("ALTER TABLE " + table + " DROP COLUMN " + column), true); + } + + @Override + public void addIndex(String table, IndexSpec index) { + executeStatements(singletonList(buildCreateIndex(table, index)), true); + } + + @Override + public void dropIndex(String table, String indexName) { + String sql = dialect == SqlDialect.POSTGRESQL + ? 
"DROP INDEX " + indexName + : "DROP INDEX " + indexName + " ON " + table; + executeStatements(singletonList(sql), true); + } + + @Override + public CompletableFuture createDatabaseAsync(String name) { + return CompletableFuture.runAsync(() -> createDatabase(name), executor); + } + + @Override + public CompletableFuture dropDatabaseAsync(String name) { + return CompletableFuture.runAsync(() -> dropDatabase(name), executor); + } + + @Override + public CompletableFuture createTableAsync(TableSpec spec) { + return CompletableFuture.runAsync(() -> createTable(spec), executor); + } + + @Override + public CompletableFuture dropTableAsync(String table) { + return CompletableFuture.runAsync(() -> dropTable(table), executor); + } + + @Override + public CompletableFuture addColumnAsync(String table, ColumnSpec column) { + return CompletableFuture.runAsync(() -> addColumn(table, column), executor); + } + + @Override + public CompletableFuture updateColumnAsync(String table, ColumnSpec column) { + return CompletableFuture.runAsync(() -> updateColumn(table, column), executor); + } + + @Override + public CompletableFuture dropColumnAsync(String table, String column) { + return CompletableFuture.runAsync(() -> dropColumn(table, column), executor); + } + + @Override + public CompletableFuture addIndexAsync(String table, IndexSpec index) { + return CompletableFuture.runAsync(() -> addIndex(table, index), executor); + } + + @Override + public CompletableFuture dropIndexAsync(String table, String indexName) { + return CompletableFuture.runAsync(() -> dropIndex(table, indexName), executor); + } + + private void executeAdmin(String sql) { + try (Connection connection = DriverManager.getConnection( + JdbcUrlBuilder.build(dialect, config.host(), config.port(), adminDatabase()), + config.username(), + config.password() + ); + PreparedStatement statement = connection.prepareStatement(sql)) { + statement.executeUpdate(); + } catch (SQLException ex) { + throw new IllegalStateException("Failed 
to execute admin SQL: " + sql, ex); + } + } + + private String adminDatabase() { + if (dialect == SqlDialect.POSTGRESQL) { + return config.adminDatabase(); + } + return ""; + } + + private void executeStatements(List statements, boolean allowTransactions) { + boolean useTransaction = allowTransactions && config.autoTransactions(); + if (!useTransaction) { + for (String statement : statements) { + executeStatement(statement); + } + return; + } + + try (Connection connection = dataSource.getConnection()) { + connection.setAutoCommit(false); + try { + for (String sql : statements) { + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.executeUpdate(); + } + } + connection.commit(); + } catch (SQLException ex) { + connection.rollback(); + throw ex; + } + } catch (SQLException ex) { + throw new IllegalStateException("Failed to execute SQL statements", ex); + } + } + + private List singletonList(String statement) { + List statements = new ArrayList<>(1); + statements.add(statement); + return statements; + } + + private void executeStatement(String sql) { + try (Connection connection = dataSource.getConnection(); + PreparedStatement statement = connection.prepareStatement(sql)) { + statement.executeUpdate(); + } catch (SQLException ex) { + throw new IllegalStateException("Failed to execute SQL: " + sql, ex); + } + } + + private String buildCreateTable(TableSpec spec) { + StringJoiner joiner = new StringJoiner(", "); + for (ColumnSpec column : spec.columns()) { + joiner.add(columnDefinition(column)); + } + if (!spec.primaryKey().isEmpty()) { + joiner.add("PRIMARY KEY (" + String.join(", ", spec.primaryKey()) + ")"); + } else { + List inlineKeys = new ArrayList<>(); + for (ColumnSpec column : spec.columns()) { + if (column.primaryKey()) { + inlineKeys.add(column.name()); + } + } + if (!inlineKeys.isEmpty()) { + joiner.add("PRIMARY KEY (" + String.join(", ", inlineKeys) + ")"); + } + } + return "CREATE TABLE " + spec.name() + " (" + joiner + 
")"; + } + + private String buildCreateIndex(String table, IndexSpec index) { + String prefix = index.unique() ? "CREATE UNIQUE INDEX " : "CREATE INDEX "; + return prefix + index.name() + " ON " + table + " (" + String.join(", ", index.columns()) + ")"; + } + + private String columnDefinition(ColumnSpec column) { + StringBuilder builder = new StringBuilder(); + builder.append(column.name()).append(" "); + builder.append(resolveColumnType(column)); + if (!column.nullable()) { + builder.append(" NOT NULL"); + } + if (column.defaultValue() != null && !column.defaultValue().isEmpty()) { + builder.append(" DEFAULT ").append(column.defaultValue()); + } + if (column.autoIncrement() && dialect == SqlDialect.MYSQL) { + builder.append(" AUTO_INCREMENT"); + } + return builder.toString(); + } + + private String resolveColumnType(ColumnSpec column) { + if (!column.autoIncrement() || dialect != SqlDialect.POSTGRESQL) { + return column.type(); + } + String type = column.type().toUpperCase(); + if (type.contains("BIG")) { + return "BIGSERIAL"; + } + if (type.contains("INT")) { + return "SERIAL"; + } + return column.type() + " GENERATED BY DEFAULT AS IDENTITY"; + } + + private List buildUpdateColumnStatements(String table, ColumnSpec column) { + List statements = new ArrayList<>(); + if (dialect == SqlDialect.MYSQL) { + statements.add("ALTER TABLE " + table + " MODIFY COLUMN " + columnDefinition(column)); + return statements; + } + + statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " TYPE " + column.type()); + if (column.nullable()) { + statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " DROP NOT NULL"); + } else { + statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " SET NOT NULL"); + } + if (column.defaultValue() == null || column.defaultValue().isEmpty()) { + statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " DROP DEFAULT"); + } else { + statements.add("ALTER TABLE " + table + " 
ALTER COLUMN " + column.name() + " SET DEFAULT " + column.defaultValue()); + } + return statements; + } +} diff --git a/src/main/java/com/andrewkydev/database/orm/Condition.java b/src/main/java/com/andrewkydev/database/orm/Condition.java new file mode 100644 index 0000000..2d3e756 --- /dev/null +++ b/src/main/java/com/andrewkydev/database/orm/Condition.java @@ -0,0 +1,38 @@ +package com.andrewkydev.database.orm; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public final class Condition { + private final String sql; + private final List params; + + Condition(String sql, List params) { + this.sql = sql; + this.params = params == null ? Collections.emptyList() : params; + } + + public String sql() { + return sql; + } + + public List params() { + return params; + } + + public Condition and(Condition other) { + return combine("AND", other); + } + + public Condition or(Condition other) { + return combine("OR", other); + } + + private Condition combine(String op, Condition other) { + List combined = new ArrayList<>(params.size() + other.params.size()); + combined.addAll(params); + combined.addAll(other.params); + return new Condition("(" + sql + " " + op + " " + other.sql + ")", combined); + } +} diff --git a/src/main/java/com/andrewkydev/database/orm/Conditions.java b/src/main/java/com/andrewkydev/database/orm/Conditions.java new file mode 100644 index 0000000..e20ceb6 --- /dev/null +++ b/src/main/java/com/andrewkydev/database/orm/Conditions.java @@ -0,0 +1,85 @@ +package com.andrewkydev.database.orm; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public final class Conditions { + private Conditions() { + } + + public static Condition raw(String sql, List params) { + return new Condition(sql, params); + } + + public static Condition raw(String sql, Object... params) { + return new Condition(sql, params == null ? 
Collections.emptyList() : asList(params)); + } + + public static Condition eq(String column, Object value) { + return new Condition(column + " = ?", Collections.singletonList(value)); + } + + public static Condition ne(String column, Object value) { + return new Condition(column + " <> ?", Collections.singletonList(value)); + } + + public static Condition gt(String column, Object value) { + return new Condition(column + " > ?", Collections.singletonList(value)); + } + + public static Condition gte(String column, Object value) { + return new Condition(column + " >= ?", Collections.singletonList(value)); + } + + public static Condition lt(String column, Object value) { + return new Condition(column + " < ?", Collections.singletonList(value)); + } + + public static Condition lte(String column, Object value) { + return new Condition(column + " <= ?", Collections.singletonList(value)); + } + + public static Condition like(String column, Object value) { + return new Condition(column + " LIKE ?", Collections.singletonList(value)); + } + + public static Condition in(String column, List values) { + if (values == null || values.isEmpty()) { + return new Condition("1=0", Collections.emptyList()); + } + StringBuilder builder = new StringBuilder(); + builder.append(column).append(" IN ("); + for (int i = 0; i < values.size(); i++) { + if (i > 0) { + builder.append(", "); + } + builder.append("?"); + } + builder.append(")"); + return new Condition(builder.toString(), values); + } + + public static Condition in(String column, Object... 
/**
 * Column-level mapping options for an entity field.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbColumn {
    /** Column name; empty derives snake_case from the field name. */
    String name() default "";

    /** Whether the column accepts NULL. NOTE(review): consumer not visible here — confirm in schema generation. */
    boolean nullable() default true;

    /** Length for sized types (e.g. VARCHAR). NOTE(review): consumer not visible here — confirm in schema generation. */
    int length() default 255;

    /** Whether a UNIQUE constraint applies. NOTE(review): consumer not visible here — confirm in schema generation. */
    boolean unique() default false;

    /** Explicit SQL type; empty presumably means infer from the Java field type — confirm. */
    String type() default "";
}

/**
 * Marks a class as a persistable entity.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface DbEntity {
    /** Table name; empty derives snake_case from the simple class name. */
    String table() default "";
}
/**
 * Marks the primary-key field of an entity. At most one field per entity
 * hierarchy may carry this annotation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbId {
    /** Whether the database generates the key (default) or the caller assigns it. */
    boolean autoIncrement() default true;
}

/**
 * Marks a field whose value is stored as JSON in a single column.
 * NOTE(review): serialization presumably uses the project's Gson dependency —
 * confirm against the entity manager.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbJson {
}

/**
 * Excludes a field from persistence entirely; metadata resolution skips it.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbTransient {
}
// ── src/main/java/com/andrewkydev/database/orm/EntityManager.java ──
package com.andrewkydev.database.orm;

import java.util.List;
import java.util.concurrent.CompletableFuture;

/**
 * Central ORM facade: CRUD, ad-hoc {@code WHERE} queries, counting, and a
 * fluent {@link EntityQuery} builder, each with a {@code CompletableFuture}
 * async twin.
 *
 * <p>NOTE(review): all generic type parameters below were stripped by the
 * extraction (the original diff shows raw {@code List}/{@code Class} usage);
 * the {@code <T>} placements are reconstructed — confirm against the
 * implementing class.
 */
public interface EntityManager {

    /** Persists a new entity row. */
    <T> void insert(T entity);

    /** Updates the row identified by the entity's {@code @DbId} field. */
    <T> void update(T entity);

    /** Deletes the row identified by the entity's {@code @DbId} field. */
    <T> void delete(T entity);

    /** Loads one entity by primary key, or {@code null} if absent. */
    <T> T findById(Class<T> type, Object id);

    /** Loads every row of the entity's table. */
    <T> List<T> findAll(Class<T> type);

    /** Loads the first entity matching {@code where}, or {@code null}. */
    <T> T findOneWhere(Class<T> type, String where, List<Object> params);

    /** Loads all entities matching {@code where}. */
    <T> List<T> findWhere(Class<T> type, String where, List<Object> params);

    /** Counts all rows of the entity's table. */
    long count(Class<?> type);

    /** Counts rows matching {@code where}. */
    long count(Class<?> type, String where, List<Object> params);

    /** Returns {@code true} if at least one row matches {@code where}. */
    boolean exists(Class<?> type, String where, List<Object> params);

    /** Deletes rows matching {@code where}; returns the affected-row count. */
    int deleteWhere(Class<?> type, String where, List<Object> params);

    /**
     * Paged variant of {@link #findWhere(Class, String, List)}.
     *
     * @param orderBy raw {@code ORDER BY} clause body, or {@code null}
     * @param limit   max rows, or {@code null} for no limit
     * @param offset  rows to skip, or {@code null} for none
     */
    <T> List<T> findWhere(
            Class<T> type,
            String where,
            List<Object> params,
            String orderBy,
            Integer limit,
            Integer offset
    );

    /** Async twin of the paged {@code findWhere}. */
    <T> CompletableFuture<List<T>> findWhereAsync(
            Class<T> type,
            String where,
            List<Object> params,
            String orderBy,
            Integer limit,
            Integer offset
    );

    /** Registers a custom Java↔SQL converter for {@code type}. */
    <T> void registerAdapter(Class<T> type, TypeAdapter<T> adapter);

    /** Starts a fluent query over the entity's table. */
    <T> EntityQuery<T> query(Class<T> type);

    <T> CompletableFuture<Void> insertAsync(T entity);

    <T> CompletableFuture<Void> updateAsync(T entity);

    <T> CompletableFuture<Void> deleteAsync(T entity);

    <T> CompletableFuture<T> findByIdAsync(Class<T> type, Object id);

    <T> CompletableFuture<List<T>> findAllAsync(Class<T> type);

    <T> CompletableFuture<T> findOneWhereAsync(Class<T> type, String where, List<Object> params);

    <T> CompletableFuture<List<T>> findWhereAsync(Class<T> type, String where, List<Object> params);

    CompletableFuture<Long> countAsync(Class<?> type);

    CompletableFuture<Long> countAsync(Class<?> type, String where, List<Object> params);

    CompletableFuture<Boolean> existsAsync(Class<?> type, String where, List<Object> params);

    CompletableFuture<Integer> deleteWhereAsync(Class<?> type, String where, List<Object> params);
}
// ── src/main/java/com/andrewkydev/database/orm/EntityQuery.java ──
package com.andrewkydev.database.orm;

import java.util.List;
import java.util.concurrent.CompletableFuture;

/**
 * Fluent, chainable query builder over one entity type.
 *
 * <p>NOTE(review): the {@code <T>} type parameter was stripped by the
 * extraction and is reconstructed here — confirm against the implementation.
 */
public interface EntityQuery<T> {

    /** Restricts the projection to the given columns (default: all). */
    EntityQuery<T> select(String... columns);

    /** Appends a raw {@code JOIN} clause. */
    EntityQuery<T> join(String joinSql);

    EntityQuery<T> where(String where, List<Object> params);

    EntityQuery<T> where(String where, Object... params);

    EntityQuery<T> where(Condition condition);

    /** ANDs another condition onto the current WHERE. */
    EntityQuery<T> and(Condition condition);

    /** ORs another condition onto the current WHERE. */
    EntityQuery<T> or(Condition condition);

    EntityQuery<T> orderBy(String orderBy);

    EntityQuery<T> groupBy(String groupBy);

    EntityQuery<T> having(String having, List<Object> params);

    EntityQuery<T> having(String having, Object... params);

    EntityQuery<T> having(Condition condition);

    EntityQuery<T> limit(int limit);

    EntityQuery<T> offset(int offset);

    /** Convenience for {@code limit(limit).offset(offset)}. */
    EntityQuery<T> limit(int limit, int offset);

    /** Executes and returns all matching entities. */
    List<T> list();

    /** Executes and returns the first match, or {@code null}. */
    T one();

    long count();

    boolean exists();

    /** Deletes the matching rows; returns the affected-row count. */
    int delete();

    CompletableFuture<List<T>> listAsync();

    CompletableFuture<T> oneAsync();

    CompletableFuture<Long> countAsync();

    CompletableFuture<Boolean> existsAsync();

    CompletableFuture<Integer> deleteAsync();
}

// ── src/main/java/com/andrewkydev/database/orm/OrmSchema.java ──
package com.andrewkydev.database.orm;

import com.andrewkydev.database.schema.ColumnSpec;
import com.andrewkydev.database.schema.IndexSpec;
import com.andrewkydev.database.schema.SqlDialect;
import com.andrewkydev.database.schema.TableSpec;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;

/**
 * Derives DDL metadata ({@link TableSpec}) from an annotated entity class.
 */
public final class OrmSchema {
    private OrmSchema() {
        // Static utility — no instances.
    }

    /**
     * Builds a {@link TableSpec} for {@code type}, walking up the class
     * hierarchy (stopping at {@code Object}) so inherited fields are mapped.
     *
     * <p>Mapping rules visible in this code: static and {@code @DbTransient}
     * fields are skipped; the column name comes from {@code @DbColumn.name}
     * or the snake_cased field name; {@code @DbId} fields become primary-key
     * columns; {@code @DbColumn.unique} fields get a unique index named
     * {@code <table>_<column>_uk}; the SQL type is {@code @DbColumn.type} if
     * set, otherwise derived from the Java type by {@link #resolveType}.
     */
    public static TableSpec fromEntity(Class<?> type, SqlDialect dialect) {
        DbEntity entity = type.getAnnotation(DbEntity.class);
        String table = entity != null && !entity.table().isEmpty()
                ? entity.table()
                : toSnakeCase(type.getSimpleName());

        TableSpec.Builder builder = TableSpec.builder(table);
        List<String> primaryKeys = new ArrayList<>();
        List<IndexSpec> indexes = new ArrayList<>();

        Class<?> current = type;
        while (current != null && current != Object.class) {
            for (Field field : current.getDeclaredFields()) {
                if (Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                if (field.isAnnotationPresent(DbTransient.class)) {
                    continue;
                }

                DbColumn column = field.getAnnotation(DbColumn.class);
                DbId id = field.getAnnotation(DbId.class);
                DbJson json = field.getAnnotation(DbJson.class);

                String columnName = column != null && !column.name().isEmpty()
                        ? column.name()
                        : toSnakeCase(field.getName());
                boolean nullable = column == null || column.nullable();
                boolean unique = column != null && column.unique();
                int length = column != null ? column.length() : 255;
                boolean autoIncrement = id != null && id.autoIncrement();
                boolean primaryKey = id != null;

                String typeName = column != null ? column.type() : "";
                if (typeName == null || typeName.trim().isEmpty()) {
                    typeName = resolveType(field.getType(), length, json != null, dialect);
                }

                builder.column(ColumnSpec.builder(columnName, typeName)
                        .nullable(nullable)
                        .autoIncrement(autoIncrement)
                        .primaryKey(primaryKey)
                        .build());

                if (primaryKey) {
                    primaryKeys.add(columnName);
                }
                if (unique) {
                    indexes.add(new IndexSpec(table + "_" + columnName + "_uk", listOf(columnName), true));
                }
            }
            current = current.getSuperclass();
        }

        if (!primaryKeys.isEmpty()) {
            builder.primaryKey(primaryKeys);
        }
        if (!indexes.isEmpty()) {
            builder.indexes(indexes);
        }

        return builder.build();
    }

    /**
     * Maps a Java field type to a SQL column type for the given dialect.
     * Unrecognized types fall back to {@code TEXT}.
     */
    private static String resolveType(Class<?> fieldType, int length, boolean json, SqlDialect dialect) {
        if (json) {
            // PostgreSQL's binary JSONB is preferred over plain JSON there.
            return dialect == SqlDialect.POSTGRESQL ? "JSONB" : "JSON";
        }
        if (fieldType == String.class) {
            return "VARCHAR(" + length + ")";
        }
        if (fieldType == int.class || fieldType == Integer.class) {
            return "INT";
        }
        if (fieldType == long.class || fieldType == Long.class) {
            return "BIGINT";
        }
        if (fieldType == short.class || fieldType == Short.class) {
            return "SMALLINT";
        }
        if (fieldType == boolean.class || fieldType == Boolean.class) {
            // MySQL has no true BOOLEAN storage type; TINYINT(1) is the convention.
            return dialect == SqlDialect.POSTGRESQL ? "BOOLEAN" : "TINYINT(1)";
        }
        if (fieldType == float.class || fieldType == Float.class) {
            return "FLOAT";
        }
        if (fieldType == double.class || fieldType == Double.class) {
            return "DOUBLE";
        }
        if (fieldType == java.util.UUID.class) {
            // MySQL lacks a native UUID type; store the canonical 36-char text form.
            return dialect == SqlDialect.POSTGRESQL ? "UUID" : "CHAR(36)";
        }
        return "TEXT";
    }

    /** Single-element mutable list helper (pre-List.of style). */
    private static List<String> listOf(String value) {
        List<String> list = new ArrayList<>(1);
        list.add(value);
        return list;
    }

    /** Converts CamelCase to snake_case (e.g. {@code UserAccount -> user_account}). */
    private static String toSnakeCase(String value) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (Character.isUpperCase(c)) {
                if (i > 0) {
                    builder.append('_');
                }
                builder.append(Character.toLowerCase(c));
            } else {
                builder.append(c);
            }
        }
        return builder.toString();
    }
}

// ── src/main/java/com/andrewkydev/database/orm/TypeAdapter.java ──
package com.andrewkydev.database.orm;

/**
 * Bidirectional converter between a Java value and its database representation.
 *
 * @param <T> the Java-side type
 */
public interface TypeAdapter<T> {
    /** Converts a Java value to the object handed to JDBC. */
    Object toDatabase(T value);

    /** Converts a raw database value back to the Java type. */
    T fromDatabase(Object value);
}
// ── src/main/java/com/andrewkydev/database/query/QueryRunner.java ──
package com.andrewkydev.database.query;

import java.util.List;
import java.util.concurrent.CompletableFuture;

/**
 * Low-level SQL executor: statements and mapped queries, with async twins.
 *
 * <p>NOTE(review): generic type parameters below were stripped by the
 * extraction and are reconstructed — confirm against the implementation.
 */
public interface QueryRunner {

    /** Executes a statement; returns the affected-row count. */
    int execute(String sql);

    /** Executes a parameterized statement ({@code ?} placeholders). */
    int execute(String sql, List<Object> params);

    /** Runs a query and maps every row through {@code mapper}. */
    <T> List<T> query(String sql, RowMapper<T> mapper);

    /** Parameterized variant of {@link #query(String, RowMapper)}. */
    <T> List<T> query(String sql, List<Object> params, RowMapper<T> mapper);

    CompletableFuture<Integer> executeAsync(String sql);

    CompletableFuture<Integer> executeAsync(String sql, List<Object> params);

    <T> CompletableFuture<List<T>> queryAsync(String sql, RowMapper<T> mapper);

    <T> CompletableFuture<List<T>> queryAsync(String sql, List<Object> params, RowMapper<T> mapper);
}

// ── src/main/java/com/andrewkydev/database/query/RowMapper.java ──
package com.andrewkydev.database.query;

import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Maps the current row of a {@link ResultSet} to a value.
 * Implementations must not advance the cursor.
 *
 * @param <T> mapped row type
 */
@FunctionalInterface
public interface RowMapper<T> {
    T map(ResultSet resultSet) throws SQLException;
}

// ── src/main/java/com/andrewkydev/database/query/Transaction.java ──
package com.andrewkydev.database.query;

import java.util.List;
import java.util.concurrent.CompletableFuture;

/**
 * A database transaction exposing the same execute/query surface as
 * {@link QueryRunner}, plus commit/rollback. {@link AutoCloseable} so it can
 * be used in try-with-resources; {@code close()} declares no checked
 * exception.
 */
public interface Transaction extends AutoCloseable {

    int execute(String sql);

    int execute(String sql, List<Object> params);

    <T> List<T> query(String sql, RowMapper<T> mapper);

    <T> List<T> query(String sql, List<Object> params, RowMapper<T> mapper);

    CompletableFuture<Integer> executeAsync(String sql);

    CompletableFuture<Integer> executeAsync(String sql, List<Object> params);

    <T> CompletableFuture<List<T>> queryAsync(String sql, RowMapper<T> mapper);

    <T> CompletableFuture<List<T>> queryAsync(String sql, List<Object> params, RowMapper<T> mapper);

    void commit();

    void rollback();

    CompletableFuture<Void> commitAsync();

    CompletableFuture<Void> rollbackAsync();

    @Override
    void close();
}

// ── src/main/java/com/andrewkydev/database/schema/ColumnSpec.java ──
package com.andrewkydev.database.schema;

/**
 * Immutable description of one table column; build via {@link #builder}.
 */
public final class ColumnSpec {
    private final String name;
    private final String type;
    private final boolean nullable;
    private final String defaultValue;
    private final boolean autoIncrement;
    private final boolean primaryKey;

    private ColumnSpec(Builder builder) {
        this.name = builder.name;
        this.type = builder.type;
        this.nullable = builder.nullable;
        this.defaultValue = builder.defaultValue;
        this.autoIncrement = builder.autoIncrement;
        this.primaryKey = builder.primaryKey;
    }

    /** Starts a builder for a column with the given name and SQL type. */
    public static Builder builder(String name, String type) {
        return new Builder(name, type);
    }

    public String name() {
        return name;
    }

    /** Raw SQL type, e.g. {@code VARCHAR(255)}. */
    public String type() {
        return type;
    }

    public boolean nullable() {
        return nullable;
    }

    /** SQL DEFAULT expression, or {@code null} if none. */
    public String defaultValue() {
        return defaultValue;
    }

    public boolean autoIncrement() {
        return autoIncrement;
    }

    public boolean primaryKey() {
        return primaryKey;
    }

    /** Mutable builder; defaults: nullable, no default, not PK, not auto-increment. */
    public static final class Builder {
        private final String name;
        private final String type;
        private boolean nullable = true;
        private String defaultValue;
        private boolean autoIncrement;
        private boolean primaryKey;

        private Builder(String name, String type) {
            this.name = name;
            this.type = type;
        }

        public Builder nullable(boolean nullable) {
            this.nullable = nullable;
            return this;
        }

        public Builder defaultValue(String defaultValue) {
            this.defaultValue = defaultValue;
            return this;
        }

        public Builder autoIncrement(boolean autoIncrement) {
            this.autoIncrement = autoIncrement;
            return this;
        }

        public Builder primaryKey(boolean primaryKey) {
            this.primaryKey = primaryKey;
            return this;
        }

        public ColumnSpec build() {
            return new ColumnSpec(this);
        }
    }
}

// ── src/main/java/com/andrewkydev/database/schema/IndexSpec.java ──
package com.andrewkydev.database.schema;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Immutable description of a (possibly unique) index over one or more columns.
 * The column list is defensively copied and exposed unmodifiable.
 */
public final class IndexSpec {
    private final String name;
    private final List<String> columns;
    private final boolean unique;

    public IndexSpec(String name, List<String> columns, boolean unique) {
        this.name = name;
        this.columns = Collections.unmodifiableList(new ArrayList<>(columns));
        this.unique = unique;
    }

    public String name() {
        return name;
    }

    public List<String> columns() {
        return columns;
    }

    public boolean unique() {
        return unique;
    }
}

// ── src/main/java/com/andrewkydev/database/schema/Schema.java ──
package com.andrewkydev.database.schema;

import java.util.concurrent.CompletableFuture;

/**
 * DDL operations (database/table/column/index lifecycle), sync and async.
 */
public interface Schema {

    void createDatabase(String name);

    void dropDatabase(String name);

    void createTable(TableSpec spec);

    void dropTable(String table);

    void addColumn(String table, ColumnSpec column);

    /** Alters an existing column to match {@code column}'s definition. */
    void updateColumn(String table, ColumnSpec column);

    void dropColumn(String table, String column);

    void addIndex(String table, IndexSpec index);

    void dropIndex(String table, String indexName);

    CompletableFuture<Void> createDatabaseAsync(String name);

    CompletableFuture<Void> dropDatabaseAsync(String name);

    CompletableFuture<Void> createTableAsync(TableSpec spec);

    CompletableFuture<Void> dropTableAsync(String table);

    CompletableFuture<Void> addColumnAsync(String table, ColumnSpec column);

    CompletableFuture<Void> updateColumnAsync(String table, ColumnSpec column);

    CompletableFuture<Void> dropColumnAsync(String table, String column);

    CompletableFuture<Void> addIndexAsync(String table, IndexSpec index);

    CompletableFuture<Void> dropIndexAsync(String table, String indexName);
}

// ── src/main/java/com/andrewkydev/database/schema/SqlDialect.java ──
package com.andrewkydev.database.schema;

/** Supported SQL dialects; drives type mapping and DDL generation. */
public enum SqlDialect {
    MYSQL,
    POSTGRESQL
}

// ── src/main/java/com/andrewkydev/database/schema/TableSpec.java ──
package com.andrewkydev.database.schema;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Immutable description of a table: columns, primary key, and indexes.
 * All lists are defensively copied and exposed unmodifiable.
 */
public final class TableSpec {
    private final String name;
    private final List<ColumnSpec> columns;
    private final List<String> primaryKey;
    private final List<IndexSpec> indexes;

    private TableSpec(Builder builder) {
        this.name = builder.name;
        this.columns = Collections.unmodifiableList(new ArrayList<>(builder.columns));
        this.primaryKey = Collections.unmodifiableList(new ArrayList<>(builder.primaryKey));
        this.indexes = Collections.unmodifiableList(new ArrayList<>(builder.indexes));
    }

    public static Builder builder(String name) {
        return new Builder(name);
    }

    public String name() {
        return name;
    }

    public List<ColumnSpec> columns() {
        return columns;
    }

    /** Column names composing the primary key (empty if none). */
    public List<String> primaryKey() {
        return primaryKey;
    }

    public List<IndexSpec> indexes() {
        return indexes;
    }

    /** Mutable builder; columns/indexes accumulate, primaryKey replaces. */
    public static final class Builder {
        private final String name;
        private final List<ColumnSpec> columns = new ArrayList<>();
        private final List<String> primaryKey = new ArrayList<>();
        private final List<IndexSpec> indexes = new ArrayList<>();

        private Builder(String name) {
            this.name = name;
        }

        public Builder column(ColumnSpec column) {
            this.columns.add(column);
            return this;
        }

        public Builder columns(List<ColumnSpec> columns) {
            this.columns.addAll(columns);
            return this;
        }

        /** Replaces (not appends to) any previously set primary key. */
        public Builder primaryKey(List<String> columns) {
            this.primaryKey.clear();
            this.primaryKey.addAll(columns);
            return this;
        }

        public Builder index(IndexSpec index) {
            this.indexes.add(index);
            return this;
        }

        public Builder indexes(List<IndexSpec> indexes) {
            this.indexes.addAll(indexes);
            return this;
        }

        public TableSpec build() {
            return new TableSpec(this);
        }
    }
}

// ── src/main/resources/config.yml ──
// (YAML resource, reproduced below verbatim except for review comments)
//
// #support mysql | postgres
// driver: "mysql"
// host: "localhost"
// port: 3306
// database: "primalix"
// username: "root"
// password: ""
// adminDatabase: "postgres"   # used as the maintenance DB for CREATE DATABASE on postgres — confirm
// autoTransactions: true
//
// pool:
//   maxPoolSize: 10
//   minIdle: 2
//   connectionTimeoutMs: 30000
//   idleTimeoutMs: 600000
//   maxLifetimeMs: 1800000

// ── src/main/resources/plugin.yml ──
// (YAML resource, reproduced below verbatim except for review comments)
//
// name: Database
// description: "Database plugin for Primalix"
// main: org.andrewkydev.Loader
// # NOTE(review): `main` points at org.andrewkydev.Loader, but every class in
// # this commit lives under com.andrewkydev.* — likely a typo that will make
// # the plugin fail to load; verify the Loader class's actual package.
// version: "0.0.1"
// api: [ 1.1.0 ]
//
// load: POSTWORLD