first commit

This commit is contained in:
2026-01-15 22:38:46 +03:00
commit a70e9b7a79
58 changed files with 3980 additions and 0 deletions

43
.gitignore vendored Normal file
View File

@@ -0,0 +1,43 @@
# Compiled class file
*.class
# Log file
*.log
# BlueJ files
*.ctxt
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
.gradle
build/
# Ignore Gradle GUI config
gradle-app.setting
# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
!gradle-wrapper.jar
# Cache of project
.gradletasknamecache
# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898
# gradle/wrapper/gradle-wrapper.properties
# End of https://mrkandreev.name/snippets/gitignore-generator/#Java,Gradle
/examples/

14
README.md Normal file
View File

@@ -0,0 +1,14 @@
# Database Plugin (Lumi)
SQL-oriented database plugin for Lumi with MySQL/PostgreSQL support, HikariCP pooling,
schema helpers, and a lightweight annotation-based ORM layer.
## Docs
- `docs/overview.md`
- `docs/config.md`
- `docs/api-schema.md`
- `docs/api-query.md`
- `docs/examples.md`
- `docs/orm.md`
- `docs/orm-query.md`
- `docs/orm-schema.md`

132
build.gradle.kts Normal file
View File

@@ -0,0 +1,132 @@
import java.io.BufferedReader
import java.io.InputStreamReader
plugins {
    id("java")
    id("java-library")
    id("com.github.johnrengelman.shadow") version "8.1.1"
    id("maven-publish")
}
group = "com.andrewkydev"
version = "1.0-SNAPSHOT"
// Local dev-deployment paths (relative to this project directory).
val copyTo = "../server/plugins"
val copyFrom = "build/libs/${project.name}-${project.version}.jar"
repositories {
    mavenCentral()
    maven {
        name = "luminiadevRepositorySnapshots"
        url = uri("https://repo.luminiadev.com/snapshots")
    }
}
dependencies {
    compileOnly("com.koshakmine:Lumi:1.4.0-SNAPSHOT")
    implementation("com.zaxxer:HikariCP:5.1.0")
    implementation("com.mysql:mysql-connector-j:8.3.0")
    implementation("org.postgresql:postgresql:42.7.2")
    implementation("com.google.code.gson:gson:2.11.0")
}
tasks.build {
    // finalizedBy(tasks.shadowJar)
}
tasks.shadowJar {
    // finalizedBy(tasks.named("copyToPath"))
}
tasks {
    shadowJar {
        setProperty("zip64", true)
        mergeServiceFiles()
        archiveClassifier.set("")
        archiveFileName.set("${project.name}-${project.version}.jar")
        dependencies {
            // Strip bundled server assets and unused transitive packages from the fat jar.
            exclude(
                "**/**.properties",
                "**/*.swp",
                "addons/*",
                "creativeitems*.json",
                "*.dat", // was listed twice; deduplicated
                "*.xsd",
                "structures/**",
                "RuntimeBlockStatesExtras/**",
                "recipes/**",
                "cn/nukkit/**",
                "co/aikar/timings/**",
                // fixed: was "it/uni mi/dsi/fastutil/**" (stray space) — the pattern
                // never matched, so fastutil was not actually excluded.
                "it/unimi/dsi/fastutil/**",
                "assets/org/apache/commons/math3/random/**",
            )
        }
        // removed: duplicate mergeServiceFiles() call (already configured above).
    }
}
//tasks.register("copyToPath") {
//    doLast {
//        copy {
//            from(copyFrom)
//            into(copyTo)
//        }
//    }
//    dependsOn(tasks.named("shadowJar"))
//    finalizedBy(tasks.named("reloadServer"))
//}
// Dev helper: triggers a server reload over RCON after deploying the jar.
tasks.register("reloadServer") {
    doLast {
        // SECURITY: prefer the RCON_PASSWORD environment variable. The literal
        // fallback keeps old behavior, but this credential is committed to VCS —
        // rotate it and drop the fallback.
        val rconPassword = System.getenv("RCON_PASSWORD") ?: "5MDRlOTFk1"
        val commands = listOf(
            listOf("../server/mcrcon.exe", "-H", "127.0.0.1", "-p", rconPassword, "-w", "1", "fr"),
            listOf("../server/mcrcon.exe", "-H", "127.0.0.1", "-p", rconPassword, "-w", "1", "reload again")
        )
        // Runs one mcrcon invocation, streaming its stdout to the build log.
        fun executeCommand(command: List<String>) {
            val processBuilder = ProcessBuilder(command)
            val process = processBuilder.start()
            val reader =
                BufferedReader(InputStreamReader(process.inputStream))
            var line: String?
            while (reader.readLine().also { line = it } != null) {
                println(line)
            }
            process.waitFor()
        }
        executeCommand(commands[0])
        executeCommand(commands[1])
        // NOTE(review): "reload again" is sent twice — confirm this duplication
        // is intentional (e.g. the server needs two reload passes).
        executeCommand(commands[1])
    }
    // FIXME: the "copyToPath" task above is commented out; eagerly depending on it
    // fails Gradle configuration for every invocation. Re-enable together with
    // the task registration.
    // dependsOn(tasks.named("copyToPath"))
}
publishing {
    publications {
        create<MavenPublication>("mavenJava") {
            groupId = project.group.toString()
            artifactId = project.name
            version = project.version.toString()
            artifact(tasks.shadowJar.get()) {
                classifier = null
            }
        }
    }
    repositories {
        mavenLocal()
    }
}

33
docs/api-query.md Normal file
View File

@@ -0,0 +1,33 @@
# Query API
Raw SQL is available via `api.query()` with parameter binding.
## Execute
```java
int rows = api.query().execute(
"UPDATE players SET level = level + 1 WHERE id = ?",
java.util.Collections.singletonList(1)
);
```
## Query
```java
List<String> names = api.query().query(
"SELECT name FROM players WHERE level >= ?",
java.util.Collections.singletonList(10),
rs -> rs.getString("name")
);
```
## Transactions
```java
try (Transaction tx = api.beginTransaction()) {
tx.execute("UPDATE players SET level = level + 1 WHERE id = ?", java.util.Collections.singletonList(1));
tx.commit();
} catch (Exception ex) {
// rollback on error if needed
}
```

35
docs/api-schema.md Normal file
View File

@@ -0,0 +1,35 @@
# Schema API
Schema helpers let you create and update tables programmatically.
## Example: create table
```java
import com.andrewkydev.database.schema.ColumnSpec;
import com.andrewkydev.database.schema.IndexSpec;
import com.andrewkydev.database.schema.TableSpec;
TableSpec table = TableSpec.builder("players")
.column(ColumnSpec.builder("id", "BIGINT").primaryKey(true).autoIncrement(true).nullable(false).build())
.column(ColumnSpec.builder("name", "VARCHAR(32)").nullable(false).build())
.index(new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false))
.build();
api.schema().createTable(table);
```
## API Methods
```java
api.schema().createDatabase("primalix");
api.schema().dropDatabase("primalix");
api.schema().createTable(spec);
api.schema().dropTable("players");
api.schema().addColumn("players", ColumnSpec.builder("level", "INT").build());
api.schema().updateColumn("players", ColumnSpec.builder("level", "INT").nullable(false).build());
api.schema().dropColumn("players", "level");
api.schema().addIndex("players", new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false));
api.schema().dropIndex("players", "players_name_idx");
```
All methods also have async variants returning `CompletableFuture`.

40
docs/config.md Normal file
View File

@@ -0,0 +1,40 @@
# Configuration
The default config file is `config.yml`:
```yaml
driver: "mysql"
host: "localhost"
port: 3306
database: "primalix"
username: "root"
password: ""
adminDatabase: "postgres"
autoTransactions: true
pool:
maxPoolSize: 10
minIdle: 2
connectionTimeoutMs: 30000
idleTimeoutMs: 600000
maxLifetimeMs: 1800000
```
## Fields
- `driver`: `mysql` or `postgres`.
- `host`: database host.
- `port`: database port.
- `database`: default database name for the pool.
- `username`: login user.
- `password`: login password.
- `adminDatabase`: PostgreSQL admin database used for create/drop database.
- `autoTransactions`: wrap schema operations in a transaction when possible.
## Pool Options
- `maxPoolSize`: maximum connections in pool.
- `minIdle`: minimum idle connections.
- `connectionTimeoutMs`: connection timeout.
- `idleTimeoutMs`: idle timeout.
- `maxLifetimeMs`: maximum connection lifetime.

91
docs/examples.md Normal file
View File

@@ -0,0 +1,91 @@
# Feature Examples
## 1) Auto ID after insert
```java
PlayerModel player = new PlayerModel();
player.setName("Steve");
api.orm().insert(player);
long id = player.getId();
```
## 2) snake_case default
```java
class PlayerStats {
private int totalKills;
}
// table: player_stats, column: total_kills
```
## 3) findOneWhere & deleteWhere
```java
PlayerModel one = api.orm().findOneWhere(
PlayerModel.class,
"level >= ?",
java.util.Collections.singletonList(10)
);
int deleted = api.orm().deleteWhere(
PlayerModel.class,
"level < ?",
java.util.Collections.singletonList(1)
);
```
## 4) @DbColumn(length/unique/nullable)
```java
@DbColumn(length = 32, unique = true, nullable = false)
private String username;
```
## 5) findWhere with order/limit/offset
```java
List<PlayerModel> page = api.orm().findWhere(
PlayerModel.class,
"level >= ?",
java.util.Collections.singletonList(10),
"level DESC",
10,
0
);
```
## 6) Query Builder
```java
List<PlayerModel> top = api.orm().query(PlayerModel.class)
.where("level >= ?", 10)
.orderBy("level DESC")
.limit(10)
.list();
```
## 7) Query Builder + Fluent Conditions + Join
```java
import static com.andrewkydev.database.orm.Conditions.*;
List<PlayerModel> rows = api.orm().query(PlayerModel.class)
.select("players.*")
.join("LEFT JOIN clans ON clans.id = players.clan_id")
.where(eq("players.status", "ACTIVE").and(gt("players.level", 10)))
.groupBy("players.id")
.having("COUNT(clans.id) > ?", 0)
.orderBy("players.level DESC")
.limit(10, 0)
.list();
```
## 8) Custom types (UUID/JSON)
```java
@DbJson
private java.util.Map<String, Object> metadata;
// UUID is built-in. Custom type example:
api.orm().registerAdapter(MyType.class, new TypeAdapter<MyType>() { ... });
```

43
docs/orm-query.md Normal file
View File

@@ -0,0 +1,43 @@
# ORM Query Builder
Fluent query builder on top of the ORM.
## Basic
```java
List<PlayerModel> top = api.orm().query(PlayerModel.class)
.where("level >= ?", 10)
.orderBy("level DESC")
.limit(10)
.list();
```
## Fluent Conditions
```java
import static com.andrewkydev.database.orm.Conditions.*;
List<PlayerModel> players = api.orm().query(PlayerModel.class)
.where(eq("status", "ACTIVE").and(gt("level", 10)))
.orderBy("level DESC")
.list();
```
## Joins + Group By + Having
```java
List<PlayerModel> rows = api.orm().query(PlayerModel.class)
.select("players.*")
.join("LEFT JOIN clans ON clans.id = players.clan_id")
.where("players.level >= ?", 10)
.groupBy("players.id")
.having("COUNT(clans.id) > ?", 0)
.orderBy("players.level DESC")
.limit(10, 0)
.list();
```
## Select Columns
If you select a subset of columns, only those are mapped.
Missing fields keep default values.

30
docs/orm-schema.md Normal file
View File

@@ -0,0 +1,30 @@
# ORM Schema Generation
`OrmSchema` builds a `TableSpec` from annotated entities.
## Example
```java
import com.andrewkydev.database.orm.OrmSchema;
import com.andrewkydev.database.schema.SqlDialect;
import com.andrewkydev.database.schema.TableSpec;
TableSpec spec = OrmSchema.fromEntity(PlayerModel.class, SqlDialect.MYSQL);
api.schema().createTable(spec);
```
## Type Mapping
Default mapping (when `@DbColumn(type = "...")` is not specified):
- `String` -> `VARCHAR(length)`
- `int`/`Integer` -> `INT`
- `long`/`Long` -> `BIGINT`
- `short`/`Short` -> `SMALLINT`
- `boolean`/`Boolean` -> `BOOLEAN` (PostgreSQL) or `TINYINT(1)` (MySQL)
- `float`/`Float` -> `FLOAT`
- `double`/`Double` -> `DOUBLE`
- `UUID` -> `UUID` (PostgreSQL) or `CHAR(36)` (MySQL)
- `@DbJson` -> `JSON` (MySQL) or `JSONB` (PostgreSQL)
`@DbColumn(length, unique, nullable)` is also applied.

119
docs/orm.md Normal file
View File

@@ -0,0 +1,119 @@
# ORM
The ORM is lightweight: it maps fields to columns and does not manage complex
relationships. It supports sync and async operations.
## Annotations
- `@DbEntity(table = "players")` sets table name. Default: snake_case class name.
- `@DbColumn(name = "username")` overrides column name.
- `@DbColumn(type = "VARCHAR(32)")` overrides SQL type for schema generation.
- `@DbColumn(length = 32, unique = true, nullable = false)` influences schema generation.
- `@DbId(autoIncrement = true)` marks the primary key.
- `@DbJson` stores the field as JSON.
- `@DbTransient` ignores the field.
## Example entity
```java
@DbEntity(table = "players")
public class PlayerModel {
@DbId(autoIncrement = true)
private long id;
@DbColumn(name = "username", nullable = false, unique = true, length = 32)
private String name;
@DbColumn
private int level;
@DbJson
private java.util.Map<String, Object> metadata;
@DbTransient
private String temp;
public PlayerModel() {
}
}
```
## CRUD
```java
EntityManager orm = api.orm();
orm.insert(entity);
orm.update(entity);
orm.delete(entity);
PlayerModel player = orm.findById(PlayerModel.class, 1L);
List<PlayerModel> all = orm.findAll(PlayerModel.class);
PlayerModel one = orm.findOneWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10));
List<PlayerModel> many = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10));
List<PlayerModel> paged = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10), "level DESC", 10, 0);
long count = orm.count(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10));
boolean exists = orm.exists(PlayerModel.class, "username = ?", java.util.Collections.singletonList("Steve"));
int deleted = orm.deleteWhere(PlayerModel.class, "level < ?", java.util.Collections.singletonList(1));
```
## Query Builder
```java
EntityManager orm = api.orm();
List<PlayerModel> top = orm.query(PlayerModel.class)
.where("level >= ?", 10)
.orderBy("level DESC")
.limit(10)
.list();
boolean exists = orm.query(PlayerModel.class)
.where("username = ?", "Steve")
.exists();
```
## Fluent Conditions
```java
import static com.andrewkydev.database.orm.Conditions.*;
List<PlayerModel> players = orm.query(PlayerModel.class)
.where(eq("status", "ACTIVE").and(gt("level", 10)))
.list();
```
## Joins, Group By, Having
```java
List<PlayerModel> rows = orm.query(PlayerModel.class)
.select("players.*")
.join("LEFT JOIN clans ON clans.id = players.clan_id")
.groupBy("players.id")
.having("COUNT(clans.id) > ?", 0)
.list();
```
Selecting a subset of columns maps only those fields; missing fields keep default values.
## Auto ID on insert
If `@DbId(autoIncrement = true)` is used and the id is empty (0 or null),
`insert()` will read generated keys and set the id back on the entity.
## Custom Type Adapters
UUID mapping is built-in. For other types:
```java
orm.registerAdapter(SomeType.class, new TypeAdapter<SomeType>() {
@Override
public Object toDatabase(SomeType value) {
return value == null ? null : value.toString();
}
@Override
public SomeType fromDatabase(Object value) {
return value == null ? null : SomeType.parse(value.toString());
}
});
```

40
docs/overview.md Normal file
View File

@@ -0,0 +1,40 @@
# Overview
Database is a Lumi plugin that provides:
- HikariCP-backed MySQL/PostgreSQL connections.
- Schema helpers for creating/updating tables and indexes.
- Raw SQL queries with parameter binding.
- Lightweight ORM with annotations, fluent query builder, and async support.
## Install
1. Build the jar with Gradle.
2. Put the jar into your server plugins folder.
3. Start the server once to generate `config.yml`.
## Accessing the API
```java
import com.andrewkydev.database.DatabaseProvider;
import com.andrewkydev.database.DatabaseApi;
DatabaseApi api = DatabaseProvider.get();
```
## Feature Highlights
### Auto ID on insert
If an entity uses `@DbId(autoIncrement = true)` and the id value is empty (0 or null),
`insert()` will fetch generated keys and assign the id back to the object.
### Snake case by default
If `@DbEntity` and `@DbColumn` names are not set, class and field names are converted to
snake_case.
### Query helpers
`findOneWhere`, `deleteWhere`, and `findWhere` with sorting/limit/offset are available
in the ORM.

33
docs/ru/api-query.md Normal file
View File

@@ -0,0 +1,33 @@
# Query API (RU)
Raw SQL через `api.query()` с биндингом параметров.
## Execute
```java
int rows = api.query().execute(
"UPDATE players SET level = level + 1 WHERE id = ?",
java.util.Collections.singletonList(1)
);
```
## Query
```java
List<String> names = api.query().query(
"SELECT name FROM players WHERE level >= ?",
java.util.Collections.singletonList(10),
rs -> rs.getString("name")
);
```
## Транзакции
```java
try (Transaction tx = api.beginTransaction()) {
tx.execute("UPDATE players SET level = level + 1 WHERE id = ?", java.util.Collections.singletonList(1));
tx.commit();
} catch (Exception ex) {
// rollback при ошибке
}
```

35
docs/ru/api-schema.md Normal file
View File

@@ -0,0 +1,35 @@
# Schema API (RU)
Помощники для создания и обновления таблиц.
## Пример: create table
```java
import com.andrewkydev.database.schema.ColumnSpec;
import com.andrewkydev.database.schema.IndexSpec;
import com.andrewkydev.database.schema.TableSpec;
TableSpec table = TableSpec.builder("players")
.column(ColumnSpec.builder("id", "BIGINT").primaryKey(true).autoIncrement(true).nullable(false).build())
.column(ColumnSpec.builder("name", "VARCHAR(32)").nullable(false).build())
.index(new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false))
.build();
api.schema().createTable(table);
```
## Методы
```java
api.schema().createDatabase("primalix");
api.schema().dropDatabase("primalix");
api.schema().createTable(spec);
api.schema().dropTable("players");
api.schema().addColumn("players", ColumnSpec.builder("level", "INT").build());
api.schema().updateColumn("players", ColumnSpec.builder("level", "INT").nullable(false).build());
api.schema().dropColumn("players", "level");
api.schema().addIndex("players", new IndexSpec("players_name_idx", java.util.Arrays.asList("name"), false));
api.schema().dropIndex("players", "players_name_idx");
```
У всех методов есть async версии с `CompletableFuture`.

40
docs/ru/config.md Normal file
View File

@@ -0,0 +1,40 @@
# Конфигурация
Файл `config.yml`:
```yaml
driver: "mysql"
host: "localhost"
port: 3306
database: "primalix"
username: "root"
password: ""
adminDatabase: "postgres"
autoTransactions: true
pool:
maxPoolSize: 10
minIdle: 2
connectionTimeoutMs: 30000
idleTimeoutMs: 600000
maxLifetimeMs: 1800000
```
## Поля
- `driver`: `mysql` или `postgres`.
- `host`: адрес БД.
- `port`: порт БД.
- `database`: основная база.
- `username`: логин.
- `password`: пароль.
- `adminDatabase`: PostgreSQL админ‑база для create/drop database.
- `autoTransactions`: оборачивать schema операции в транзакцию.
## Pool
- `maxPoolSize`: максимум соединений.
- `minIdle`: минимум idle.
- `connectionTimeoutMs`: таймаут подключения.
- `idleTimeoutMs`: таймаут простоя.
- `maxLifetimeMs`: максимальная жизнь соединения.

43
docs/ru/orm-query.md Normal file
View File

@@ -0,0 +1,43 @@
# ORM Query Builder (RU)
Fluent builder над ORM.
## База
```java
List<PlayerModel> top = api.orm().query(PlayerModel.class)
.where("level >= ?", 10)
.orderBy("level DESC")
.limit(10)
.list();
```
## Fluent Conditions
```java
import static com.andrewkydev.database.orm.Conditions.*;
List<PlayerModel> players = api.orm().query(PlayerModel.class)
.where(eq("status", "ACTIVE").and(gt("level", 10)))
.orderBy("level DESC")
.list();
```
## Join + Group By + Having
```java
List<PlayerModel> rows = api.orm().query(PlayerModel.class)
.select("players.*")
.join("LEFT JOIN clans ON clans.id = players.clan_id")
.where("players.level >= ?", 10)
.groupBy("players.id")
.having("COUNT(clans.id) > ?", 0)
.orderBy("players.level DESC")
.limit(10, 0)
.list();
```
## Select Columns
Если выбираете часть колонок, маппятся только они.
Остальные поля остаются дефолтными.

30
docs/ru/orm-schema.md Normal file
View File

@@ -0,0 +1,30 @@
# ORM Schema (RU)
`OrmSchema` строит `TableSpec` на основе аннотаций.
## Пример
```java
import com.andrewkydev.database.orm.OrmSchema;
import com.andrewkydev.database.schema.SqlDialect;
import com.andrewkydev.database.schema.TableSpec;
TableSpec spec = OrmSchema.fromEntity(PlayerModel.class, SqlDialect.MYSQL);
api.schema().createTable(spec);
```
## Маппинг типов
По умолчанию:
- `String` -> `VARCHAR(length)`
- `int`/`Integer` -> `INT`
- `long`/`Long` -> `BIGINT`
- `short`/`Short` -> `SMALLINT`
- `boolean`/`Boolean` -> `BOOLEAN` (PostgreSQL) или `TINYINT(1)` (MySQL)
- `float`/`Float` -> `FLOAT`
- `double`/`Double` -> `DOUBLE`
- `UUID` -> `UUID` (PostgreSQL) или `CHAR(36)` (MySQL)
- `@DbJson` -> `JSON` (MySQL) или `JSONB` (PostgreSQL)
`@DbColumn(length, unique, nullable)` применяется автоматически.

119
docs/ru/orm.md Normal file
View File

@@ -0,0 +1,119 @@
# ORM (RU)
Легковесный ORM: поля мапятся в колонки, без сложных связей.
Есть sync и async версии.
## Аннотации
- `@DbEntity(table = "players")` - имя таблицы. По умолчанию snake_case.
- `@DbColumn(name = "username")` - имя колонки.
- `@DbColumn(type = "VARCHAR(32)")` - SQL тип для генерации схемы.
- `@DbColumn(length = 32, unique = true, nullable = false)` - влияет на schema генерацию.
- `@DbId(autoIncrement = true)` - primary key.
- `@DbJson` - JSON поле.
- `@DbTransient` - игнорировать поле.
## Пример сущности
```java
@DbEntity(table = "players")
public class PlayerModel {
@DbId(autoIncrement = true)
private long id;
@DbColumn(name = "username", nullable = false, unique = true, length = 32)
private String name;
@DbColumn
private int level;
@DbJson
private java.util.Map<String, Object> metadata;
@DbTransient
private String temp;
public PlayerModel() {
}
}
```
## CRUD
```java
EntityManager orm = api.orm();
orm.insert(entity);
orm.update(entity);
orm.delete(entity);
PlayerModel player = orm.findById(PlayerModel.class, 1L);
List<PlayerModel> all = orm.findAll(PlayerModel.class);
PlayerModel one = orm.findOneWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10));
List<PlayerModel> many = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10));
List<PlayerModel> paged = orm.findWhere(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10), "level DESC", 10, 0);
long count = orm.count(PlayerModel.class, "level >= ?", java.util.Collections.singletonList(10));
boolean exists = orm.exists(PlayerModel.class, "username = ?", java.util.Collections.singletonList("Steve"));
int deleted = orm.deleteWhere(PlayerModel.class, "level < ?", java.util.Collections.singletonList(1));
```
## Query Builder
```java
EntityManager orm = api.orm();
List<PlayerModel> top = orm.query(PlayerModel.class)
.where("level >= ?", 10)
.orderBy("level DESC")
.limit(10)
.list();
boolean exists = orm.query(PlayerModel.class)
.where("username = ?", "Steve")
.exists();
```
## Fluent Conditions
```java
import static com.andrewkydev.database.orm.Conditions.*;
List<PlayerModel> players = orm.query(PlayerModel.class)
.where(eq("status", "ACTIVE").and(gt("level", 10)))
.list();
```
## Join, Group By, Having
```java
List<PlayerModel> rows = orm.query(PlayerModel.class)
.select("players.*")
.join("LEFT JOIN clans ON clans.id = players.clan_id")
.groupBy("players.id")
.having("COUNT(clans.id) > ?", 0)
.list();
```
Если выбрать часть колонок, маппятся только они, остальные остаются дефолтными.
## Auto ID
Если `@DbId(autoIncrement = true)` и id пустой (0/null),
`insert()` получает generated keys и прописывает id в объект.
## Type Adapters
UUID уже поддержан. Для других типов:
```java
orm.registerAdapter(SomeType.class, new TypeAdapter<SomeType>() {
@Override
public Object toDatabase(SomeType value) {
return value == null ? null : value.toString();
}
@Override
public SomeType fromDatabase(Object value) {
return value == null ? null : SomeType.parse(value.toString());
}
});
```

34
docs/ru/overview.md Normal file
View File

@@ -0,0 +1,34 @@
# Overview (RU)
Database — SQL‑плагин для Lumi с поддержкой MySQL/PostgreSQL, пулом соединений HikariCP,
schema‑хелперами, raw SQL и легковесным ORM с fluent query builder.
## Установка
1. Соберите jar через Gradle.
2. Поместите jar в папку plugins сервера.
3. Запустите сервер один раз для генерации `config.yml`.
## Доступ к API
```java
import com.andrewkydev.database.DatabaseProvider;
import com.andrewkydev.database.DatabaseApi;
DatabaseApi api = DatabaseProvider.get();
```
## Основные возможности
### Auto ID после insert
Если у поля есть `@DbId(autoIncrement = true)` и id пустой (0 или null),
`insert()` получает generated keys и прописывает id в объект.
### Snake case по умолчанию
Если не задано `@DbEntity`/`@DbColumn`, имена берутся в snake_case.
### Удобные ORM методы
Доступны `findOneWhere`, `deleteWhere` и `findWhere` с сортировкой/лимитом/offset.

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

View File

@@ -0,0 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

251
gradlew vendored Normal file
View File

@@ -0,0 +1,251 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH="\\\"\\\""
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
-jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

94
gradlew.bat vendored Normal file
View File

@@ -0,0 +1,94 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@rem Echo commands only when the user exports DEBUG (wrapper debugging aid).
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem No JAVA_HOME: probe for a runnable java.exe on PATH before giving up.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
set CLASSPATH=
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

1
settings.gradle.kts Normal file
View File

@@ -0,0 +1 @@
rootProject.name = "Database"

View File

@@ -0,0 +1,62 @@
package com.andrewkydev;
import cn.nukkit.plugin.PluginBase;
import com.andrewkydev.database.DatabaseApi;
import com.andrewkydev.database.DatabaseProvider;
import com.andrewkydev.database.config.DatabaseConfig;
import com.andrewkydev.database.config.DatabaseConfigLoader;
import com.andrewkydev.database.internal.DatabaseImpl;
import com.andrewkydev.database.internal.JdbcUrlBuilder;
import com.andrewkydev.database.schema.SqlDialect;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
/**
 * Plugin entry point: builds the HikariCP connection pool from config.yml,
 * wraps it in a {@link DatabaseApi}, and publishes that API through
 * {@link DatabaseProvider} for other plugins to consume.
 */
public class Loader extends PluginBase {

    private DatabaseApi databaseApi;
    private HikariDataSource dataSource;

    @Override
    public void onLoad() {
        super.onLoad();
    }

    @Override
    public void onEnable() {
        super.onEnable();
        saveDefaultConfig();
        reloadConfig();
        DatabaseConfig config = DatabaseConfigLoader.load(this);
        HikariConfig hikariConfig = new HikariConfig();
        hikariConfig.setJdbcUrl(JdbcUrlBuilder.build(config.dialect(), config.host(), config.port(), config.database()));
        hikariConfig.setUsername(config.username());
        hikariConfig.setPassword(config.password());
        hikariConfig.setDriverClassName(driverClassName(config.dialect()));
        hikariConfig.setMaximumPoolSize(config.pool().maxPoolSize());
        hikariConfig.setMinimumIdle(config.pool().minIdle());
        hikariConfig.setConnectionTimeout(config.pool().connectionTimeoutMs());
        hikariConfig.setIdleTimeout(config.pool().idleTimeoutMs());
        hikariConfig.setMaxLifetime(config.pool().maxLifetimeMs());
        dataSource = new HikariDataSource(hikariConfig);
        databaseApi = new DatabaseImpl(dataSource, config);
        DatabaseProvider.set(databaseApi);
    }

    @Override
    public void onDisable() {
        super.onDisable();
        // Unpublish the API first so other plugins cannot obtain a
        // half-closed instance while shutdown is in progress.
        DatabaseProvider.set(null);
        if (databaseApi != null) {
            // DatabaseImpl.close() also closes the pooled DataSource it wraps.
            databaseApi.close();
            databaseApi = null;
        } else if (dataSource != null) {
            // onEnable() failed after the pool was created but before the API
            // was built: close the pool directly instead of leaking its
            // connections (previously it was only nulled out).
            dataSource.close();
        }
        dataSource = null;
    }

    /**
     * Maps the configured SQL dialect to the JDBC driver class name that
     * HikariCP should load.
     */
    private String driverClassName(SqlDialect dialect) {
        return dialect == SqlDialect.POSTGRESQL
                ? "org.postgresql.Driver"
                : "com.mysql.cj.jdbc.Driver";
    }
}

View File

@@ -0,0 +1,23 @@
package com.andrewkydev.database;
import com.andrewkydev.database.query.QueryRunner;
import com.andrewkydev.database.query.Transaction;
import com.andrewkydev.database.schema.Schema;
import com.andrewkydev.database.orm.EntityManager;
import java.util.concurrent.CompletableFuture;
/**
 * Facade over all database services exposed by this plugin.
 *
 * <p>Obtain the live instance via {@code DatabaseProvider.get()}. Closing the
 * API releases the underlying resources.</p>
 */
public interface DatabaseApi extends AutoCloseable {

    /** Access to the schema API. */
    Schema schema();

    /** Access to the raw SQL query runner. */
    QueryRunner query();

    /** Access to the annotation-based ORM layer. */
    EntityManager orm();

    /** Opens a new transaction; caller is responsible for commit/rollback and close. */
    Transaction beginTransaction();

    /** Asynchronous variant of {@link #beginTransaction()}. */
    CompletableFuture<Transaction> beginTransactionAsync();

    /** Shuts the database services down; narrowed to not throw checked exceptions. */
    @Override
    void close();
}

View File

@@ -0,0 +1,20 @@
package com.andrewkydev.database;
/**
 * Static holder publishing the active {@link DatabaseApi} instance to
 * consumers across plugins.
 */
public final class DatabaseProvider {

    /** Active instance; volatile so publication is visible across threads. */
    private static volatile DatabaseApi api;

    private DatabaseProvider() {
        // static holder; never instantiated
    }

    /**
     * Publishes (or, with {@code null}, retracts) the active API instance.
     */
    public static void set(DatabaseApi api) {
        DatabaseProvider.api = api;
    }

    /**
     * Returns the active API instance.
     *
     * @throws IllegalStateException if no instance has been published yet
     */
    public static DatabaseApi get() {
        final DatabaseApi snapshot = api;
        if (snapshot != null) {
            return snapshot;
        }
        throw new IllegalStateException("DatabaseApi is not initialized");
    }
}

View File

@@ -0,0 +1,115 @@
package com.andrewkydev.database.config;
import com.andrewkydev.database.schema.SqlDialect;
/**
 * Immutable snapshot of the plugin's database configuration as read from
 * config.yml (see {@code DatabaseConfigLoader}).
 */
public final class DatabaseConfig {

    private final SqlDialect dialect;
    private final String host;
    private final int port;
    private final String database;
    private final String username;
    private final String password;
    // Maintenance database used for admin-level operations (Postgres default: "postgres").
    private final String adminDatabase;
    private final PoolConfig pool;
    private final boolean autoTransactions;

    public DatabaseConfig(
            SqlDialect dialect,
            String host,
            int port,
            String database,
            String username,
            String password,
            String adminDatabase,
            PoolConfig pool,
            boolean autoTransactions
    ) {
        this.dialect = dialect;
        this.host = host;
        this.port = port;
        this.database = database;
        this.username = username;
        this.password = password;
        this.adminDatabase = adminDatabase;
        this.pool = pool;
        this.autoTransactions = autoTransactions;
    }

    /** Target SQL dialect (MySQL or PostgreSQL). */
    public SqlDialect dialect() {
        return dialect;
    }

    /** Database server host name. */
    public String host() {
        return host;
    }

    /** Database server port. */
    public int port() {
        return port;
    }

    /** Name of the database (schema) to connect to. */
    public String database() {
        return database;
    }

    /** Connection user name. */
    public String username() {
        return username;
    }

    /** Connection password (may be empty). */
    public String password() {
        return password;
    }

    /** Admin/maintenance database name. */
    public String adminDatabase() {
        return adminDatabase;
    }

    /** Connection-pool tuning parameters. */
    public PoolConfig pool() {
        return pool;
    }

    /** Whether the plugin should wrap operations in transactions automatically. */
    public boolean autoTransactions() {
        return autoTransactions;
    }

    /**
     * Immutable HikariCP pool settings (sizes and timeouts in milliseconds).
     */
    public static final class PoolConfig {

        private final int maxPoolSize;
        private final int minIdle;
        private final long connectionTimeoutMs;
        private final long idleTimeoutMs;
        private final long maxLifetimeMs;

        public PoolConfig(
                int maxPoolSize,
                int minIdle,
                long connectionTimeoutMs,
                long idleTimeoutMs,
                long maxLifetimeMs
        ) {
            this.maxPoolSize = maxPoolSize;
            this.minIdle = minIdle;
            this.connectionTimeoutMs = connectionTimeoutMs;
            this.idleTimeoutMs = idleTimeoutMs;
            this.maxLifetimeMs = maxLifetimeMs;
        }

        /** Upper bound on pooled connections. */
        public int maxPoolSize() {
            return maxPoolSize;
        }

        /** Minimum number of idle connections to keep warm. */
        public int minIdle() {
            return minIdle;
        }

        /** How long a caller waits for a connection before timing out. */
        public long connectionTimeoutMs() {
            return connectionTimeoutMs;
        }

        /** How long an idle connection may sit before being retired. */
        public long idleTimeoutMs() {
            return idleTimeoutMs;
        }

        /** Maximum lifetime of a pooled connection. */
        public long maxLifetimeMs() {
            return maxLifetimeMs;
        }
    }
}

View File

@@ -0,0 +1,56 @@
package com.andrewkydev.database.config;
import cn.nukkit.plugin.PluginBase;
import cn.nukkit.utils.Config;
import cn.nukkit.utils.ConfigSection;
import com.andrewkydev.database.schema.SqlDialect;
/**
 * Reads the plugin's config.yml into an immutable {@link DatabaseConfig},
 * applying sensible defaults for every missing key.
 */
public final class DatabaseConfigLoader {

    private DatabaseConfigLoader() {
        // static utility; never instantiated
    }

    /**
     * Loads database settings from the given plugin's configuration.
     *
     * @param plugin plugin whose config.yml is read
     * @return fully populated configuration (defaults applied where keys are absent)
     */
    public static DatabaseConfig load(PluginBase plugin) {
        Config config = plugin.getConfig();
        String dialectValue = config.getString("driver", "mysql");
        // Accept both the short ("postgres") and the full ("postgresql")
        // spelling of the dialect; anything else falls back to MySQL.
        // Previously "postgresql" was silently treated as MySQL.
        SqlDialect dialect = "postgres".equalsIgnoreCase(dialectValue) || "postgresql".equalsIgnoreCase(dialectValue)
                ? SqlDialect.POSTGRESQL
                : SqlDialect.MYSQL;
        String host = config.getString("host", "localhost");
        // Default port follows the dialect's conventional port.
        int port = config.getInt("port", dialect == SqlDialect.POSTGRESQL ? 5432 : 3306);
        String database = config.getString("database", "primalix");
        String username = config.getString("username", "root");
        String password = config.getString("password", "");
        String adminDatabase = config.getString("adminDatabase", "postgres");
        boolean autoTransactions = config.getBoolean("autoTransactions", true);
        ConfigSection poolConfig = config.getSection("pool");
        if (poolConfig == null) {
            // NOTE(review): guarding in case getSection returns null for a
            // missing "pool" section — falls back to all pool defaults.
            poolConfig = new ConfigSection();
        }
        int maxPoolSize = poolConfig.getInt("maxPoolSize", 10);
        int minIdle = poolConfig.getInt("minIdle", 2);
        long connectionTimeoutMs = poolConfig.getLong("connectionTimeoutMs", 30_000);
        long idleTimeoutMs = poolConfig.getLong("idleTimeoutMs", 600_000);
        long maxLifetimeMs = poolConfig.getLong("maxLifetimeMs", 1_800_000);
        DatabaseConfig.PoolConfig pool = new DatabaseConfig.PoolConfig(
                maxPoolSize,
                minIdle,
                connectionTimeoutMs,
                idleTimeoutMs,
                maxLifetimeMs
        );
        return new DatabaseConfig(
                dialect,
                host,
                port,
                database,
                username,
                password,
                adminDatabase,
                pool,
                autoTransactions
        );
    }
}

View File

@@ -0,0 +1,75 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.DatabaseApi;
import com.andrewkydev.database.config.DatabaseConfig;
import com.andrewkydev.database.orm.EntityManager;
import com.andrewkydev.database.query.QueryRunner;
import com.andrewkydev.database.query.Transaction;
import com.andrewkydev.database.schema.Schema;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import javax.sql.DataSource;
/**
 * Default {@link DatabaseApi} implementation backed by a JDBC
 * {@link DataSource} and a fixed worker pool used by the async API variants.
 */
public final class DatabaseImpl implements DatabaseApi {

    private final DataSource dataSource;
    private final DatabaseConfig config;
    private final ExecutorService executor;
    private final Schema schema;
    private final QueryRunner queryRunner;
    private final EntityManager entityManager;

    public DatabaseImpl(DataSource dataSource, DatabaseConfig config) {
        this.dataSource = dataSource;
        this.config = config;
        // Size the async worker pool at half the connection pool (min 2) so
        // workers don't all block waiting for connections.
        this.executor = Executors.newFixedThreadPool(Math.max(2, config.pool().maxPoolSize() / 2));
        this.schema = new SchemaImpl(dataSource, config, executor);
        this.queryRunner = new JdbcQueryRunner(dataSource, executor);
        this.entityManager = new EntityManagerImpl(dataSource, executor);
    }

    @Override
    public Schema schema() {
        return schema;
    }

    @Override
    public QueryRunner query() {
        return queryRunner;
    }

    @Override
    public EntityManager orm() {
        return entityManager;
    }

    /**
     * Opens a connection with auto-commit disabled and wraps it in a
     * {@link Transaction}; caller must commit/rollback and close it.
     */
    @Override
    public Transaction beginTransaction() {
        try {
            Connection connection = dataSource.getConnection();
            connection.setAutoCommit(false);
            return new JdbcTransaction(connection, executor);
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to start transaction", ex);
        }
    }

    @Override
    public CompletableFuture<Transaction> beginTransactionAsync() {
        return CompletableFuture.supplyAsync(this::beginTransaction, executor);
    }

    @Override
    public void close() {
        // Stop accepting new async work, then give in-flight tasks a grace
        // period to finish BEFORE the datasource underneath them is closed.
        // Previously the datasource could be closed while tasks still ran.
        executor.shutdown();
        try {
            if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
                executor.shutdownNow();
            }
        } catch (InterruptedException ex) {
            executor.shutdownNow();
            Thread.currentThread().interrupt();
        }
        if (dataSource instanceof AutoCloseable) {
            try {
                ((AutoCloseable) dataSource).close();
            } catch (Exception ex) {
                throw new IllegalStateException("Failed to close datasource", ex);
            }
        }
    }
}

View File

@@ -0,0 +1,539 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.orm.EntityManager;
import com.andrewkydev.database.orm.EntityQuery;
import com.andrewkydev.database.orm.TypeAdapter;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.lang.reflect.Constructor;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.UUID;
import javax.sql.DataSource;
/**
 * Reflection-based {@link EntityManager} implementation.
 *
 * <p>Builds SQL from {@code EntityMetadata} (annotation-resolved table and
 * column mappings), binds parameters through {@code JdbcSupport}, and maps
 * result rows back onto entity instances. Fields annotated for JSON storage
 * are serialized with Gson; other non-trivial types go through registered
 * {@link TypeAdapter}s. Async variants run on the supplied executor.</p>
 */
public final class EntityManagerImpl implements EntityManager {

    private final DataSource dataSource;
    private final Executor executor;
    // Per-entity-class metadata, resolved once and cached.
    private final Map<Class<?>, EntityMetadata> metadataCache = new ConcurrentHashMap<>();
    // Custom value converters keyed by field type.
    private final Map<Class<?>, TypeAdapter<?>> adapters = new ConcurrentHashMap<>();
    private final Gson gson = new GsonBuilder().create();

    public EntityManagerImpl(DataSource dataSource, Executor executor) {
        this.dataSource = dataSource;
        this.executor = executor;
        registerDefaults();
    }

    /**
     * Inserts the entity. Auto-increment id fields without an explicit value
     * are omitted from the column list and populated from the generated key.
     */
    @Override
    public <T> void insert(T entity) {
        EntityMetadata metadata = metadata(entity.getClass());
        List<EntityMetadata.FieldMapping> columns = new ArrayList<>();
        List<Object> values = new ArrayList<>();
        EntityMetadata.FieldMapping idField = metadata.idFieldOrNull();
        boolean generateId = false;
        for (EntityMetadata.FieldMapping mapping : metadata.fields()) {
            Object value = getValue(mapping, entity);
            // Skip the id column entirely when the database should generate it.
            if (mapping.isId() && mapping.autoIncrement() && !hasExplicitValue(value)) {
                generateId = true;
                continue;
            }
            columns.add(mapping);
            values.add(value);
        }
        StringBuilder sql = new StringBuilder();
        sql.append("INSERT INTO ").append(metadata.table()).append(" (");
        for (int i = 0; i < columns.size(); i++) {
            if (i > 0) {
                sql.append(", ");
            }
            sql.append(columns.get(i).column());
        }
        sql.append(") VALUES (");
        for (int i = 0; i < columns.size(); i++) {
            if (i > 0) {
                sql.append(", ");
            }
            sql.append("?");
        }
        sql.append(")");
        if (generateId && idField != null) {
            // Write the database-generated key back into the entity's id field.
            Object generatedId = executeInsert(sql.toString(), values);
            if (generatedId != null) {
                setValue(idField, entity, generatedId);
            }
        } else {
            executeUpdate(sql.toString(), values);
        }
    }

    /**
     * Updates all non-id columns of the row matching the entity's id.
     *
     * @throws IllegalStateException if the id field has no explicit value
     */
    @Override
    public <T> void update(T entity) {
        EntityMetadata metadata = metadata(entity.getClass());
        EntityMetadata.FieldMapping idField = metadata.idField();
        Object idValue = getValue(idField, entity);
        if (!hasExplicitValue(idValue)) {
            throw new IllegalStateException("Entity id is required for update");
        }
        List<Object> params = new ArrayList<>();
        StringBuilder sql = new StringBuilder();
        sql.append("UPDATE ").append(metadata.table()).append(" SET ");
        boolean first = true;
        for (EntityMetadata.FieldMapping mapping : metadata.fields()) {
            if (mapping.isId()) {
                continue;
            }
            if (!first) {
                sql.append(", ");
            }
            first = false;
            sql.append(mapping.column()).append("=?");
            params.add(getValue(mapping, entity));
        }
        sql.append(" WHERE ").append(idField.column()).append("=?");
        params.add(idValue);
        executeUpdate(sql.toString(), params);
    }

    /**
     * Deletes the row matching the entity's id.
     *
     * @throws IllegalStateException if the id field has no explicit value
     */
    @Override
    public <T> void delete(T entity) {
        EntityMetadata metadata = metadata(entity.getClass());
        EntityMetadata.FieldMapping idField = metadata.idField();
        Object idValue = getValue(idField, entity);
        if (!hasExplicitValue(idValue)) {
            throw new IllegalStateException("Entity id is required for delete");
        }
        String sql = "DELETE FROM " + metadata.table() + " WHERE " + idField.column() + "=?";
        executeUpdate(sql, Collections.singletonList(idValue));
    }

    /** Returns the entity with the given id, or {@code null} if absent. */
    @Override
    public <T> T findById(Class<T> type, Object id) {
        EntityMetadata metadata = metadata(type);
        String sql = "SELECT * FROM " + metadata.table() + " WHERE " + metadata.idField().column() + "=?";
        List<T> result = query(sql, Collections.singletonList(id), type);
        return result.isEmpty() ? null : result.get(0);
    }

    /** Returns all rows of the entity's table mapped to instances. */
    @Override
    public <T> List<T> findAll(Class<T> type) {
        EntityMetadata metadata = metadata(type);
        String sql = "SELECT * FROM " + metadata.table();
        return query(sql, Collections.emptyList(), type);
    }

    /** First match of the WHERE clause (LIMIT 1), or {@code null}. */
    @Override
    public <T> T findOneWhere(Class<T> type, String where, List<Object> params) {
        List<T> results = findWhere(type, where, params, null, 1, null);
        return results.isEmpty() ? null : results.get(0);
    }

    /** All matches of the WHERE clause (raw SQL fragment, '?' placeholders). */
    @Override
    public <T> List<T> findWhere(Class<T> type, String where, List<Object> params) {
        EntityMetadata metadata = metadata(type);
        String sql = "SELECT * FROM " + metadata.table() + whereClause(where);
        return query(sql, normalizeParams(params), type);
    }

    /** Total row count of the entity's table. */
    @Override
    public <T> long count(Class<T> type) {
        return count(type, "", Collections.emptyList());
    }

    /** Row count for the given WHERE clause. */
    @Override
    public <T> long count(Class<T> type, String where, List<Object> params) {
        EntityMetadata metadata = metadata(type);
        String sql = "SELECT COUNT(*) FROM " + metadata.table() + whereClause(where);
        return queryCount(sql, normalizeParams(params));
    }

    /** Whether at least one row matches the WHERE clause. */
    @Override
    public <T> boolean exists(Class<T> type, String where, List<Object> params) {
        EntityMetadata metadata = metadata(type);
        String sql = "SELECT 1 FROM " + metadata.table() + whereClause(where) + " LIMIT 1";
        List<Integer> result = queryScalar(sql, normalizeParams(params));
        return !result.isEmpty();
    }

    /**
     * Deletes all rows matching the WHERE clause.
     *
     * @throws IllegalStateException if the clause is empty — guards against an
     *         accidental full-table delete
     */
    @Override
    public <T> int deleteWhere(Class<T> type, String where, List<Object> params) {
        if (where == null || where.trim().isEmpty()) {
            throw new IllegalStateException("deleteWhere requires a WHERE clause");
        }
        EntityMetadata metadata = metadata(type);
        String sql = "DELETE FROM " + metadata.table() + whereClause(where);
        return executeUpdate(sql, normalizeParams(params));
    }

    /**
     * Full-featured finder: WHERE + ORDER BY + LIMIT/OFFSET; null pieces are
     * omitted from the generated SQL.
     */
    @Override
    public <T> List<T> findWhere(
            Class<T> type,
            String where,
            List<Object> params,
            String orderBy,
            Integer limit,
            Integer offset
    ) {
        EntityMetadata metadata = metadata(type);
        String sql = "SELECT * FROM " + metadata.table()
                + whereClause(where)
                + orderByClause(orderBy)
                + limitClause(limit, offset);
        return query(sql, normalizeParams(params), type);
    }

    /** Async variant of the full-featured finder. */
    @Override
    public <T> CompletableFuture<List<T>> findWhereAsync(
            Class<T> type,
            String where,
            List<Object> params,
            String orderBy,
            Integer limit,
            Integer offset
    ) {
        return CompletableFuture.supplyAsync(
                () -> findWhere(type, where, params, orderBy, limit, offset),
                executor
        );
    }

    /** Registers a custom converter for fields of the given type. */
    @Override
    public <T> void registerAdapter(Class<T> type, TypeAdapter<T> adapter) {
        adapters.put(type, adapter);
    }

    /** Starts a fluent query builder for the given entity type. */
    @Override
    public <T> EntityQuery<T> query(Class<T> type) {
        return new EntityQueryImpl<>(this, type);
    }

    @Override
    public <T> CompletableFuture<Void> insertAsync(T entity) {
        return CompletableFuture.runAsync(() -> insert(entity), executor);
    }

    @Override
    public <T> CompletableFuture<Void> updateAsync(T entity) {
        return CompletableFuture.runAsync(() -> update(entity), executor);
    }

    @Override
    public <T> CompletableFuture<Void> deleteAsync(T entity) {
        return CompletableFuture.runAsync(() -> delete(entity), executor);
    }

    @Override
    public <T> CompletableFuture<T> findByIdAsync(Class<T> type, Object id) {
        return CompletableFuture.supplyAsync(() -> findById(type, id), executor);
    }

    @Override
    public <T> CompletableFuture<List<T>> findAllAsync(Class<T> type) {
        return CompletableFuture.supplyAsync(() -> findAll(type), executor);
    }

    @Override
    public <T> CompletableFuture<T> findOneWhereAsync(Class<T> type, String where, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> findOneWhere(type, where, params), executor);
    }

    @Override
    public <T> CompletableFuture<List<T>> findWhereAsync(Class<T> type, String where, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> findWhere(type, where, params), executor);
    }

    @Override
    public <T> CompletableFuture<Long> countAsync(Class<T> type) {
        return CompletableFuture.supplyAsync(() -> count(type), executor);
    }

    @Override
    public <T> CompletableFuture<Long> countAsync(Class<T> type, String where, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> count(type, where, params), executor);
    }

    @Override
    public <T> CompletableFuture<Boolean> existsAsync(Class<T> type, String where, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> exists(type, where, params), executor);
    }

    @Override
    public <T> CompletableFuture<Integer> deleteWhereAsync(Class<T> type, String where, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> deleteWhere(type, where, params), executor);
    }

    // Resolves (and caches) the metadata for an entity class.
    private EntityMetadata metadata(Class<?> type) {
        return metadataCache.computeIfAbsent(type, EntityMetadata::resolve);
    }

    // Executes a SELECT and maps every row onto a fresh entity instance.
    private <T> List<T> query(String sql, List<Object> params, Class<T> type) {
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(statement, params);
            try (ResultSet resultSet = statement.executeQuery()) {
                Set<String> columns = resolveColumns(resultSet);
                List<T> results = new ArrayList<>();
                while (resultSet.next()) {
                    results.add(mapRow(resultSet, type, columns));
                }
                return results;
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to query SQL: " + sql, ex);
        }
    }

    // Package-private hooks used by EntityQueryImpl to run fully custom SQL.
    <T> List<T> queryCustom(String sql, List<Object> params, Class<T> type) {
        return query(sql, normalizeParams(params), type);
    }

    long queryCountCustom(String sql, List<Object> params) {
        return queryCount(sql, normalizeParams(params));
    }

    boolean queryExistsCustom(String sql, List<Object> params) {
        List<Integer> result = queryScalar(sql, normalizeParams(params));
        return !result.isEmpty();
    }

    Executor executor() {
        return executor;
    }

    String tableFor(Class<?> type) {
        return metadata(type).table();
    }

    // Runs a COUNT-style query; returns 0 when the result set is empty.
    private long queryCount(String sql, List<Object> params) {
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(statement, params);
            try (ResultSet resultSet = statement.executeQuery()) {
                return resultSet.next() ? resultSet.getLong(1) : 0L;
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to query SQL: " + sql, ex);
        }
    }

    // Collects the first column of every row as an int (used by exists()).
    private List<Integer> queryScalar(String sql, List<Object> params) {
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(statement, params);
            try (ResultSet resultSet = statement.executeQuery()) {
                List<Integer> results = new ArrayList<>();
                while (resultSet.next()) {
                    results.add(resultSet.getInt(1));
                }
                return results;
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to query SQL: " + sql, ex);
        }
    }

    // Executes INSERT/UPDATE/DELETE and returns the affected-row count.
    private int executeUpdate(String sql, List<Object> params) {
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(statement, params);
            return statement.executeUpdate();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL: " + sql, ex);
        }
    }

    // Executes an INSERT and returns the first generated key, or null.
    private Object executeInsert(String sql, List<Object> params) {
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            JdbcSupport.bindParams(statement, params);
            statement.executeUpdate();
            try (ResultSet resultSet = statement.getGeneratedKeys()) {
                if (resultSet.next()) {
                    return resultSet.getObject(1);
                }
            }
            return null;
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL: " + sql, ex);
        }
    }

    // SQL-fragment helpers: each returns "" when its input is absent.
    private String whereClause(String where) {
        if (where == null || where.trim().isEmpty()) {
            return "";
        }
        return " WHERE " + where;
    }

    private String orderByClause(String orderBy) {
        if (orderBy == null || orderBy.trim().isEmpty()) {
            return "";
        }
        return " ORDER BY " + orderBy;
    }

    private String limitClause(Integer limit, Integer offset) {
        if (limit == null) {
            return "";
        }
        if (offset == null) {
            return " LIMIT " + limit;
        }
        return " LIMIT " + limit + " OFFSET " + offset;
    }

    private List<Object> normalizeParams(List<Object> params) {
        if (params == null) {
            return Collections.emptyList();
        }
        return params;
    }

    // NOTE: a numeric id of 0 is deliberately treated as "no explicit value"
    // so that uninitialized primitive ids trigger key generation on insert.
    private boolean hasExplicitValue(Object value) {
        if (value == null) {
            return false;
        }
        if (value instanceof Number) {
            return ((Number) value).longValue() != 0L;
        }
        return true;
    }

    // Reads a field and converts it to its database representation
    // (JSON string for @DbJson fields, adapter output otherwise).
    private <T> Object getValue(EntityMetadata.FieldMapping mapping, T entity) {
        try {
            Object value = mapping.field().get(entity);
            if (value == null) {
                return null;
            }
            if (mapping.json()) {
                return gson.toJson(value, mapping.field().getGenericType());
            }
            TypeAdapter<Object> adapter = adapterFor(mapping.field().getType());
            if (adapter != null) {
                return adapter.toDatabase(value);
            }
            return value;
        } catch (IllegalAccessException ex) {
            throw new IllegalStateException("Failed to access field " + mapping.field().getName(), ex);
        }
    }

    // Converts a database value back and writes it into the entity's field.
    private <T> void setValue(EntityMetadata.FieldMapping mapping, T entity, Object value) {
        try {
            Class<?> targetType = mapping.field().getType();
            Object coerced;
            if (mapping.json()) {
                coerced = gson.fromJson(value == null ? "null" : value.toString(), mapping.field().getGenericType());
            } else {
                TypeAdapter<Object> adapter = adapterFor(targetType);
                coerced = adapter == null ? coerceValue(targetType, value) : adapter.fromDatabase(value);
            }
            mapping.field().set(entity, coerced);
        } catch (IllegalAccessException ex) {
            throw new IllegalStateException("Failed to set field " + mapping.field().getName(), ex);
        }
    }

    // Best-effort numeric/string widening for driver-returned values whose
    // runtime class does not match the field type (e.g. BigInteger keys).
    private Object coerceValue(Class<?> targetType, Object value) {
        if (value == null) {
            return null;
        }
        if (targetType.isAssignableFrom(value.getClass())) {
            return value;
        }
        if (targetType == long.class || targetType == Long.class) {
            return ((Number) value).longValue();
        }
        if (targetType == int.class || targetType == Integer.class) {
            return ((Number) value).intValue();
        }
        if (targetType == short.class || targetType == Short.class) {
            return ((Number) value).shortValue();
        }
        if (targetType == String.class) {
            return value.toString();
        }
        return value;
    }

    @SuppressWarnings("unchecked")
    private TypeAdapter<Object> adapterFor(Class<?> type) {
        return (TypeAdapter<Object>) adapters.get(type);
    }

    // Maps the current result-set row onto a new instance of the entity type.
    // Columns absent from the result set are skipped (supports partial SELECTs).
    private <T> T mapRow(ResultSet resultSet, Class<T> type, Set<String> columns) {
        EntityMetadata metadata = metadata(type);
        T instance = instantiate(type);
        for (EntityMetadata.FieldMapping mapping : metadata.fields()) {
            if (!columns.contains(mapping.column().toLowerCase())) {
                continue;
            }
            try {
                Object value = resultSet.getObject(mapping.column());
                // Leave primitive fields at their default when the column is NULL.
                if (value == null && mapping.field().getType().isPrimitive()) {
                    continue;
                }
                setValue(mapping, instance, value);
            } catch (SQLException ex) {
                throw new IllegalStateException("Failed to read column " + mapping.column(), ex);
            }
        }
        return instance;
    }

    // Lower-cased labels of all columns present in the result set.
    private Set<String> resolveColumns(ResultSet resultSet) throws SQLException {
        Set<String> columns = new HashSet<>();
        int count = resultSet.getMetaData().getColumnCount();
        for (int i = 1; i <= count; i++) {
            String label = resultSet.getMetaData().getColumnLabel(i);
            if (label != null) {
                columns.add(label.toLowerCase());
            }
        }
        return columns;
    }

    // Entities must expose a (possibly private) no-arg constructor.
    private <T> T instantiate(Class<T> type) {
        try {
            Constructor<T> constructor = type.getDeclaredConstructor();
            constructor.setAccessible(true);
            return constructor.newInstance();
        } catch (Exception ex) {
            throw new IllegalStateException("Entity must have a no-arg constructor: " + type.getName(), ex);
        }
    }

    // Built-in adapters: UUID is stored as its canonical string form.
    private void registerDefaults() {
        registerAdapter(UUID.class, new TypeAdapter<UUID>() {
            @Override
            public Object toDatabase(UUID value) {
                return value == null ? null : value.toString();
            }

            @Override
            public UUID fromDatabase(Object value) {
                if (value == null) {
                    return null;
                }
                return UUID.fromString(value.toString());
            }
        });
    }
}

View File

@@ -0,0 +1,149 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.orm.DbColumn;
import com.andrewkydev.database.orm.DbEntity;
import com.andrewkydev.database.orm.DbId;
import com.andrewkydev.database.orm.DbJson;
import com.andrewkydev.database.orm.DbTransient;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
 * Resolved mapping of one entity class: its table name and the mapping of
 * each persistent field to a column. Built once per class via
 * {@link #resolve(Class)} and cached by {@code EntityManagerImpl}.
 */
final class EntityMetadata {

    private final String table;
    private final List<FieldMapping> fields;
    // Nullable: an entity without @DbId supports insert/find but not update/delete.
    private final FieldMapping idField;

    private EntityMetadata(String table, List<FieldMapping> fields, FieldMapping idField) {
        this.table = table;
        this.fields = fields;
        this.idField = idField;
    }

    /**
     * Scans the class hierarchy (subclass fields first, then superclasses up
     * to but excluding Object) and maps every non-static, non-@DbTransient
     * field to a column.
     *
     * @throws IllegalStateException on multiple @DbId fields, or when no
     *         mappable field exists at all
     */
    static EntityMetadata resolve(Class<?> type) {
        String table = resolveTable(type);
        List<FieldMapping> fields = new ArrayList<>();
        FieldMapping idField = null;
        Class<?> current = type;
        while (current != null && current != Object.class) {
            for (Field field : current.getDeclaredFields()) {
                if (Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                if (field.isAnnotationPresent(DbTransient.class)) {
                    continue;
                }
                String columnName = resolveColumnName(field);
                DbId id = field.getAnnotation(DbId.class);
                DbJson json = field.getAnnotation(DbJson.class);
                boolean isId = id != null;
                boolean autoIncrement = id != null && id.autoIncrement();
                boolean jsonField = json != null;
                FieldMapping mapping = new FieldMapping(field, columnName, isId, autoIncrement, jsonField);
                fields.add(mapping);
                if (isId) {
                    if (idField != null) {
                        throw new IllegalStateException("Multiple @DbId fields found for " + type.getName());
                    }
                    idField = mapping;
                }
            }
            current = current.getSuperclass();
        }
        if (fields.isEmpty()) {
            throw new IllegalStateException("No mappable fields found for " + type.getName());
        }
        return new EntityMetadata(table, fields, idField);
    }

    String table() {
        return table;
    }

    List<FieldMapping> fields() {
        return fields;
    }

    /**
     * The id field mapping.
     *
     * @throws IllegalStateException if the entity declared no @DbId field
     */
    FieldMapping idField() {
        if (idField == null) {
            throw new IllegalStateException("No @DbId field defined for entity table " + table);
        }
        return idField;
    }

    /** The id field mapping, or {@code null} if the entity has none. */
    FieldMapping idFieldOrNull() {
        return idField;
    }

    // Table name: explicit @DbEntity(table=...) wins, else snake_case of the class name.
    private static String resolveTable(Class<?> type) {
        DbEntity entity = type.getAnnotation(DbEntity.class);
        if (entity != null && !entity.table().isEmpty()) {
            return entity.table();
        }
        return toSnakeCase(type.getSimpleName());
    }

    // Column name: explicit @DbColumn(name=...) wins, else snake_case of the field name.
    private static String resolveColumnName(Field field) {
        DbColumn column = field.getAnnotation(DbColumn.class);
        if (column != null && !column.name().isEmpty()) {
            return column.name();
        }
        return toSnakeCase(field.getName());
    }

    // camelCase -> snake_case; every upper-case letter (after the first
    // character) gets an underscore prefix, so "userId" -> "user_id".
    private static String toSnakeCase(String value) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (Character.isUpperCase(c)) {
                if (i > 0) {
                    builder.append('_');
                }
                builder.append(Character.toLowerCase(c));
            } else {
                builder.append(c);
            }
        }
        return builder.toString();
    }

    /**
     * One persistent field: the reflected Field (made accessible once in the
     * constructor), its column name, and its id/auto-increment/JSON flags.
     */
    static final class FieldMapping {

        private final Field field;
        private final String column;
        private final boolean id;
        private final boolean autoIncrement;
        private final boolean json;

        FieldMapping(Field field, String column, boolean id, boolean autoIncrement, boolean json) {
            this.field = field;
            this.column = column;
            this.id = id;
            this.autoIncrement = autoIncrement;
            this.json = json;
            // Allow reads/writes of private entity fields without per-access setup.
            this.field.setAccessible(true);
        }

        Field field() {
            return field;
        }

        String column() {
            return column;
        }

        boolean isId() {
            return id;
        }

        boolean autoIncrement() {
            return autoIncrement;
        }

        boolean json() {
            return json;
        }
    }
}

View File

@@ -0,0 +1,329 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.orm.EntityQuery;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import com.andrewkydev.database.orm.Condition;
/**
 * Fluent {@link EntityQuery} builder that accumulates SELECT/JOIN/WHERE/
 * GROUP BY/HAVING/ORDER BY/LIMIT pieces and delegates execution to
 * {@code EntityManagerImpl}.
 *
 * <p>Note: this class continues beyond the reviewed excerpt; only the
 * builder methods shown here are documented.</p>
 */
final class EntityQueryImpl<T> implements EntityQuery<T> {

    private final EntityManagerImpl manager;
    private final Class<T> type;
    // All builder state is nullable; null means "omit this clause".
    private List<String> selectColumns;
    private String joinSql;
    private String where;
    private List<Object> params;
    private String orderBy;
    private String groupBy;
    private String having;
    private List<Object> havingParams;
    private Integer limit;
    private Integer offset;

    EntityQueryImpl(EntityManagerImpl manager, Class<T> type) {
        this.manager = manager;
        this.type = type;
    }

    /** Restricts the SELECT list; no args (or null) resets to SELECT *. */
    @Override
    public EntityQuery<T> select(String... columns) {
        if (columns == null || columns.length == 0) {
            this.selectColumns = null;
            return this;
        }
        this.selectColumns = new ArrayList<>(Arrays.asList(columns));
        return this;
    }

    /** Sets a raw JOIN fragment appended after the table name. */
    @Override
    public EntityQuery<T> join(String joinSql) {
        this.joinSql = joinSql;
        return this;
    }

    /** Replaces the WHERE clause with a raw fragment and its parameters. */
    @Override
    public EntityQuery<T> where(String where, List<Object> params) {
        this.where = where;
        // Defensive copy so later and()/or() calls don't mutate the caller's list.
        this.params = params == null ? null : new ArrayList<>(params);
        return this;
    }

    /** Varargs overload of {@link #where(String, List)}. */
    @Override
    public EntityQuery<T> where(String where, Object... params) {
        this.where = where;
        if (params == null || params.length == 0) {
            this.params = null;
        } else {
            this.params = new ArrayList<>(Arrays.asList(params));
        }
        return this;
    }

    /** Replaces the WHERE clause from a Condition; null clears it. */
    @Override
    public EntityQuery<T> where(Condition condition) {
        if (condition == null) {
            this.where = null;
            this.params = null;
            return this;
        }
        this.where = condition.sql();
        this.params = new ArrayList<>(condition.params());
        return this;
    }

    /** ANDs a condition onto the current WHERE (parenthesized); no-op on null. */
    @Override
    public EntityQuery<T> and(Condition condition) {
        if (condition == null) {
            return this;
        }
        if (this.where == null || this.where.trim().isEmpty()) {
            // No existing clause yet: behaves exactly like where(condition).
            return where(condition);
        }
        this.where = "(" + this.where + " AND " + condition.sql() + ")";
        if (this.params == null) {
            this.params = new ArrayList<>();
        }
        this.params.addAll(condition.params());
        return this;
    }

    /** ORs a condition onto the current WHERE (parenthesized); no-op on null. */
    @Override
    public EntityQuery<T> or(Condition condition) {
        if (condition == null) {
            return this;
        }
        if (this.where == null || this.where.trim().isEmpty()) {
            return where(condition);
        }
        this.where = "(" + this.where + " OR " + condition.sql() + ")";
        if (this.params == null) {
            this.params = new ArrayList<>();
        }
        this.params.addAll(condition.params());
        return this;
    }

    /** Sets the raw ORDER BY fragment. */
    @Override
    public EntityQuery<T> orderBy(String orderBy) {
        this.orderBy = orderBy;
        return this;
    }

    /** Sets the raw GROUP BY fragment. */
    @Override
    public EntityQuery<T> groupBy(String groupBy) {
        this.groupBy = groupBy;
        return this;
    }

    /** Replaces the HAVING clause with a raw fragment and its parameters. */
    @Override
    public EntityQuery<T> having(String having, List<Object> params) {
        this.having = having;
        this.havingParams = params == null ? null : new ArrayList<>(params);
        return this;
    }

    /** Varargs overload of {@link #having(String, List)}. */
    @Override
    public EntityQuery<T> having(String having, Object... params) {
        this.having = having;
        if (params == null || params.length == 0) {
            this.havingParams = null;
        } else {
            this.havingParams = new ArrayList<>(Arrays.asList(params));
        }
        return this;
    }

    /** Replaces the HAVING clause from a Condition; null clears it. */
    @Override
    public EntityQuery<T> having(Condition condition) {
        if (condition == null) {
            this.having = null;
            this.havingParams = null;
            return this;
        }
        this.having = condition.sql();
        this.havingParams = new ArrayList<>(condition.params());
        return this;
    }

    /** Caps the number of returned rows. */
    @Override
    public EntityQuery<T> limit(int limit) {
        this.limit = limit;
        return this;
    }
@Override
public EntityQuery<T> offset(int offset) {
this.offset = offset;
return this;
}
@Override
public EntityQuery<T> limit(int limit, int offset) {
this.limit = limit;
this.offset = offset;
return this;
}
@Override
public List<T> list() {
return manager.queryCustom(
buildSelectSql(),
mergeParams(),
type
);
}
@Override
public T one() {
List<T> results = manager.queryCustom(
buildSelectSqlWithLimit(1),
mergeParams(),
type
);
return results.isEmpty() ? null : results.get(0);
}
@Override
public long count() {
return manager.queryCountCustom(buildCountSql(), mergeParams());
}
@Override
public boolean exists() {
return manager.queryExistsCustom(buildExistsSql(), mergeParams());
}
@Override
public int delete() {
if (joinSql != null || groupBy != null || having != null) {
throw new IllegalStateException("delete does not support join/group/having");
}
return manager.deleteWhere(type, where, params);
}
@Override
public CompletableFuture<List<T>> listAsync() {
return CompletableFuture.supplyAsync(this::list, manager.executor());
}
@Override
public CompletableFuture<T> oneAsync() {
return CompletableFuture.supplyAsync(this::one, manager.executor());
}
@Override
public CompletableFuture<Long> countAsync() {
return CompletableFuture.supplyAsync(this::count, manager.executor());
}
@Override
public CompletableFuture<Boolean> existsAsync() {
return CompletableFuture.supplyAsync(this::exists, manager.executor());
}
@Override
public CompletableFuture<Integer> deleteAsync() {
return CompletableFuture.supplyAsync(this::delete, manager.executor());
}
private String buildSelectSql() {
return buildSelectSqlWithLimit(null);
}
private String buildSelectSqlWithLimit(Integer overrideLimit) {
String select = selectColumns == null || selectColumns.isEmpty()
? "*"
: String.join(", ", selectColumns);
StringBuilder sql = new StringBuilder("SELECT ");
sql.append(select).append(" FROM ").append(manager.tableFor(type));
if (joinSql != null && !joinSql.trim().isEmpty()) {
sql.append(" ").append(joinSql);
}
if (where != null && !where.trim().isEmpty()) {
sql.append(" WHERE ").append(where);
}
if (groupBy != null && !groupBy.trim().isEmpty()) {
sql.append(" GROUP BY ").append(groupBy);
}
if (having != null && !having.trim().isEmpty()) {
sql.append(" HAVING ").append(having);
}
if (orderBy != null && !orderBy.trim().isEmpty()) {
sql.append(" ORDER BY ").append(orderBy);
}
Integer effectiveLimit = overrideLimit == null ? limit : overrideLimit;
if (effectiveLimit != null) {
sql.append(" LIMIT ").append(effectiveLimit);
}
if (offset != null) {
if (effectiveLimit == null) {
sql.append(" LIMIT 2147483647");
}
sql.append(" OFFSET ").append(offset);
}
return sql.toString();
}
private String buildCountSql() {
StringBuilder sql = new StringBuilder();
if (groupBy == null || groupBy.trim().isEmpty()) {
sql.append("SELECT COUNT(*) FROM ").append(manager.tableFor(type));
if (joinSql != null && !joinSql.trim().isEmpty()) {
sql.append(" ").append(joinSql);
}
if (where != null && !where.trim().isEmpty()) {
sql.append(" WHERE ").append(where);
}
if (having != null && !having.trim().isEmpty()) {
sql.append(" HAVING ").append(having);
}
return sql.toString();
}
sql.append("SELECT COUNT(*) FROM (");
sql.append("SELECT 1 FROM ").append(manager.tableFor(type));
if (joinSql != null && !joinSql.trim().isEmpty()) {
sql.append(" ").append(joinSql);
}
if (where != null && !where.trim().isEmpty()) {
sql.append(" WHERE ").append(where);
}
sql.append(" GROUP BY ").append(groupBy);
if (having != null && !having.trim().isEmpty()) {
sql.append(" HAVING ").append(having);
}
sql.append(") t");
return sql.toString();
}
private String buildExistsSql() {
StringBuilder sql = new StringBuilder("SELECT 1 FROM ");
sql.append(manager.tableFor(type));
if (joinSql != null && !joinSql.trim().isEmpty()) {
sql.append(" ").append(joinSql);
}
if (where != null && !where.trim().isEmpty()) {
sql.append(" WHERE ").append(where);
}
if (groupBy != null && !groupBy.trim().isEmpty()) {
sql.append(" GROUP BY ").append(groupBy);
}
if (having != null && !having.trim().isEmpty()) {
sql.append(" HAVING ").append(having);
}
sql.append(" LIMIT 1");
return sql.toString();
}
private List<Object> mergeParams() {
List<Object> merged = new ArrayList<>();
if (params != null) {
merged.addAll(params);
}
if (havingParams != null) {
merged.addAll(havingParams);
}
return merged;
}
}

View File

@@ -0,0 +1,82 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.query.QueryRunner;
import com.andrewkydev.database.query.RowMapper;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import javax.sql.DataSource;
/**
 * {@link QueryRunner} backed by a pooled {@link DataSource}: each call
 * borrows a connection, executes, and returns it to the pool. The async
 * variants run the same blocking call on the configured {@link Executor}.
 */
public final class JdbcQueryRunner implements QueryRunner {
    private final DataSource dataSource;
    private final Executor executor;

    public JdbcQueryRunner(DataSource dataSource, Executor executor) {
        this.dataSource = dataSource;
        this.executor = executor;
    }

    /** Executes a parameterless statement; returns the affected row count. */
    @Override
    public int execute(String sql) {
        return execute(sql, Collections.emptyList());
    }

    /** Executes {@code sql} with positional parameters bound to its placeholders. */
    @Override
    public int execute(String sql, List<Object> params) {
        try (Connection conn = dataSource.getConnection();
             PreparedStatement stmt = conn.prepareStatement(sql)) {
            JdbcSupport.bindParams(stmt, params);
            return stmt.executeUpdate();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL: " + sql, ex);
        }
    }

    /** Runs a parameterless query, mapping each row with {@code mapper}. */
    @Override
    public <T> List<T> query(String sql, RowMapper<T> mapper) {
        return query(sql, Collections.emptyList(), mapper);
    }

    /** Runs a parameterized query, mapping each row with {@code mapper}. */
    @Override
    public <T> List<T> query(String sql, List<Object> params, RowMapper<T> mapper) {
        try (Connection conn = dataSource.getConnection();
             PreparedStatement stmt = conn.prepareStatement(sql)) {
            JdbcSupport.bindParams(stmt, params);
            try (ResultSet rs = stmt.executeQuery()) {
                return readAll(rs, mapper);
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to query SQL: " + sql, ex);
        }
    }

    @Override
    public CompletableFuture<Integer> executeAsync(String sql) {
        return CompletableFuture.supplyAsync(() -> execute(sql), executor);
    }

    @Override
    public CompletableFuture<Integer> executeAsync(String sql, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> execute(sql, params), executor);
    }

    @Override
    public <T> CompletableFuture<List<T>> queryAsync(String sql, RowMapper<T> mapper) {
        return CompletableFuture.supplyAsync(() -> query(sql, mapper), executor);
    }

    @Override
    public <T> CompletableFuture<List<T>> queryAsync(String sql, List<Object> params, RowMapper<T> mapper) {
        return CompletableFuture.supplyAsync(() -> query(sql, params, mapper), executor);
    }

    /** Drains the result set, mapping rows in cursor order. */
    private static <T> List<T> readAll(ResultSet rs, RowMapper<T> mapper) throws SQLException {
        List<T> rows = new ArrayList<>();
        while (rs.next()) {
            rows.add(mapper.map(rs));
        }
        return rows;
    }
}

View File

@@ -0,0 +1,19 @@
package com.andrewkydev.database.internal;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
/**
 * Internal JDBC helpers shared by the query runner and transaction
 * implementations.
 */
final class JdbcSupport {

    private JdbcSupport() {
        // static utility holder; never instantiated
    }

    /**
     * Binds the given positional parameters to {@code statement} using
     * 1-based JDBC parameter indexes. A null or empty list is a no-op.
     */
    static void bindParams(PreparedStatement statement, List<Object> params) throws SQLException {
        if (params == null) {
            return;
        }
        int index = 1;
        for (Object param : params) {
            statement.setObject(index++, param);
        }
    }
}

View File

@@ -0,0 +1,116 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.query.RowMapper;
import com.andrewkydev.database.query.Transaction;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
/**
 * A {@link Transaction} pinned to a single JDBC {@link Connection}. All
 * statements run on that connection until {@link #commit()} or
 * {@link #rollback()}; the caller is responsible for {@link #close()}.
 */
public final class JdbcTransaction implements Transaction {
    private final Connection connection;
    private final Executor executor;

    public JdbcTransaction(Connection connection, Executor executor) {
        this.connection = connection;
        this.executor = executor;
    }

    /** Executes a parameterless statement; returns the affected row count. */
    @Override
    public int execute(String sql) {
        return execute(sql, Collections.emptyList());
    }

    /** Executes {@code sql} with positional parameters bound to its placeholders. */
    @Override
    public int execute(String sql, List<Object> params) {
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(stmt, params);
            return stmt.executeUpdate();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL: " + sql, ex);
        }
    }

    /** Runs a parameterless query, mapping each row with {@code mapper}. */
    @Override
    public <T> List<T> query(String sql, RowMapper<T> mapper) {
        return query(sql, Collections.emptyList(), mapper);
    }

    /** Runs a parameterized query, mapping each row with {@code mapper}. */
    @Override
    public <T> List<T> query(String sql, List<Object> params, RowMapper<T> mapper) {
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            JdbcSupport.bindParams(stmt, params);
            try (ResultSet rs = stmt.executeQuery()) {
                return readAll(rs, mapper);
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to query SQL: " + sql, ex);
        }
    }

    // Async variants run the same blocking calls on the supplied executor.

    @Override
    public CompletableFuture<Integer> executeAsync(String sql) {
        return CompletableFuture.supplyAsync(() -> execute(sql), executor);
    }

    @Override
    public CompletableFuture<Integer> executeAsync(String sql, List<Object> params) {
        return CompletableFuture.supplyAsync(() -> execute(sql, params), executor);
    }

    @Override
    public <T> CompletableFuture<List<T>> queryAsync(String sql, RowMapper<T> mapper) {
        return CompletableFuture.supplyAsync(() -> query(sql, mapper), executor);
    }

    @Override
    public <T> CompletableFuture<List<T>> queryAsync(String sql, List<Object> params, RowMapper<T> mapper) {
        return CompletableFuture.supplyAsync(() -> query(sql, params, mapper), executor);
    }

    @Override
    public void commit() {
        try {
            connection.commit();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to commit transaction", ex);
        }
    }

    @Override
    public void rollback() {
        try {
            connection.rollback();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to rollback transaction", ex);
        }
    }

    @Override
    public CompletableFuture<Void> commitAsync() {
        return CompletableFuture.runAsync(this::commit, executor);
    }

    @Override
    public CompletableFuture<Void> rollbackAsync() {
        return CompletableFuture.runAsync(this::rollback, executor);
    }

    /** Releases the underlying connection back to its owner. */
    @Override
    public void close() {
        try {
            connection.close();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to close transaction connection", ex);
        }
    }

    /** Drains the result set, mapping rows in cursor order. */
    private static <T> List<T> readAll(ResultSet rs, RowMapper<T> mapper) throws SQLException {
        List<T> rows = new ArrayList<>();
        while (rs.next()) {
            rows.add(mapper.map(rs));
        }
        return rows;
    }
}

View File

@@ -0,0 +1,27 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.schema.SqlDialect;
/**
 * Builds JDBC connection URLs for the supported dialects.
 */
public final class JdbcUrlBuilder {
    /**
     * Connection flags required for MySQL 8's default caching_sha2_password
     * auth without SSL. Previously these were omitted when no database was
     * given (the admin-connection case used by CREATE/DROP DATABASE), which
     * made exactly those connections fail against modern MySQL servers.
     */
    private static final String MYSQL_PARAMS = "?useSSL=false&allowPublicKeyRetrieval=true";

    private JdbcUrlBuilder() {
        // static utility holder; never instantiated
    }

    /**
     * Builds a JDBC URL for the given dialect.
     *
     * @param dialect  target SQL dialect; anything that is not POSTGRESQL is
     *                 treated as MySQL
     * @param database database name; null/empty means "no specific database"
     *                 (PostgreSQL falls back to the {@code postgres} database,
     *                 since it cannot connect without one)
     */
    public static String build(SqlDialect dialect, String host, int port, String database) {
        String dbSegment = database == null ? "" : database;
        return switch (dialect) {
            case POSTGRESQL -> {
                if (dbSegment.isEmpty()) {
                    dbSegment = "postgres";
                }
                yield "jdbc:postgresql://" + host + ":" + port + "/" + dbSegment;
            }
            // MySQL: the auth flags are appended for both the with-database
            // and the database-less (admin) form, for consistency.
            default -> "jdbc:mysql://" + host + ":" + port + "/" + dbSegment + MYSQL_PARAMS;
        };
    }
}

View File

@@ -0,0 +1,271 @@
package com.andrewkydev.database.internal;
import com.andrewkydev.database.config.DatabaseConfig;
import com.andrewkydev.database.schema.ColumnSpec;
import com.andrewkydev.database.schema.IndexSpec;
import com.andrewkydev.database.schema.Schema;
import com.andrewkydev.database.schema.SqlDialect;
import com.andrewkydev.database.schema.TableSpec;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringJoiner;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import javax.sql.DataSource;
/**
 * {@link Schema} implementation that generates dialect-aware DDL and executes
 * it against the pooled {@link DataSource}. Database-level statements
 * (CREATE/DROP DATABASE) instead run on a short-lived direct
 * {@link DriverManager} connection — presumably because the pooled
 * connections are bound to a specific database (TODO confirm against the
 * DataSource configuration).
 *
 * <p>NOTE(review): identifiers (database, table, column, index names) are
 * concatenated into the SQL verbatim, without quoting or escaping; callers
 * must supply trusted names only.
 */
public final class SchemaImpl implements Schema {
    private final DataSource dataSource;
    private final DatabaseConfig config;
    private final SqlDialect dialect;
    private final Executor executor;

    public SchemaImpl(DataSource dataSource, DatabaseConfig config, Executor executor) {
        this.dataSource = dataSource;
        this.config = config;
        this.dialect = config.dialect();
        this.executor = executor;
    }

    @Override
    public void createDatabase(String name) {
        String sql = "CREATE DATABASE " + name;
        executeAdmin(sql);
    }

    @Override
    public void dropDatabase(String name) {
        String sql = "DROP DATABASE " + name;
        executeAdmin(sql);
    }

    /** Creates the table, then each declared index, as separate statements. */
    @Override
    public void createTable(TableSpec spec) {
        List<String> statements = new ArrayList<>();
        statements.add(buildCreateTable(spec));
        for (IndexSpec index : spec.indexes()) {
            statements.add(buildCreateIndex(spec.name(), index));
        }
        executeStatements(statements, true);
    }

    @Override
    public void dropTable(String table) {
        executeStatements(singletonList("DROP TABLE " + table), true);
    }

    @Override
    public void addColumn(String table, ColumnSpec column) {
        executeStatements(singletonList("ALTER TABLE " + table + " ADD COLUMN " + columnDefinition(column)), true);
    }

    @Override
    public void updateColumn(String table, ColumnSpec column) {
        executeStatements(buildUpdateColumnStatements(table, column), true);
    }

    @Override
    public void dropColumn(String table, String column) {
        executeStatements(singletonList("ALTER TABLE " + table + " DROP COLUMN " + column), true);
    }

    @Override
    public void addIndex(String table, IndexSpec index) {
        executeStatements(singletonList(buildCreateIndex(table, index)), true);
    }

    /** PostgreSQL drops an index by name alone; MySQL needs the owning table. */
    @Override
    public void dropIndex(String table, String indexName) {
        String sql = dialect == SqlDialect.POSTGRESQL
                ? "DROP INDEX " + indexName
                : "DROP INDEX " + indexName + " ON " + table;
        executeStatements(singletonList(sql), true);
    }

    // Async variants: same operations, scheduled on the configured executor.

    @Override
    public CompletableFuture<Void> createDatabaseAsync(String name) {
        return CompletableFuture.runAsync(() -> createDatabase(name), executor);
    }

    @Override
    public CompletableFuture<Void> dropDatabaseAsync(String name) {
        return CompletableFuture.runAsync(() -> dropDatabase(name), executor);
    }

    @Override
    public CompletableFuture<Void> createTableAsync(TableSpec spec) {
        return CompletableFuture.runAsync(() -> createTable(spec), executor);
    }

    @Override
    public CompletableFuture<Void> dropTableAsync(String table) {
        return CompletableFuture.runAsync(() -> dropTable(table), executor);
    }

    @Override
    public CompletableFuture<Void> addColumnAsync(String table, ColumnSpec column) {
        return CompletableFuture.runAsync(() -> addColumn(table, column), executor);
    }

    @Override
    public CompletableFuture<Void> updateColumnAsync(String table, ColumnSpec column) {
        return CompletableFuture.runAsync(() -> updateColumn(table, column), executor);
    }

    @Override
    public CompletableFuture<Void> dropColumnAsync(String table, String column) {
        return CompletableFuture.runAsync(() -> dropColumn(table, column), executor);
    }

    @Override
    public CompletableFuture<Void> addIndexAsync(String table, IndexSpec index) {
        return CompletableFuture.runAsync(() -> addIndex(table, index), executor);
    }

    @Override
    public CompletableFuture<Void> dropIndexAsync(String table, String indexName) {
        return CompletableFuture.runAsync(() -> dropIndex(table, indexName), executor);
    }

    /**
     * Runs a server-level statement on a short-lived direct connection built
     * from the configured credentials, bypassing the pool.
     */
    private void executeAdmin(String sql) {
        try (Connection connection = DriverManager.getConnection(
                JdbcUrlBuilder.build(dialect, config.host(), config.port(), adminDatabase()),
                config.username(),
                config.password()
        );
             PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.executeUpdate();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute admin SQL: " + sql, ex);
        }
    }

    /**
     * PostgreSQL must connect to an existing database (the configured admin
     * database) to issue CREATE/DROP DATABASE; MySQL gets a database-less URL.
     */
    private String adminDatabase() {
        if (dialect == SqlDialect.POSTGRESQL) {
            return config.adminDatabase();
        }
        return "";
    }

    /**
     * Executes the statements either one-by-one (auto-commit) or atomically
     * inside a single transaction, when both the caller allows it and
     * {@code config.autoTransactions()} is enabled.
     */
    private void executeStatements(List<String> statements, boolean allowTransactions) {
        boolean useTransaction = allowTransactions && config.autoTransactions();
        if (!useTransaction) {
            for (String statement : statements) {
                executeStatement(statement);
            }
            return;
        }
        try (Connection connection = dataSource.getConnection()) {
            connection.setAutoCommit(false);
            // NOTE(review): auto-commit is not restored before the connection
            // goes back to the pool; HikariCP resets it on checkin, but
            // confirm if a different DataSource is ever wired in here.
            try {
                for (String sql : statements) {
                    try (PreparedStatement statement = connection.prepareStatement(sql)) {
                        statement.executeUpdate();
                    }
                }
                connection.commit();
            } catch (SQLException ex) {
                connection.rollback();
                throw ex;
            }
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL statements", ex);
        }
    }

    /** Mutable single-element list (unlike {@code Collections.singletonList}). */
    private List<String> singletonList(String statement) {
        List<String> statements = new ArrayList<>(1);
        statements.add(statement);
        return statements;
    }

    /** Runs one statement on a freshly borrowed pooled connection. */
    private void executeStatement(String sql) {
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.executeUpdate();
        } catch (SQLException ex) {
            throw new IllegalStateException("Failed to execute SQL: " + sql, ex);
        }
    }

    /**
     * Builds the CREATE TABLE statement. An explicit table-level primary key
     * wins; otherwise the key is assembled from columns flagged primaryKey.
     */
    private String buildCreateTable(TableSpec spec) {
        StringJoiner joiner = new StringJoiner(", ");
        for (ColumnSpec column : spec.columns()) {
            joiner.add(columnDefinition(column));
        }
        if (!spec.primaryKey().isEmpty()) {
            joiner.add("PRIMARY KEY (" + String.join(", ", spec.primaryKey()) + ")");
        } else {
            List<String> inlineKeys = new ArrayList<>();
            for (ColumnSpec column : spec.columns()) {
                if (column.primaryKey()) {
                    inlineKeys.add(column.name());
                }
            }
            if (!inlineKeys.isEmpty()) {
                joiner.add("PRIMARY KEY (" + String.join(", ", inlineKeys) + ")");
            }
        }
        return "CREATE TABLE " + spec.name() + " (" + joiner + ")";
    }

    private String buildCreateIndex(String table, IndexSpec index) {
        String prefix = index.unique() ? "CREATE UNIQUE INDEX " : "CREATE INDEX ";
        return prefix + index.name() + " ON " + table + " (" + String.join(", ", index.columns()) + ")";
    }

    /**
     * Renders one column definition: name, type, NOT NULL, DEFAULT, and — for
     * MySQL only — AUTO_INCREMENT. PostgreSQL auto-increment is expressed via
     * the type instead; see {@link #resolveColumnType(ColumnSpec)}.
     */
    private String columnDefinition(ColumnSpec column) {
        StringBuilder builder = new StringBuilder();
        builder.append(column.name()).append(" ");
        builder.append(resolveColumnType(column));
        if (!column.nullable()) {
            builder.append(" NOT NULL");
        }
        if (column.defaultValue() != null && !column.defaultValue().isEmpty()) {
            builder.append(" DEFAULT ").append(column.defaultValue());
        }
        if (column.autoIncrement() && dialect == SqlDialect.MYSQL) {
            builder.append(" AUTO_INCREMENT");
        }
        return builder.toString();
    }

    /**
     * For PostgreSQL, maps auto-increment integer columns to
     * SERIAL/BIGSERIAL; any other auto-increment type falls back to a
     * GENERATED BY DEFAULT identity column. All other cases keep the
     * declared type unchanged.
     */
    private String resolveColumnType(ColumnSpec column) {
        if (!column.autoIncrement() || dialect != SqlDialect.POSTGRESQL) {
            return column.type();
        }
        String type = column.type().toUpperCase();
        if (type.contains("BIG")) {
            return "BIGSERIAL";
        }
        if (type.contains("INT")) {
            return "SERIAL";
        }
        return column.type() + " GENERATED BY DEFAULT AS IDENTITY";
    }

    /**
     * MySQL can change a column with one MODIFY COLUMN; PostgreSQL needs
     * separate ALTER statements for type, nullability, and default.
     */
    private List<String> buildUpdateColumnStatements(String table, ColumnSpec column) {
        List<String> statements = new ArrayList<>();
        if (dialect == SqlDialect.MYSQL) {
            statements.add("ALTER TABLE " + table + " MODIFY COLUMN " + columnDefinition(column));
            return statements;
        }
        statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " TYPE " + column.type());
        if (column.nullable()) {
            statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " DROP NOT NULL");
        } else {
            statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " SET NOT NULL");
        }
        if (column.defaultValue() == null || column.defaultValue().isEmpty()) {
            statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " DROP DEFAULT");
        } else {
            statements.add("ALTER TABLE " + table + " ALTER COLUMN " + column.name() + " SET DEFAULT " + column.defaultValue());
        }
        return statements;
    }
}

View File

@@ -0,0 +1,38 @@
package com.andrewkydev.database.orm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public final class Condition {
private final String sql;
private final List<Object> params;
Condition(String sql, List<Object> params) {
this.sql = sql;
this.params = params == null ? Collections.emptyList() : params;
}
public String sql() {
return sql;
}
public List<Object> params() {
return params;
}
public Condition and(Condition other) {
return combine("AND", other);
}
public Condition or(Condition other) {
return combine("OR", other);
}
private Condition combine(String op, Condition other) {
List<Object> combined = new ArrayList<>(params.size() + other.params.size());
combined.addAll(params);
combined.addAll(other.params);
return new Condition("(" + sql + " " + op + " " + other.sql + ")", combined);
}
}

View File

@@ -0,0 +1,85 @@
package com.andrewkydev.database.orm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Static factories for building {@link Condition} predicates with
 * parameterized SQL fragments.
 */
public final class Conditions {

    private Conditions() {
        // static factory holder; never instantiated
    }

    /** Wraps a raw SQL fragment with an explicit parameter list. */
    public static Condition raw(String sql, List<Object> params) {
        return new Condition(sql, params);
    }

    /** Wraps a raw SQL fragment with varargs parameters. */
    public static Condition raw(String sql, Object... params) {
        return new Condition(sql, params == null ? Collections.emptyList() : asList(params));
    }

    /** {@code column = ?} */
    public static Condition eq(String column, Object value) {
        return compare(column, "=", value);
    }

    /** {@code column <> ?} */
    public static Condition ne(String column, Object value) {
        return compare(column, "<>", value);
    }

    /** {@code column > ?} */
    public static Condition gt(String column, Object value) {
        return compare(column, ">", value);
    }

    /** {@code column >= ?} */
    public static Condition gte(String column, Object value) {
        return compare(column, ">=", value);
    }

    /** {@code column < ?} */
    public static Condition lt(String column, Object value) {
        return compare(column, "<", value);
    }

    /** {@code column <= ?} */
    public static Condition lte(String column, Object value) {
        return compare(column, "<=", value);
    }

    /** {@code column LIKE ?} */
    public static Condition like(String column, Object value) {
        return compare(column, "LIKE", value);
    }

    /**
     * {@code column IN (?, ?, ...)}; a null/empty list yields the
     * always-false predicate {@code 1=0}.
     */
    public static Condition in(String column, List<Object> values) {
        if (values == null || values.isEmpty()) {
            return new Condition("1=0", Collections.emptyList());
        }
        String placeholders = String.join(", ", Collections.nCopies(values.size(), "?"));
        return new Condition(column + " IN (" + placeholders + ")", values);
    }

    /** Varargs form of {@link #in(String, List)}. */
    public static Condition in(String column, Object... values) {
        if (values == null || values.length == 0) {
            return new Condition("1=0", Collections.emptyList());
        }
        return in(column, asList(values));
    }

    /** {@code column IS NULL} */
    public static Condition isNull(String column) {
        return new Condition(column + " IS NULL", Collections.emptyList());
    }

    /** {@code column IS NOT NULL} */
    public static Condition isNotNull(String column) {
        return new Condition(column + " IS NOT NULL", Collections.emptyList());
    }

    /** Copies the array into a mutable list (null elements allowed). */
    private static List<Object> asList(Object[] params) {
        List<Object> copy = new ArrayList<>(params.length);
        Collections.addAll(copy, params);
        return copy;
    }

    /** Builds a binary comparison {@code column <op> ?} with one bind value. */
    private static Condition compare(String column, String op, Object value) {
        return new Condition(column + " " + op + " ?", Collections.singletonList(value));
    }
}

View File

@@ -0,0 +1,20 @@
package com.andrewkydev.database.orm;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Overrides the column mapping of an entity field. All attributes are
 * optional; {@code OrmSchema} derives defaults from the field itself.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbColumn {
    /** Explicit column name; empty string means derive snake_case from the field name. */
    String name() default "";

    /** Whether the column accepts NULL. */
    boolean nullable() default true;

    /** VARCHAR length used when the SQL type is inferred for String fields. */
    int length() default 255;

    /** When true, a unique index ({@code <table>_<column>_uk}) is generated. */
    boolean unique() default false;

    /** Explicit SQL type; empty string means infer from the Java field type. */
    String type() default "";
}

View File

@@ -0,0 +1,12 @@
package com.andrewkydev.database.orm;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a class as a persistable entity handled by the ORM layer.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface DbEntity {
    /** Table name; empty string means derive snake_case from the simple class name. */
    String table() default "";
}

View File

@@ -0,0 +1,12 @@
package com.andrewkydev.database.orm;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks the field holding the entity's primary key.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbId {
    /** When true, the key column is database-generated (AUTO_INCREMENT / SERIAL). */
    boolean autoIncrement() default true;
}

View File

@@ -0,0 +1,11 @@
package com.andrewkydev.database.orm;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field that is mapped to a JSON column
 * (JSONB on PostgreSQL, JSON on MySQL).
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbJson {
}

View File

@@ -0,0 +1,11 @@
package com.andrewkydev.database.orm;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Excludes a field from entity mapping and schema generation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DbTransient {
}

View File

@@ -0,0 +1,73 @@
package com.andrewkydev.database.orm;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
 * Annotation-driven persistence facade: CRUD, lookups, counting, and
 * deletion for classes mapped with {@code @DbEntity}/{@code @DbColumn}.
 * Every synchronous operation has an async counterpart returning a
 * {@link CompletableFuture}. Raw {@code where} fragments use {@code ?}
 * placeholders bound in order from the accompanying {@code params} list.
 */
public interface EntityManager {

    // --- synchronous CRUD ---

    /** Persists the given entity as a new row. */
    <T> void insert(T entity);

    /** Writes the entity's current state back to its existing row. */
    <T> void update(T entity);

    /** Removes the entity's row. */
    <T> void delete(T entity);

    /** Loads the entity with the given primary-key value. */
    <T> T findById(Class<T> type, Object id);

    /** Loads every row of the entity's table. */
    <T> List<T> findAll(Class<T> type);

    /** Loads the first entity matching the raw WHERE fragment. */
    <T> T findOneWhere(Class<T> type, String where, List<Object> params);

    /** Loads all entities matching the raw WHERE fragment. */
    <T> List<T> findWhere(Class<T> type, String where, List<Object> params);

    /** Counts all rows of the entity's table. */
    <T> long count(Class<T> type);

    /** Counts rows matching the WHERE fragment. */
    <T> long count(Class<T> type, String where, List<Object> params);

    /** True if at least one row matches the WHERE fragment. */
    <T> boolean exists(Class<T> type, String where, List<Object> params);

    /** Deletes rows matching the WHERE fragment; returns the affected count. */
    <T> int deleteWhere(Class<T> type, String where, List<Object> params);

    /**
     * Finds matching entities with optional ordering and paging;
     * {@code orderBy}, {@code limit} and {@code offset} may each be null to
     * omit the corresponding clause.
     */
    <T> List<T> findWhere(
            Class<T> type,
            String where,
            List<Object> params,
            String orderBy,
            Integer limit,
            Integer offset
    );

    /** Async variant of the ordering/paging {@code findWhere} overload. */
    <T> CompletableFuture<List<T>> findWhereAsync(
            Class<T> type,
            String where,
            List<Object> params,
            String orderBy,
            Integer limit,
            Integer offset
    );

    /** Registers a custom converter between {@code type} and its column value. */
    <T> void registerAdapter(Class<T> type, TypeAdapter<T> adapter);

    /** Starts a fluent {@link EntityQuery} builder for the entity type. */
    <T> EntityQuery<T> query(Class<T> type);

    // --- async variants of the synchronous operations above ---

    <T> CompletableFuture<Void> insertAsync(T entity);

    <T> CompletableFuture<Void> updateAsync(T entity);

    <T> CompletableFuture<Void> deleteAsync(T entity);

    <T> CompletableFuture<T> findByIdAsync(Class<T> type, Object id);

    <T> CompletableFuture<List<T>> findAllAsync(Class<T> type);

    <T> CompletableFuture<T> findOneWhereAsync(Class<T> type, String where, List<Object> params);

    <T> CompletableFuture<List<T>> findWhereAsync(Class<T> type, String where, List<Object> params);

    <T> CompletableFuture<Long> countAsync(Class<T> type);

    <T> CompletableFuture<Long> countAsync(Class<T> type, String where, List<Object> params);

    <T> CompletableFuture<Boolean> existsAsync(Class<T> type, String where, List<Object> params);

    <T> CompletableFuture<Integer> deleteWhereAsync(Class<T> type, String where, List<Object> params);
}

View File

@@ -0,0 +1,57 @@
package com.andrewkydev.database.orm;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
 * Fluent builder for SELECT / COUNT / EXISTS / DELETE statements over a
 * single entity type. Clause setters return {@code this} for chaining;
 * terminal methods ({@link #list()}, {@link #one()}, {@link #count()},
 * {@link #exists()}, {@link #delete()}) execute the query, each with an
 * async counterpart. Raw fragments use {@code ?} placeholders bound from
 * the accompanying params.
 */
public interface EntityQuery<T> {
    /** Restricts projected columns; no/empty args selects {@code *}. */
    EntityQuery<T> select(String... columns);

    /** Sets a raw JOIN clause appended after the table name. */
    EntityQuery<T> join(String joinSql);

    /** Replaces the WHERE clause with a raw fragment and parameter list. */
    EntityQuery<T> where(String where, List<Object> params);

    /** Replaces the WHERE clause with a raw fragment and varargs parameters. */
    EntityQuery<T> where(String where, Object... params);

    /** Replaces the WHERE clause from a {@link Condition}; null clears it. */
    EntityQuery<T> where(Condition condition);

    /** ANDs a condition onto the current WHERE (or sets it if none yet). */
    EntityQuery<T> and(Condition condition);

    /** ORs a condition onto the current WHERE (or sets it if none yet). */
    EntityQuery<T> or(Condition condition);

    /** Sets the ORDER BY clause body. */
    EntityQuery<T> orderBy(String orderBy);

    /** Sets the GROUP BY clause body. */
    EntityQuery<T> groupBy(String groupBy);

    /** Replaces the HAVING clause with a raw fragment and parameter list. */
    EntityQuery<T> having(String having, List<Object> params);

    /** Replaces the HAVING clause with a raw fragment and varargs parameters. */
    EntityQuery<T> having(String having, Object... params);

    /** Replaces the HAVING clause from a {@link Condition}; null clears it. */
    EntityQuery<T> having(Condition condition);

    /** Sets the row limit. */
    EntityQuery<T> limit(int limit);

    /** Sets the row offset. */
    EntityQuery<T> offset(int offset);

    /** Sets limit and offset in one call. */
    EntityQuery<T> limit(int limit, int offset);

    /** Executes the query and returns all matching entities. */
    List<T> list();

    /** Executes with limit 1 and returns the match, or null if none. */
    T one();

    /** Executes a COUNT over the configured clauses. */
    long count();

    /** True if at least one row matches. */
    boolean exists();

    /**
     * Deletes the matched rows and returns the affected count;
     * JOIN / GROUP BY / HAVING are not supported for delete.
     */
    int delete();

    // Async counterparts of the terminal operations.

    CompletableFuture<List<T>> listAsync();

    CompletableFuture<T> oneAsync();

    CompletableFuture<Long> countAsync();

    CompletableFuture<Boolean> existsAsync();

    CompletableFuture<Integer> deleteAsync();
}

View File

@@ -0,0 +1,132 @@
package com.andrewkydev.database.orm;
import com.andrewkydev.database.schema.ColumnSpec;
import com.andrewkydev.database.schema.IndexSpec;
import com.andrewkydev.database.schema.SqlDialect;
import com.andrewkydev.database.schema.TableSpec;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
 * Derives a {@link TableSpec} from an annotated entity class: walks the
 * class hierarchy (subclass fields first), mapping each non-static,
 * non-{@code @DbTransient} field to a column, collecting primary-key
 * columns from {@code @DbId} and unique indexes from {@code @DbColumn}.
 */
public final class OrmSchema {

    private OrmSchema() {
        // static utility holder; never instantiated
    }

    public static TableSpec fromEntity(Class<?> type, SqlDialect dialect) {
        DbEntity entity = type.getAnnotation(DbEntity.class);
        String table;
        if (entity == null || entity.table().isEmpty()) {
            // Fall back to the snake_cased simple class name.
            table = toSnakeCase(type.getSimpleName());
        } else {
            table = entity.table();
        }
        TableSpec.Builder builder = TableSpec.builder(table);
        List<String> primaryKeys = new ArrayList<>();
        List<IndexSpec> indexes = new ArrayList<>();
        // Walk up the hierarchy so inherited fields are mapped too.
        for (Class<?> klass = type; klass != null && klass != Object.class; klass = klass.getSuperclass()) {
            for (Field field : klass.getDeclaredFields()) {
                if (Modifier.isStatic(field.getModifiers())
                        || field.isAnnotationPresent(DbTransient.class)) {
                    continue;
                }
                DbColumn column = field.getAnnotation(DbColumn.class);
                DbId id = field.getAnnotation(DbId.class);
                boolean json = field.isAnnotationPresent(DbJson.class);
                String columnName = (column == null || column.name().isEmpty())
                        ? toSnakeCase(field.getName())
                        : column.name();
                boolean nullable = column == null || column.nullable();
                boolean unique = column != null && column.unique();
                int length = column == null ? 255 : column.length();
                boolean primaryKey = id != null;
                boolean autoIncrement = primaryKey && id.autoIncrement();
                String sqlType = column == null ? "" : column.type();
                if (sqlType == null || sqlType.trim().isEmpty()) {
                    sqlType = resolveType(field.getType(), length, json, dialect);
                }
                builder.column(ColumnSpec.builder(columnName, sqlType)
                        .nullable(nullable)
                        .autoIncrement(autoIncrement)
                        .primaryKey(primaryKey)
                        .build());
                if (primaryKey) {
                    primaryKeys.add(columnName);
                }
                if (unique) {
                    List<String> indexColumns = new ArrayList<>(1);
                    indexColumns.add(columnName);
                    indexes.add(new IndexSpec(table + "_" + columnName + "_uk", indexColumns, true));
                }
            }
        }
        if (!primaryKeys.isEmpty()) {
            builder.primaryKey(primaryKeys);
        }
        if (!indexes.isEmpty()) {
            builder.indexes(indexes);
        }
        return builder.build();
    }

    /** Infers an SQL type for a Java field type; unknown types map to TEXT. */
    private static String resolveType(Class<?> javaType, int length, boolean json, SqlDialect dialect) {
        if (json) {
            return dialect == SqlDialect.POSTGRESQL ? "JSONB" : "JSON";
        }
        if (javaType == String.class) {
            return "VARCHAR(" + length + ")";
        }
        if (javaType == int.class || javaType == Integer.class) {
            return "INT";
        }
        if (javaType == long.class || javaType == Long.class) {
            return "BIGINT";
        }
        if (javaType == short.class || javaType == Short.class) {
            return "SMALLINT";
        }
        if (javaType == boolean.class || javaType == Boolean.class) {
            return dialect == SqlDialect.POSTGRESQL ? "BOOLEAN" : "TINYINT(1)";
        }
        if (javaType == float.class || javaType == Float.class) {
            return "FLOAT";
        }
        if (javaType == double.class || javaType == Double.class) {
            return "DOUBLE";
        }
        if (javaType == java.util.UUID.class) {
            return dialect == SqlDialect.POSTGRESQL ? "UUID" : "CHAR(36)";
        }
        return "TEXT";
    }

    /** CamelCase -> snake_case; no separator before the first character. */
    private static String toSnakeCase(String value) {
        StringBuilder out = new StringBuilder(value.length() + 4);
        boolean first = true;
        for (char c : value.toCharArray()) {
            if (Character.isUpperCase(c)) {
                if (!first) {
                    out.append('_');
                }
                out.append(Character.toLowerCase(c));
            } else {
                out.append(c);
            }
            first = false;
        }
        return out.toString();
    }
}

View File

@@ -0,0 +1,7 @@
package com.andrewkydev.database.orm;
/**
 * Two-way converter between a Java value of type {@code T} and its database
 * representation. Implementations are registered with the ORM layer to handle
 * types the driver cannot bind directly.
 *
 * @param <T> the Java-side type handled by this adapter
 */
public interface TypeAdapter<T> {
    /** Converts a Java value into the object bound to the SQL statement. */
    Object toDatabase(T value);

    /** Converts a raw value read from a result set back into the Java type. */
    T fromDatabase(Object value);
}

View File

@@ -0,0 +1,23 @@
package com.andrewkydev.database.query;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
 * Executes SQL statements and queries, synchronously or asynchronously.
 *
 * <p>{@code execute*} methods are for statements without a result set
 * (DDL/INSERT/UPDATE/DELETE) and return the affected-row count;
 * {@code query*} methods map each result row via a {@link RowMapper}.
 * Parameterized overloads bind {@code params} positionally to {@code ?}
 * placeholders. NOTE(review): the executor/thread used by the async variants
 * is not visible here — confirm with the implementation.
 */
public interface QueryRunner {
    /** Executes a parameterless statement; returns the affected-row count. */
    int execute(String sql);

    /** Executes a statement with positional {@code ?} parameters. */
    int execute(String sql, List<Object> params);

    /** Runs a query and maps every row with {@code mapper}. */
    <T> List<T> query(String sql, RowMapper<T> mapper);

    /** Runs a parameterized query and maps every row with {@code mapper}. */
    <T> List<T> query(String sql, List<Object> params, RowMapper<T> mapper);

    /** Async variant of {@link #execute(String)}. */
    CompletableFuture<Integer> executeAsync(String sql);

    /** Async variant of {@link #execute(String, List)}. */
    CompletableFuture<Integer> executeAsync(String sql, List<Object> params);

    /** Async variant of {@link #query(String, RowMapper)}. */
    <T> CompletableFuture<List<T>> queryAsync(String sql, RowMapper<T> mapper);

    /** Async variant of {@link #query(String, List, RowMapper)}. */
    <T> CompletableFuture<List<T>> queryAsync(String sql, List<Object> params, RowMapper<T> mapper);
}

View File

@@ -0,0 +1,9 @@
package com.andrewkydev.database.query;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Maps the current row of a {@link ResultSet} to an object of type {@code T}.
 * Implementations must not advance the cursor (no {@code next()} calls); the
 * caller iterates and invokes {@code map} once per row.
 *
 * @param <T> the mapped row type
 */
@FunctionalInterface
public interface RowMapper<T> {
    /**
     * Reads the current row and builds a {@code T}.
     *
     * @throws SQLException if reading a column fails
     */
    T map(ResultSet resultSet) throws SQLException;
}

View File

@@ -0,0 +1,34 @@
package com.andrewkydev.database.query;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
 * A database transaction: the same execute/query surface as {@link QueryRunner},
 * but all statements share one connection and take effect only on {@link #commit()}.
 *
 * <p>Intended for try-with-resources use; {@link #close()} is narrowed to throw
 * no checked exception. NOTE(review): whether {@code close()} rolls back a
 * transaction that was never committed is not visible here — confirm with the
 * implementation before relying on it.
 */
public interface Transaction extends AutoCloseable {
    /** Executes a parameterless statement inside this transaction. */
    int execute(String sql);

    /** Executes a statement with positional {@code ?} parameters. */
    int execute(String sql, List<Object> params);

    /** Runs a query inside this transaction, mapping every row. */
    <T> List<T> query(String sql, RowMapper<T> mapper);

    /** Runs a parameterized query inside this transaction. */
    <T> List<T> query(String sql, List<Object> params, RowMapper<T> mapper);

    /** Async variant of {@link #execute(String)}. */
    CompletableFuture<Integer> executeAsync(String sql);

    /** Async variant of {@link #execute(String, List)}. */
    CompletableFuture<Integer> executeAsync(String sql, List<Object> params);

    /** Async variant of {@link #query(String, RowMapper)}. */
    <T> CompletableFuture<List<T>> queryAsync(String sql, RowMapper<T> mapper);

    /** Async variant of {@link #query(String, List, RowMapper)}. */
    <T> CompletableFuture<List<T>> queryAsync(String sql, List<Object> params, RowMapper<T> mapper);

    /** Makes all statements executed so far permanent. */
    void commit();

    /** Discards all statements executed since the last commit. */
    void rollback();

    /** Async variant of {@link #commit()}. */
    CompletableFuture<Void> commitAsync();

    /** Async variant of {@link #rollback()}. */
    CompletableFuture<Void> rollbackAsync();

    /** Releases the underlying connection; throws no checked exception. */
    @Override
    void close();
}

View File

@@ -0,0 +1,85 @@
package com.andrewkydev.database.schema;
/**
 * Immutable description of a single table column: name, SQL type, nullability,
 * optional default, auto-increment and primary-key flags. Built via
 * {@link #builder(String, String)}.
 *
 * <p>Fix: the builder now rejects null/blank names and types up front, so an
 * invalid column fails fast at construction instead of surfacing later as a
 * broken {@code CREATE TABLE} statement.
 */
public final class ColumnSpec {
    private final String name;
    private final String type;
    private final boolean nullable;
    private final String defaultValue;
    private final boolean autoIncrement;
    private final boolean primaryKey;

    private ColumnSpec(Builder builder) {
        this.name = builder.name;
        this.type = builder.type;
        this.nullable = builder.nullable;
        this.defaultValue = builder.defaultValue;
        this.autoIncrement = builder.autoIncrement;
        this.primaryKey = builder.primaryKey;
    }

    /**
     * Starts a builder for a column.
     *
     * @param name column name; must be non-null and non-blank
     * @param type SQL type, e.g. {@code "VARCHAR(255)"}; must be non-null and non-blank
     * @throws IllegalArgumentException if {@code name} or {@code type} is null or blank
     */
    public static Builder builder(String name, String type) {
        return new Builder(name, type);
    }

    /** Column name. */
    public String name() {
        return name;
    }

    /** SQL type, e.g. {@code "BIGINT"} or {@code "VARCHAR(255)"}. */
    public String type() {
        return type;
    }

    /** Whether the column accepts NULL. Defaults to {@code true}. */
    public boolean nullable() {
        return nullable;
    }

    /** SQL default-value expression, or {@code null} when none was set. */
    public String defaultValue() {
        return defaultValue;
    }

    /** Whether the column is auto-incremented. Defaults to {@code false}. */
    public boolean autoIncrement() {
        return autoIncrement;
    }

    /** Whether the column is part of the primary key. Defaults to {@code false}. */
    public boolean primaryKey() {
        return primaryKey;
    }

    /** Mutable builder for {@link ColumnSpec}; obtain via {@link ColumnSpec#builder}. */
    public static final class Builder {
        private final String name;
        private final String type;
        private boolean nullable = true;
        private String defaultValue;
        private boolean autoIncrement;
        private boolean primaryKey;

        private Builder(String name, String type) {
            // Fail fast: a blank name or type can only produce invalid DDL later.
            if (name == null || name.trim().isEmpty()) {
                throw new IllegalArgumentException("Column name must be non-blank");
            }
            if (type == null || type.trim().isEmpty()) {
                throw new IllegalArgumentException("Column type must be non-blank");
            }
            this.name = name;
            this.type = type;
        }

        /** Sets NULL-ability (default {@code true}). */
        public Builder nullable(boolean nullable) {
            this.nullable = nullable;
            return this;
        }

        /** Sets the SQL default-value expression (default: none). */
        public Builder defaultValue(String defaultValue) {
            this.defaultValue = defaultValue;
            return this;
        }

        /** Marks the column auto-incremented (default {@code false}). */
        public Builder autoIncrement(boolean autoIncrement) {
            this.autoIncrement = autoIncrement;
            return this;
        }

        /** Marks the column as part of the primary key (default {@code false}). */
        public Builder primaryKey(boolean primaryKey) {
            this.primaryKey = primaryKey;
            return this;
        }

        /** Builds the immutable spec. */
        public ColumnSpec build() {
            return new ColumnSpec(this);
        }
    }
}

View File

@@ -0,0 +1,29 @@
package com.andrewkydev.database.schema;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Immutable description of a table index: name, covered columns, and whether
 * the index enforces uniqueness. The column list is defensively copied and
 * exposed as unmodifiable.
 *
 * <p>Fix: the constructor now rejects a null/blank name and a null/empty
 * column list — both can only yield invalid DDL — so the error surfaces at
 * construction time.
 */
public final class IndexSpec {
    private final String name;
    private final List<String> columns;
    private final boolean unique;

    /**
     * @param name    index name; must be non-null and non-blank
     * @param columns columns the index covers, in order; must contain at least one
     * @param unique  whether the index enforces uniqueness
     * @throws IllegalArgumentException if {@code name} is blank or {@code columns} is null/empty
     */
    public IndexSpec(String name, List<String> columns, boolean unique) {
        if (name == null || name.trim().isEmpty()) {
            throw new IllegalArgumentException("Index name must be non-blank");
        }
        if (columns == null || columns.isEmpty()) {
            throw new IllegalArgumentException("Index must cover at least one column");
        }
        this.name = name;
        // Defensive copy; callers cannot mutate the spec after construction.
        this.columns = Collections.unmodifiableList(new ArrayList<>(columns));
        this.unique = unique;
    }

    /** Index name. */
    public String name() {
        return name;
    }

    /** Unmodifiable list of covered columns, in index order. */
    public List<String> columns() {
        return columns;
    }

    /** Whether the index enforces uniqueness. */
    public boolean unique() {
        return unique;
    }
}

View File

@@ -0,0 +1,42 @@
package com.andrewkydev.database.schema;
import java.util.concurrent.CompletableFuture;
/**
 * DDL operations against the configured database: database, table, column and
 * index management, each with a synchronous and an asynchronous variant.
 * Implementations translate the specs into dialect-specific SQL.
 */
public interface Schema {
    /** Creates a database with the given name. */
    void createDatabase(String name);

    /** Drops the database with the given name. */
    void dropDatabase(String name);

    /** Creates a table from the given spec (columns, primary key, indexes). */
    void createTable(TableSpec spec);

    /** Drops the given table. */
    void dropTable(String table);

    /** Adds a column to an existing table. */
    void addColumn(String table, ColumnSpec column);

    /** Alters an existing column to match the given spec. */
    void updateColumn(String table, ColumnSpec column);

    /** Drops a column from an existing table. */
    void dropColumn(String table, String column);

    /** Adds an index to an existing table. */
    void addIndex(String table, IndexSpec index);

    /** Drops the named index from the given table. */
    void dropIndex(String table, String indexName);

    /** Async variant of {@link #createDatabase(String)}. */
    CompletableFuture<Void> createDatabaseAsync(String name);

    /** Async variant of {@link #dropDatabase(String)}. */
    CompletableFuture<Void> dropDatabaseAsync(String name);

    /** Async variant of {@link #createTable(TableSpec)}. */
    CompletableFuture<Void> createTableAsync(TableSpec spec);

    /** Async variant of {@link #dropTable(String)}. */
    CompletableFuture<Void> dropTableAsync(String table);

    /** Async variant of {@link #addColumn(String, ColumnSpec)}. */
    CompletableFuture<Void> addColumnAsync(String table, ColumnSpec column);

    /** Async variant of {@link #updateColumn(String, ColumnSpec)}. */
    CompletableFuture<Void> updateColumnAsync(String table, ColumnSpec column);

    /** Async variant of {@link #dropColumn(String, String)}. */
    CompletableFuture<Void> dropColumnAsync(String table, String column);

    /** Async variant of {@link #addIndex(String, IndexSpec)}. */
    CompletableFuture<Void> addIndexAsync(String table, IndexSpec index);

    /** Async variant of {@link #dropIndex(String, String)}. */
    CompletableFuture<Void> dropIndexAsync(String table, String indexName);
}

View File

@@ -0,0 +1,6 @@
package com.andrewkydev.database.schema;
/**
 * Supported SQL dialects. Chooses dialect-specific type mappings and DDL
 * (e.g. JSON vs JSONB, TINYINT(1) vs BOOLEAN, CHAR(36) vs UUID).
 */
public enum SqlDialect {
    /** MySQL / MariaDB-compatible dialect. */
    MYSQL,
    /** PostgreSQL dialect. */
    POSTGRESQL
}

View File

@@ -0,0 +1,80 @@
package com.andrewkydev.database.schema;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Immutable description of a table: its name, columns, composite primary key,
 * and secondary indexes. Instances are created through {@link #builder(String)};
 * all collections are snapshotted and exposed as unmodifiable.
 */
public final class TableSpec {
    private final String name;
    private final List<ColumnSpec> columns;
    private final List<String> primaryKey;
    private final List<IndexSpec> indexes;

    private TableSpec(Builder source) {
        this.name = source.tableName;
        this.columns = snapshot(source.columnList);
        this.primaryKey = snapshot(source.primaryKeyColumns);
        this.indexes = snapshot(source.indexList);
    }

    /** Returns an unmodifiable copy of {@code input}, detached from the builder. */
    private static <T> List<T> snapshot(List<T> input) {
        return Collections.unmodifiableList(new ArrayList<>(input));
    }

    /** Starts a builder for a table with the given name. */
    public static Builder builder(String name) {
        return new Builder(name);
    }

    /** Table name. */
    public String name() {
        return name;
    }

    /** Unmodifiable list of column specs, in declaration order. */
    public List<ColumnSpec> columns() {
        return columns;
    }

    /** Unmodifiable list of primary-key column names; empty when no PK was set. */
    public List<String> primaryKey() {
        return primaryKey;
    }

    /** Unmodifiable list of index specs; empty when none were added. */
    public List<IndexSpec> indexes() {
        return indexes;
    }

    /** Mutable builder for {@link TableSpec}; obtain via {@link TableSpec#builder}. */
    public static final class Builder {
        private final String tableName;
        private final List<ColumnSpec> columnList = new ArrayList<>();
        private final List<String> primaryKeyColumns = new ArrayList<>();
        private final List<IndexSpec> indexList = new ArrayList<>();

        private Builder(String name) {
            this.tableName = name;
        }

        /** Appends one column. */
        public Builder column(ColumnSpec column) {
            columnList.add(column);
            return this;
        }

        /** Appends all given columns, preserving order. */
        public Builder columns(List<ColumnSpec> columns) {
            columnList.addAll(columns);
            return this;
        }

        /** Replaces the primary-key column list with the given one. */
        public Builder primaryKey(List<String> columns) {
            primaryKeyColumns.clear();
            primaryKeyColumns.addAll(columns);
            return this;
        }

        /** Appends one index. */
        public Builder index(IndexSpec index) {
            indexList.add(index);
            return this;
        }

        /** Appends all given indexes, preserving order. */
        public Builder indexes(List<IndexSpec> indexes) {
            indexList.addAll(indexes);
            return this;
        }

        /** Builds the immutable spec from the current builder state. */
        public TableSpec build() {
            return new TableSpec(this);
        }
    }
}

View File

@@ -0,0 +1,16 @@
# Supported "driver" values: mysql | postgres
driver: "mysql"
host: "localhost"
port: 3306
database: "primalix"
username: "root"
password: ""
adminDatabase: "postgres"
autoTransactions: true
pool:
maxPoolSize: 10
minIdle: 2
connectionTimeoutMs: 30000
idleTimeoutMs: 600000
maxLifetimeMs: 1800000

View File

@@ -0,0 +1,8 @@
name: Database
description: "Database plugin for Primalix"
main: org.andrewkydev.Loader
version: "0.0.1"
api: [ 1.1.0 ]
load: POSTWORLD