Compare commits

...

26 Commits

Author SHA1 Message Date
34bb221af7 Change limit to 100, use shikimoriId as main identifier 2026-03-25 01:31:52 +05:00
aa14a4c89f Include grouped by translations result 2026-03-24 22:18:56 +05:00
efc3553e64 Update kodik api url 2026-03-23 20:54:53 +05:00
14625ea289 Move package com.backend.metadata.kodik.service to com.backend.metadata.kodik 2026-03-22 01:24:46 +05:00
3e8409f150 Add port for dev 2026-03-20 00:03:38 +05:00
e08853cd90 Fix last season 2026-03-20 00:02:52 +05:00
b126c3c5cb Add startup.sh 2026-02-28 00:18:50 +05:00
fa97f7eacb Update id to appName and log proper id 2026-02-26 17:17:36 +05:00
bf08d0f4b7 Add kodik, imdb, shikimori, kinopoisk 2026-02-15 02:48:02 +05:00
2dab1a4016 Basic integration of consul registration 2026-02-15 02:09:08 +05:00
e154e232df Migrate to quarkus 2026-01-27 00:41:46 +05:00
1e42f24e3a Use logstash as logging destination 2025-09-14 01:58:32 +05:00
740e1a0279 Use buildx caching in docker mvn package 2025-09-07 01:06:08 +05:00
13be17fa8f Use docker shared network instead of localhost 2025-09-01 19:21:31 +05:00
21199307c7 Use external docker network 2025-09-01 19:19:47 +05:00
e07f48f39d Change compose service name 2025-08-28 17:23:56 +05:00
53dcde6bd9 Exclude error message and change application name to anyame-kodik-search-backend 2025-08-28 17:22:27 +05:00
eca6f50730 Bump spring-boot 3.5.0 -> 3.5.5 2025-08-28 12:36:59 +05:00
4d99a59947 Add .env.example and add eureka client 2025-08-28 11:47:39 +05:00
f2eee1752d Include "season" in anime identifier 2025-08-10 16:15:23 +05:00
9cc7cede69 [Fix] Allow only specific types only in search results (#2)
Reviewed-on: anyame/anyame-kodik-search-service#2
Co-authored-by: bivashy <botyrbojey@gmail.com>
Co-committed-by: bivashy <botyrbojey@gmail.com>
2025-07-19 18:50:29 +00:00
9540a71307 [Fix] Return unique search results (#1)
Reviewed-on: anyame/anyame-kodik-search-service#1
Co-authored-by: bivashy <botyrbojey@gmail.com>
Co-committed-by: bivashy <botyrbojey@gmail.com>
2025-07-19 18:41:34 +00:00
55bbb79510 Fix compose image name 2025-07-14 00:45:08 +05:00
2b0f816633 Optimize Dockerfile and add docker compose 2025-07-12 18:15:55 +05:00
3fc06eabff Add kodik token to application.properties 2025-07-12 18:14:32 +05:00
95113ce2fb Change logger style in controller 2025-07-12 18:14:19 +05:00
35 changed files with 1085 additions and 305 deletions

5
.dockerignore Normal file
View File

@@ -0,0 +1,5 @@
*
!target/*-runner
!target/*-runner.jar
!target/lib/*
!target/quarkus-app/*

3
.env.example Normal file
View File

@@ -0,0 +1,3 @@
KODIK_TOKEN=YOUR_KODIK_TOKEN
EUREKA_SCHEMA=http
EUREKA_HOST=anyame-vue-bff:8080

2
.gitattributes vendored
View File

@@ -1,2 +0,0 @@
/mvnw text eol=lf
*.cmd text eol=crlf

60
.gitignore vendored
View File

@@ -1,35 +1,45 @@
HELP.md #Maven
target/ target/
.mvn/wrapper/maven-wrapper.jar pom.xml.tag
!**/src/main/**/target/ pom.xml.releaseBackup
!**/src/test/**/target/ pom.xml.versionsBackup
release.properties
.flattened-pom.xml
### STS ### # Eclipse
.apt_generated
.classpath
.factorypath
.project .project
.settings .classpath
.springBeans .settings/
.sts4-cache bin/
### IntelliJ IDEA ### # IntelliJ
.idea .idea
*.iws
*.iml
*.ipr *.ipr
*.iml
*.iws
### NetBeans ### # NetBeans
/nbproject/private/ nb-configuration.xml
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ### # Visual Studio Code
.vscode/ .vscode
.factorypath
# OSX
.DS_Store
# Vim
*.swp
*.swo
# patch
*.orig
*.rej
# Local environment
.env .env
# Plugin directory
/.quarkus/cli/plugins/
# TLS Certificates
.certs/

View File

@@ -1,19 +1,3 @@
# Licensed to the Apache Software Foundation (ASF) under one wrapperVersion=3.3.4
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.2
distributionType=only-script distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.12/apache-maven-3.9.12-bin.zip

View File

@@ -1,36 +0,0 @@
FROM eclipse-temurin:21 AS app-build
ENV RELEASE=21
WORKDIR /opt/build
COPY ./target/*.jar ./application.jar
RUN java -Djarmode=layertools -jar application.jar extract
RUN $JAVA_HOME/bin/jlink \
--add-modules `jdeps --ignore-missing-deps -q -recursive --multi-release ${RELEASE} --print-module-deps -cp 'dependencies/BOOT-INF/lib/*' application.jar` \
--strip-debug \
--no-man-pages \
--no-header-files \
--compress=2 \
--output jdk
FROM debian:buster-slim
ARG BUILD_PATH=/opt/build
ENV JAVA_HOME=/opt/jdk
ENV PATH="${JAVA_HOME}/bin:${PATH}"
RUN groupadd --gid 1000 spring-app \
&& useradd --uid 1000 --gid spring-app --shell /bin/bash --create-home spring-app
USER spring-app:spring-app
WORKDIR /opt/workspace
COPY --from=app-build $BUILD_PATH/jdk $JAVA_HOME
COPY --from=app-build $BUILD_PATH/spring-boot-loader/ ./
COPY --from=app-build $BUILD_PATH/dependencies/ ./
COPY --from=app-build $BUILD_PATH/snapshot-dependencies/ ./
COPY --from=app-build $BUILD_PATH/application/ ./
EXPOSE 8080/tcp
ENTRYPOINT ["java", "org.springframework.boot.loader.launch.JarLauncher"]

View File

@@ -1,6 +1,6 @@
MIT License MIT License
Copyright (c) 2025 anyame Copyright (c) 2026 anyame
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

View File

@@ -1,2 +1,62 @@
# anyame-kodik-search-service # code-with-quarkus
This project uses Quarkus, the Supersonic Subatomic Java Framework.
If you want to learn more about Quarkus, please visit its website: <https://quarkus.io/>.
## Running the application in dev mode
You can run your application in dev mode that enables live coding using:
```shell script
./mvnw quarkus:dev
```
> **_NOTE:_** Quarkus now ships with a Dev UI, which is available in dev mode only at <http://localhost:8080/q/dev/>.
## Packaging and running the application
The application can be packaged using:
```shell script
./mvnw package
```
It produces the `quarkus-run.jar` file in the `target/quarkus-app/` directory.
Be aware that it's not an _über-jar_ as the dependencies are copied into the `target/quarkus-app/lib/` directory.
The application is now runnable using `java -jar target/quarkus-app/quarkus-run.jar`.
If you want to build an _über-jar_, execute the following command:
```shell script
./mvnw package -Dquarkus.package.jar.type=uber-jar
```
The application, packaged as an _über-jar_, is now runnable using `java -jar target/*-runner.jar`.
## Creating a native executable
You can create a native executable using:
```shell script
./mvnw package -Dnative
```
Or, if you don't have GraalVM installed, you can run the native executable build in a container using:
```shell script
./mvnw package -Dnative -Dquarkus.native.container-build=true
```
You can then execute your native executable with: `./target/code-with-quarkus-1.0.0-SNAPSHOT-runner`
If you want to learn more about building native executables, please consult <https://quarkus.io/guides/maven-tooling>.
## Provided Code
### REST
Easily start your REST Web Services
[Related guide section...](https://quarkus.io/guides/getting-started-reactive#reactive-jax-rs-resources)

50
mvnw vendored
View File

@@ -19,7 +19,7 @@
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
# Apache Maven Wrapper startup batch script, version 3.3.2 # Apache Maven Wrapper startup batch script, version 3.3.4
# #
# Optional ENV vars # Optional ENV vars
# ----------------- # -----------------
@@ -105,14 +105,17 @@ trim() {
printf "%s" "${1}" | tr -d '[:space:]' printf "%s" "${1}" | tr -d '[:space:]'
} }
scriptDir="$(dirname "$0")"
scriptName="$(basename "$0")"
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties # parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do while IFS="=" read -r key value; do
case "${key-}" in case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;; distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac esac
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties" done <"$scriptDir/.mvn/wrapper/maven-wrapper.properties"
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties" [ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
case "${distributionUrl##*/}" in case "${distributionUrl##*/}" in
maven-mvnd-*bin.*) maven-mvnd-*bin.*)
@@ -130,7 +133,7 @@ maven-mvnd-*bin.*)
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;; ;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; *) MVN_CMD="mvn${scriptName#mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac esac
# apply MVNW_REPOURL and calculate MAVEN_HOME # apply MVNW_REPOURL and calculate MAVEN_HOME
@@ -227,7 +230,7 @@ if [ -n "${distributionSha256Sum-}" ]; then
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1 exit 1
elif command -v sha256sum >/dev/null; then elif command -v sha256sum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c - >/dev/null 2>&1; then
distributionSha256Result=true distributionSha256Result=true
fi fi
elif command -v shasum >/dev/null; then elif command -v shasum >/dev/null; then
@@ -252,8 +255,41 @@ if command -v unzip >/dev/null; then
else else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi fi
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" # Find the actual extracted directory name (handles snapshots where filename != directory name)
actualDistributionDir=""
# First try the expected directory name (for regular distributions)
if [ -d "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" ]; then
if [ -f "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/bin/$MVN_CMD" ]; then
actualDistributionDir="$distributionUrlNameMain"
fi
fi
# If not found, search for any directory with the Maven executable (for snapshots)
if [ -z "$actualDistributionDir" ]; then
# enable globbing to iterate over items
set +f
for dir in "$TMP_DOWNLOAD_DIR"/*; do
if [ -d "$dir" ]; then
if [ -f "$dir/bin/$MVN_CMD" ]; then
actualDistributionDir="$(basename "$dir")"
break
fi
fi
done
set -f
fi
if [ -z "$actualDistributionDir" ]; then
verbose "Contents of $TMP_DOWNLOAD_DIR:"
verbose "$(ls -la "$TMP_DOWNLOAD_DIR")"
die "Could not find Maven distribution directory in extracted archive"
fi
verbose "Found extracted Maven distribution directory: $actualDistributionDir"
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$actualDistributionDir/mvnw.url"
mv -- "$TMP_DOWNLOAD_DIR/$actualDistributionDir" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || : clean || :
exec_maven "$@" exec_maven "$@"

56
mvnw.cmd vendored
View File

@@ -19,7 +19,7 @@
@REM ---------------------------------------------------------------------------- @REM ----------------------------------------------------------------------------
@REM ---------------------------------------------------------------------------- @REM ----------------------------------------------------------------------------
@REM Apache Maven Wrapper startup batch script, version 3.3.2 @REM Apache Maven Wrapper startup batch script, version 3.3.4
@REM @REM
@REM Optional ENV vars @REM Optional ENV vars
@REM MVNW_REPOURL - repo url base for downloading maven distribution @REM MVNW_REPOURL - repo url base for downloading maven distribution
@@ -40,7 +40,7 @@
@SET __MVNW_ARG0_NAME__= @SET __MVNW_ARG0_NAME__=
@SET MVNW_USERNAME= @SET MVNW_USERNAME=
@SET MVNW_PASSWORD= @SET MVNW_PASSWORD=
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*) @IF NOT "%__MVNW_CMD__%"=="" ("%__MVNW_CMD__%" %*)
@echo Cannot start maven from wrapper >&2 && exit /b 1 @echo Cannot start maven from wrapper >&2 && exit /b 1
@GOTO :EOF @GOTO :EOF
: end batch / begin powershell #> : end batch / begin powershell #>
@@ -73,16 +73,30 @@ switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
# apply MVNW_REPOURL and calculate MAVEN_HOME # apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash> # maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
if ($env:MVNW_REPOURL) { if ($env:MVNW_REPOURL) {
$MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" } $MVNW_REPO_PATTERN = if ($USE_MVND -eq $False) { "/org/apache/maven/" } else { "/maven/mvnd/" }
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')" $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace "^.*$MVNW_REPO_PATTERN",'')"
} }
$distributionUrlName = $distributionUrl -replace '^.*/','' $distributionUrlName = $distributionUrl -replace '^.*/',''
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' $distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
$MAVEN_M2_PATH = "$HOME/.m2"
if ($env:MAVEN_USER_HOME) { if ($env:MAVEN_USER_HOME) {
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain" $MAVEN_M2_PATH = "$env:MAVEN_USER_HOME"
} }
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
if (-not (Test-Path -Path $MAVEN_M2_PATH)) {
New-Item -Path $MAVEN_M2_PATH -ItemType Directory | Out-Null
}
$MAVEN_WRAPPER_DISTS = $null
if ((Get-Item $MAVEN_M2_PATH).Target[0] -eq $null) {
$MAVEN_WRAPPER_DISTS = "$MAVEN_M2_PATH/wrapper/dists"
} else {
$MAVEN_WRAPPER_DISTS = (Get-Item $MAVEN_M2_PATH).Target[0] + "/wrapper/dists"
}
$MAVEN_HOME_PARENT = "$MAVEN_WRAPPER_DISTS/$distributionUrlNameMain"
$MAVEN_HOME_NAME = ([System.Security.Cryptography.SHA256]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" $MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
@@ -134,7 +148,33 @@ if ($distributionSha256Sum) {
# unzip and move # unzip and move
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
# Find the actual extracted directory name (handles snapshots where filename != directory name)
$actualDistributionDir = ""
# First try the expected directory name (for regular distributions)
$expectedPath = Join-Path "$TMP_DOWNLOAD_DIR" "$distributionUrlNameMain"
$expectedMvnPath = Join-Path "$expectedPath" "bin/$MVN_CMD"
if ((Test-Path -Path $expectedPath -PathType Container) -and (Test-Path -Path $expectedMvnPath -PathType Leaf)) {
$actualDistributionDir = $distributionUrlNameMain
}
# If not found, search for any directory with the Maven executable (for snapshots)
if (!$actualDistributionDir) {
Get-ChildItem -Path "$TMP_DOWNLOAD_DIR" -Directory | ForEach-Object {
$testPath = Join-Path $_.FullName "bin/$MVN_CMD"
if (Test-Path -Path $testPath -PathType Leaf) {
$actualDistributionDir = $_.Name
}
}
}
if (!$actualDistributionDir) {
Write-Error "Could not find Maven distribution directory in extracted archive"
}
Write-Verbose "Found extracted Maven distribution directory: $actualDistributionDir"
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$actualDistributionDir" -NewName $MAVEN_HOME_NAME | Out-Null
try { try {
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
} catch { } catch {

155
pom.xml
View File

@@ -3,47 +3,58 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<parent> <groupId>com.backend.metadata.kodik.service</groupId>
<groupId>org.springframework.boot</groupId> <artifactId>anyame-kodik-metadata-backend</artifactId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.5.0</version>
<relativePath /> <!-- lookup parent from repository -->
</parent>
<groupId>com.backend.search.kodik.service</groupId>
<artifactId>anyame-backend</artifactId>
<version>0.0.1-SNAPSHOT</version> <version>0.0.1-SNAPSHOT</version>
<name>anyame-backend</name> <packaging>quarkus</packaging>
<description>Kodik search service for anyame</description>
<url />
<licenses>
<license />
</licenses>
<developers>
<developer />
</developers>
<scm>
<connection />
<developerConnection />
<tag />
<url />
</scm>
<properties>
<java.version>21</java.version>
<retrofit.version>3.0.0</retrofit.version> <properties>
<spring-dotenv.version>4.0.0</spring-dotenv.version> <compiler-plugin.version>3.14.1</compiler-plugin.version>
<springdoc-openapi-starter.version>2.8.9</springdoc-openapi-starter.version> <maven.compiler.release>25</maven.compiler.release>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<quarkus.platform.artifact-id>quarkus-bom</quarkus.platform.artifact-id>
<quarkus.platform.group-id>io.quarkus.platform</quarkus.platform.group-id>
<quarkus.platform.version>3.31.1</quarkus.platform.version>
<skipITs>true</skipITs>
<surefire-plugin.version>3.5.4</surefire-plugin.version>
<easy-retrofit.version>1.2.0</easy-retrofit.version>
<retrofit.version>2.11.0</retrofit.version>
</properties> </properties>
<dependencyManagement>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>${quarkus.platform.group-id}</groupId>
<artifactId>spring-boot-starter-web</artifactId> <artifactId>${quarkus.platform.artifact-id}</artifactId>
<version>${quarkus.platform.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency> </dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency> <dependency>
<groupId>com.squareup.retrofit2</groupId> <groupId>io.quarkus</groupId>
<artifactId>retrofit</artifactId> <artifactId>quarkus-arc</artifactId>
<version>${retrofit.version}</version> </dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-rest</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-rest-jackson</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-smallrye-stork</artifactId>
</dependency>
<dependency>
<groupId>io.smallrye.stork</groupId>
<artifactId>stork-service-registration-consul</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.squareup.retrofit2</groupId> <groupId>com.squareup.retrofit2</groupId>
@@ -51,19 +62,18 @@
<version>${retrofit.version}</version> <version>${retrofit.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>me.paulschwarz</groupId> <groupId>io.quarkiverse.retrofit</groupId>
<artifactId>spring-dotenv</artifactId> <artifactId>quarkus-easy-retrofit</artifactId>
<version>${spring-dotenv.version}</version> <version>${easy-retrofit.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springdoc</groupId> <groupId>io.quarkus</groupId>
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId> <artifactId>quarkus-junit</artifactId>
<version>${springdoc-openapi-starter.version}</version> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>io.rest-assured</groupId>
<artifactId>spring-boot-starter-test</artifactId> <artifactId>rest-assured</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
</dependencies> </dependencies>
@@ -71,10 +81,67 @@
<build> <build>
<plugins> <plugins>
<plugin> <plugin>
<groupId>org.springframework.boot</groupId> <groupId>${quarkus.platform.group-id}</groupId>
<artifactId>spring-boot-maven-plugin</artifactId> <artifactId>quarkus-maven-plugin</artifactId>
<version>${quarkus.platform.version}</version>
<extensions>true</extensions>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>${compiler-plugin.version}</version>
<configuration>
<parameters>true</parameters>
</configuration>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>${surefire-plugin.version}</version>
<configuration>
<argLine>@{argLine}</argLine>
<systemPropertyVariables>
<java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
<maven.home>${maven.home}</maven.home>
</systemPropertyVariables>
</configuration>
</plugin>
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
<version>${surefire-plugin.version}</version>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
<configuration>
<argLine>@{argLine}</argLine>
<systemPropertyVariables>
<native.image.path>
${project.build.directory}/${project.build.finalName}-runner</native.image.path>
<java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
<maven.home>${maven.home}</maven.home>
</systemPropertyVariables>
</configuration>
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
<profiles>
<profile>
<id>native</id>
<activation>
<property>
<name>native</name>
</property>
</activation>
<properties>
<quarkus.package.jar.enabled>false</quarkus.package.jar.enabled>
<skipITs>false</skipITs>
<quarkus.native.enabled>true</quarkus.native.enabled>
</properties>
</profile>
</profiles>
</project> </project>

View File

@@ -0,0 +1,25 @@
services:
kodik-metadata-service:
build:
context: ../../../.
dockerfile: src/main/docker/Dockerfile.jvm
container_name: kodik-metadata-service
restart: on-failure
env_file: ../../../.env
networks:
- anyame-consul
ports:
- 0:8080
deploy:
resources:
limits:
memory: 512M
cpus: "1"
reservations:
memory: 128M
cpus: "0.3"
networks:
anyame-consul:
name: anyame-consul
external: true

View File

@@ -0,0 +1,23 @@
services:
kodik-metadata-service:
build:
context: ../../../.
dockerfile: src/main/docker/Dockerfile.native-micro
container_name: kodik-metadata-service
restart: on-failure
env_file: ../../../.env
networks:
- anyame-consul
deploy:
resources:
limits:
memory: 512M
cpus: "1"
reservations:
memory: 128M
cpus: "0.3"
networks:
anyame-consul:
name: anyame-consul
external: true

View File

@@ -0,0 +1,100 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
#
# Before building the container image run:
#
# ./mvnw package
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/anyame-kodik-metadata-backend-jvm .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/anyame-kodik-metadata-backend-jvm
#
# If you want to include the debug port into your docker image
# you will have to expose the debug port (5005 is the default) like this : EXPOSE 8080 5005.
# Additionally you will have to set -e JAVA_DEBUG=true and -e JAVA_DEBUG_PORT=*:5005
# when running the container
#
# Then run the container using :
#
# docker run -i --rm -p 8080:8080 quarkus/anyame-kodik-metadata-backend-jvm
#
# This image uses the `run-java.sh` script to run the application.
# This script computes the command line to execute your Java application, and
# includes memory/GC tuning.
# You can configure the behavior using the following environment properties:
# - JAVA_OPTS: JVM options passed to the `java` command (example: "-verbose:class") - Be aware that this will override
# the default JVM options, use `JAVA_OPTS_APPEND` to append options
# - JAVA_OPTS_APPEND: User specified Java options to be appended to generated options
# in JAVA_OPTS (example: "-Dsome.property=foo")
# - JAVA_MAX_MEM_RATIO: Is used when no `-Xmx` option is given in JAVA_OPTS. This is
# used to calculate a default maximal heap memory based on a container's restriction.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xmx` is set to a ratio
# of the container available memory as set here. The default is `50` which means 50%
# of the available memory is used as an upper boundary. You can skip this mechanism by
# setting this value to `0` in which case no `-Xmx` option is added.
# - JAVA_INITIAL_MEM_RATIO: Is used when no `-Xms` option is given in JAVA_OPTS. This
# is used to calculate a default initial heap memory based on the maximum heap memory.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xms` is set to a ratio
# of the `-Xmx` memory as set here. The default is `25` which means 25% of the `-Xmx`
# is used as the initial heap size. You can skip this mechanism by setting this value
# to `0` in which case no `-Xms` option is added (example: "25")
# - JAVA_MAX_INITIAL_MEM: Is used when no `-Xms` option is given in JAVA_OPTS.
# This is used to calculate the maximum value of the initial heap memory. If used in
# a container without any memory constraints for the container then this option has
# no effect. If there is a memory constraint then `-Xms` is limited to the value set
# here. The default is 4096MB which means the calculated value of `-Xms` never will
# be greater than 4096MB. The value of this variable is expressed in MB (example: "4096")
# - JAVA_DIAGNOSTICS: Set this to get some diagnostics information to standard output
# when things are happening. This option, if set to true, will set
# `-XX:+UnlockDiagnosticVMOptions`. Disabled by default (example: "true").
# - JAVA_DEBUG: If set remote debugging will be switched on. Disabled by default (example:
# "true").
# - JAVA_DEBUG_PORT: Port used for remote debugging. Defaults to 5005 (example: "8787").
# - CONTAINER_CORE_LIMIT: A calculated core limit as described in
# https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt. (example: "2")
# - CONTAINER_MAX_MEMORY: Memory limit given to the container (example: "1024").
# - GC_MIN_HEAP_FREE_RATIO: Minimum percentage of heap free after GC to avoid expansion.
# (example: "20")
# - GC_MAX_HEAP_FREE_RATIO: Maximum percentage of heap free after GC to avoid shrinking.
# (example: "40")
# - GC_TIME_RATIO: Specifies the ratio of the time spent outside the garbage collection.
# (example: "4")
# - GC_ADAPTIVE_SIZE_POLICY_WEIGHT: The weighting given to the current GC time versus
# previous GC times. (example: "90")
# - GC_METASPACE_SIZE: The initial metaspace size. (example: "20")
# - GC_MAX_METASPACE_SIZE: The maximum metaspace size. (example: "100")
# - GC_CONTAINER_OPTIONS: Specify Java GC to use. The value of this variable should
# contain the necessary JRE command-line options to specify the required GC, which
# will override the default of `-XX:+UseParallelGC` (example: -XX:+UseG1GC).
# - HTTPS_PROXY: The location of the https proxy. (example: "myuser@127.0.0.1:8080")
# - HTTP_PROXY: The location of the http proxy. (example: "myuser@127.0.0.1:8080")
# - NO_PROXY: A comma separated lists of hosts, IP addresses or domains that can be
# accessed directly. (example: "foo.example.com,bar.example.com")
#
# You can find more information about the UBI base runtime images and their configuration here:
# https://rh-openjdk.github.io/redhat-openjdk-containers/
###
FROM registry.access.redhat.com/ubi9/openjdk-25:1.24
ENV LANGUAGE='en_US:en'
# We make four distinct layers so if there are application changes the library layers can be re-used
COPY --chown=185 target/quarkus-app/lib/ /deployments/lib/
COPY --chown=185 target/quarkus-app/*.jar /deployments/
COPY --chown=185 target/quarkus-app/app/ /deployments/app/
COPY --chown=185 target/quarkus-app/quarkus/ /deployments/quarkus/
EXPOSE 8080
USER 185
ENV JAVA_OPTS_APPEND="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
ENV JAVA_APP_JAR="/deployments/quarkus-run.jar"
ENTRYPOINT [ "/opt/jboss/container/java/run/run-java.sh" ]

View File

@@ -0,0 +1,96 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
#
# Before building the container image run:
#
# ./mvnw package -Dquarkus.package.jar.type=legacy-jar
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.legacy-jar -t quarkus/anyame-kodik-metadata-backend-legacy-jar .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/anyame-kodik-metadata-backend-legacy-jar
#
# If you want to include the debug port into your docker image
# you will have to expose the debug port (5005 being the default) like this: EXPOSE 8080 5005.
# Additionally you will have to set -e JAVA_DEBUG=true and -e JAVA_DEBUG_PORT=*:5005
# when running the container
#
# Then run the container using :
#
# docker run -i --rm -p 8080:8080 quarkus/anyame-kodik-metadata-backend-legacy-jar
#
# This image uses the `run-java.sh` script to run the application.
# This scripts computes the command line to execute your Java application, and
# includes memory/GC tuning.
# You can configure the behavior using the following environment properties:
# - JAVA_OPTS: JVM options passed to the `java` command (example: "-verbose:class") - Be aware that this will override
# the default JVM options, use `JAVA_OPTS_APPEND` to append options
# - JAVA_OPTS_APPEND: User specified Java options to be appended to generated options
# in JAVA_OPTS (example: "-Dsome.property=foo")
# - JAVA_MAX_MEM_RATIO: Is used when no `-Xmx` option is given in JAVA_OPTS. This is
# used to calculate a default maximal heap memory based on a containers restriction.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xmx` is set to a ratio
# of the container available memory as set here. The default is `50` which means 50%
# of the available memory is used as an upper boundary. You can skip this mechanism by
# setting this value to `0` in which case no `-Xmx` option is added.
# - JAVA_INITIAL_MEM_RATIO: Is used when no `-Xms` option is given in JAVA_OPTS. This
# is used to calculate a default initial heap memory based on the maximum heap memory.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xms` is set to a ratio
# of the `-Xmx` memory as set here. The default is `25` which means 25% of the `-Xmx`
# is used as the initial heap size. You can skip this mechanism by setting this value
# to `0` in which case no `-Xms` option is added (example: "25")
# - JAVA_MAX_INITIAL_MEM: Is used when no `-Xms` option is given in JAVA_OPTS.
# This is used to calculate the maximum value of the initial heap memory. If used in
# a container without any memory constraints for the container then this option has
# no effect. If there is a memory constraint then `-Xms` is limited to the value set
# here. The default is 4096MB which means the calculated value of `-Xms` never will
# be greater than 4096MB. The value of this variable is expressed in MB (example: "4096")
# - JAVA_DIAGNOSTICS: Set this to get some diagnostics information to standard output
# when things are happening. This option, if set to true, will set
# `-XX:+UnlockDiagnosticVMOptions`. Disabled by default (example: "true").
# - JAVA_DEBUG: If set remote debugging will be switched on. Disabled by default (example:
# "true").
# - JAVA_DEBUG_PORT: Port used for remote debugging. Defaults to 5005 (example: "8787").
# - CONTAINER_CORE_LIMIT: A calculated core limit as described in
# https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt. (example: "2")
# - CONTAINER_MAX_MEMORY: Memory limit given to the container (example: "1024").
# - GC_MIN_HEAP_FREE_RATIO: Minimum percentage of heap free after GC to avoid expansion.
# (example: "20")
# - GC_MAX_HEAP_FREE_RATIO: Maximum percentage of heap free after GC to avoid shrinking.
# (example: "40")
# - GC_TIME_RATIO: Specifies the ratio of the time spent outside the garbage collection.
# (example: "4")
# - GC_ADAPTIVE_SIZE_POLICY_WEIGHT: The weighting given to the current GC time versus
# previous GC times. (example: "90")
# - GC_METASPACE_SIZE: The initial metaspace size. (example: "20")
# - GC_MAX_METASPACE_SIZE: The maximum metaspace size. (example: "100")
# - GC_CONTAINER_OPTIONS: Specify Java GC to use. The value of this variable should
# contain the necessary JRE command-line options to specify the required GC, which
# will override the default of `-XX:+UseParallelGC` (example: -XX:+UseG1GC).
# - HTTPS_PROXY: The location of the https proxy. (example: "myuser@127.0.0.1:8080")
# - HTTP_PROXY: The location of the http proxy. (example: "myuser@127.0.0.1:8080")
# - NO_PROXY: A comma separated list of hosts, IP addresses or domains that can be
# accessed directly. (example: "foo.example.com,bar.example.com")
#
# You can find more information about the UBI base runtime images and their configuration here:
# https://rh-openjdk.github.io/redhat-openjdk-containers/
###
# NOTE: keep the base image tag in sync with src/main/docker/Dockerfile.jvm (openjdk-25:1.24);
# the two Dockerfiles previously diverged (1.23 vs 1.24).
FROM registry.access.redhat.com/ubi9/openjdk-25:1.24
ENV LANGUAGE='en_US:en'
# Legacy-jar layout: dependency jars plus a single runner jar renamed to the
# path run-java.sh expects (JAVA_APP_JAR below).
COPY target/lib/* /deployments/lib/
COPY target/*-runner.jar /deployments/quarkus-run.jar
EXPOSE 8080
# Non-root uid expected by the UBI OpenJDK runtime image.
USER 185
ENV JAVA_OPTS_APPEND="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
ENV JAVA_APP_JAR="/deployments/quarkus-run.jar"
ENTRYPOINT [ "/opt/jboss/container/java/run/run-java.sh" ]

View File

@@ -0,0 +1,29 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode.
#
# Before building the container image run:
#
# ./mvnw package -Dnative
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.native -t quarkus/anyame-kodik-metadata-backend .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/anyame-kodik-metadata-backend
#
# The `registry.access.redhat.com/ubi9/ubi-minimal:9.7` base image is based on UBI 9.
# To use UBI 8, switch to `quay.io/ubi8/ubi-minimal:8.10`.
###
FROM registry.access.redhat.com/ubi9/ubi-minimal:9.7
WORKDIR /work/
# Make /work owned by the runtime uid and group-writable for arbitrary uids.
# A single chown to 1001:root replaces the template's redundant double chown
# (chown 1001 followed by chown 1001:root).
RUN chown 1001:root /work \
    && chmod "g+rwX" /work
COPY --chown=1001:root --chmod=0755 target/*-runner /work/application
EXPOSE 8080
USER 1001
ENTRYPOINT ["./application", "-Dquarkus.http.host=0.0.0.0"]

View File

@@ -0,0 +1,32 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode.
# It uses a micro base image, tuned for Quarkus native executables.
# It reduces the size of the resulting container image.
# Check https://quarkus.io/guides/quarkus-runtime-base-image for further information about this image.
#
# Before building the container image run:
#
# ./mvnw package -Dnative
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.native-micro -t quarkus/anyame-kodik-metadata-backend .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/anyame-kodik-metadata-backend
#
# The `quay.io/quarkus/ubi9-quarkus-micro-image:2.0` base image is based on UBI 9.
# To use UBI 8, switch to `quay.io/quarkus/quarkus-micro-image:2.0`.
###
FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0
WORKDIR /work/
# Make /work owned by the runtime uid and group-writable for arbitrary uids.
# A single chown to 1001:root replaces the template's redundant double chown
# (chown 1001 followed by chown 1001:root).
RUN chown 1001:root /work \
    && chmod "g+rwX" /work
COPY --chown=1001:root --chmod=0755 target/*-runner /work/application
EXPOSE 8080
USER 1001
ENTRYPOINT ["./application", "-Dquarkus.http.host=0.0.0.0"]

View File

@@ -0,0 +1,30 @@
package com.backend.metadata.kodik;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.jboss.logging.Logger;
import io.quarkus.arc.lookup.LookupIfProperty;
import io.vertx.core.Vertx;
import io.vertx.ext.consul.ConsulClient;
import io.vertx.ext.consul.ConsulClientOptions;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.inject.Produces;
@ApplicationScoped
public class ApplicationBeanProducer {

    // Was a non-static, non-final instance field that was never used; made a
    // proper class-level logger and used when producing the client.
    private static final Logger LOGGER = Logger.getLogger(ApplicationBeanProducer.class);

    /** Consul agent host; defaults to the docker-compose service name "consul". */
    @ConfigProperty(name = "consul.host", defaultValue = "consul")
    String host;

    /** Consul agent HTTP port (Consul's default is 8500). */
    @ConfigProperty(name = "consul.port", defaultValue = "8500")
    int port;

    /**
     * Produces a {@link ConsulClient} only when Stork's service registrar is
     * configured to use Consul; otherwise no bean exists and injection points
     * (e.g. {@code Instance<ConsulClient>}) see it as unresolvable.
     *
     * @param vertx the managed Vert.x instance supplied by Quarkus
     * @return a Consul client pointed at {@code host:port}
     */
    @Produces
    @LookupIfProperty(name = "quarkus.stork.kodik-metadata-service.service-registrar.type", stringValue = "consul")
    public ConsulClient consulClient(Vertx vertx) {
        LOGGER.infof("Creating Consul client for %s:%d", host, port);
        return ConsulClient.create(vertx, new ConsulClientOptions()
                .setHost(host)
                .setPort(port));
    }
}

View File

@@ -0,0 +1,56 @@
package com.backend.metadata.kodik;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.jboss.logging.Logger;
import io.quarkus.runtime.ShutdownEvent;
import io.quarkus.runtime.StartupEvent;
import io.vertx.ext.consul.ConsulClient;
import io.vertx.ext.consul.ServiceOptions;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.enterprise.inject.Instance;
@ApplicationScoped
public class ApplicationLifecycle {

    /** Delay before registering, giving the HTTP layer time to come up. */
    private static final long REGISTRATION_DELAY_MS = 1000;

    // Package-private (not private) so CDI can inject config without the
    // reflective private-field fallback.
    @ConfigProperty(name = "quarkus.application.name")
    String appName;

    // NOTE(review): "quarkus.application.port" is not a standard Quarkus key
    // (the HTTP port is "quarkus.http.port") — confirm this property is set.
    @ConfigProperty(name = "quarkus.application.port", defaultValue = "8080")
    int port;

    private final Logger logger;
    private final Instance<ConsulClient> consulClient;
    private final ScheduledExecutorService executor;

    public ApplicationLifecycle(Logger logger,
            Instance<ConsulClient> consulClient,
            ScheduledExecutorService executor) {
        this.logger = logger;
        this.consulClient = consulClient;
        this.executor = executor;
    }

    /**
     * Registers this service in Consul shortly after startup, but only when
     * the ConsulClient bean is produced (i.e. the consul registrar is enabled).
     */
    void onStart(@Observes StartupEvent ev) {
        if (!consulClient.isResolvable()) {
            return;
        }
        executor.schedule(() -> consulClient.get().registerService(
                new ServiceOptions()
                        .setPort(port)
                        // address = appName assumes the docker network DNS
                        // resolves the service name — TODO confirm.
                        .setAddress(appName)
                        .setName(appName)
                        .setId(appName),
                result -> logger.infof("Service %s (%d) registered", appName, port)),
                REGISTRATION_DELAY_MS, TimeUnit.MILLISECONDS);
    }

    /**
     * Deregisters the service on shutdown. BUGFIX: registration uses
     * {@code appName} as the service id, but deregistration previously used
     * {@code appName + "-" + port}, so the service was never removed from
     * Consul; both sides now use the same id.
     */
    void onStop(@Observes ShutdownEvent ev) {
        if (consulClient.isResolvable()) {
            consulClient.get().deregisterService(appName,
                    result -> logger.infof("Service %s-%d deregistered", appName, port));
        }
    }
}

View File

@@ -0,0 +1,19 @@
package com.backend.metadata.kodik.api;
import jakarta.enterprise.context.ApplicationScoped;
import io.github.easyretrofit.core.builder.BaseConverterFactoryBuilder;
import retrofit2.Converter;
import retrofit2.converter.jackson.JacksonConverterFactory;
/**
 * Converter-factory builder that plugs Jackson JSON (de)serialization into the
 * easy-retrofit generated API clients (see {@code KodikAPI}'s
 * {@code addConverterFactory}).
 *
 * @author liuziyuan
 */
@ApplicationScoped
public class JacksonConvertFactoryBuilder extends BaseConverterFactoryBuilder {
@Override
public Converter.Factory buildConverterFactory() {
// Default ObjectMapper; response models rely on @JsonProperty/@JsonIgnoreProperties.
return JacksonConverterFactory.create();
}
}

View File

@@ -0,0 +1,67 @@
package com.backend.metadata.kodik.api;
import com.backend.metadata.kodik.api.model.KodikResponse;
import io.github.easyretrofit.core.annotation.RetrofitBuilder;
import retrofit2.Call;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.POST;
/**
 * Retrofit client for the Kodik HTTP API. All endpoints are form-encoded POSTs;
 * every call takes the API token, a result limit, and the with_material_data
 * flag (1 = include material data in the response).
 */
@RetrofitBuilder(baseUrl = "${kodik.api.url}", addConverterFactory = { JacksonConvertFactoryBuilder.class })
public interface KodikAPI {
/** Lists titles (first page). */
@FormUrlEncoded
@POST("list")
Call<KodikResponse> list(
@Field("token") String token,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
/** Lists titles, continuing from the {@code next} page cursor returned by a previous call. */
@FormUrlEncoded
@POST("list")
Call<KodikResponse> list(
@Field("token") String token,
@Field("next") String next,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
/** Full-text search by title. */
@FormUrlEncoded
@POST("search")
Call<KodikResponse> search(
@Field("token") String token,
@Field("title") String title,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
/** Looks a title up by Kodik's own id. */
@FormUrlEncoded
@POST("search")
Call<KodikResponse> findByKodikID(
@Field("token") String token,
@Field("id") String id,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
/** Looks a title up by its Shikimori id. */
@FormUrlEncoded
@POST("search")
Call<KodikResponse> findByShikimoriID(
@Field("token") String token,
@Field("shikimori_id") String shikimoriId,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
/** Looks a title up by its Kinopoisk id. */
@FormUrlEncoded
@POST("search")
Call<KodikResponse> findByKinopoiskID(
@Field("token") String token,
@Field("kinopoisk_id") String kinopoiskId,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
/** Looks a title up by its IMDb id. */
@FormUrlEncoded
@POST("search")
Call<KodikResponse> findByImdbID(
@Field("token") String token,
@Field("imdb_id") String imdbId,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
}

View File

@@ -1,14 +1,19 @@
package com.backend.search.kodik.service.anyame_backend.api.model; package com.backend.metadata.kodik.api.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List; import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@JsonIgnoreProperties(ignoreUnknown = true) @JsonIgnoreProperties(ignoreUnknown = true)
public class KodikResponse { public class KodikResponse {
public int total; public int total;
public List<Result> results; public List<Result> results;
@JsonProperty("prev_page")
public String previousPage;
@JsonProperty("next_page")
public String nextPage;
@JsonIgnoreProperties(ignoreUnknown = true) @JsonIgnoreProperties(ignoreUnknown = true)
public static class Result { public static class Result {
@@ -21,6 +26,7 @@ public class KodikResponse {
@JsonProperty("other_title") @JsonProperty("other_title")
public String otherTitle; public String otherTitle;
public Translation translation; public Translation translation;
public List<Translation> translations;
public int year; public int year;
@JsonProperty("last_season") @JsonProperty("last_season")
public int lastSeason; public int lastSeason;
@@ -67,6 +73,40 @@ public class KodikResponse {
public int id; public int id;
public String title; public String title;
public String type; public String type;
// Value equality over (id, title, type); IDE-generated style, kept consistent
// with equals() below so Translation works in hash-based collections and in
// Stream.distinct() (used when grouping translations per result).
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + id;
result = prime * result + ((title == null) ? 0 : title.hashCode());
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
// Null-safe field-by-field comparison matching hashCode()'s fields.
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Translation other = (Translation) obj;
if (id != other.id)
return false;
if (title == null) {
if (other.title != null)
return false;
} else if (!title.equals(other.title))
return false;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} }
@Override @Override

View File

@@ -1,10 +1,10 @@
package com.backend.search.kodik.service.anyame_backend.api.model; package com.backend.metadata.kodik.api.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true) @JsonIgnoreProperties(ignoreUnknown = true)
public class MaterialData { public class MaterialData {

View File

@@ -0,0 +1,150 @@
package com.backend.metadata.kodik.resource;
import java.io.IOException;

import javax.naming.ServiceUnavailableException;

import org.jboss.logging.Logger;

import com.backend.metadata.kodik.api.KodikAPI;
import com.backend.metadata.kodik.api.model.KodikResponse;
import com.backend.metadata.kodik.service.KodikAPITokenProvider;
import com.backend.metadata.kodik.service.KodikSearchFilterService;

import io.quarkiverse.retrofit.runtime.EnableRetrofit;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.QueryParam;
import retrofit2.Call;
import retrofit2.Response;
/**
 * REST facade over the Kodik API: list, title search, and lookups by external
 * ids (Kodik, Shikimori, Kinopoisk, IMDb). All endpoints apply
 * {@link KodikSearchFilterService} to the raw results before returning them.
 *
 * The six endpoints previously duplicated the same execute/check/log/filter
 * boilerplate; it is now centralized in {@link #execute}, with each endpoint
 * supplying only its failure log line (messages preserved verbatim).
 */
@EnableRetrofit("com.backend.metadata.kodik.api")
@Path("/kodik")
public class SearchResource {

    private static final Logger LOG = Logger.getLogger(SearchResource.class);

    /** Maximum number of results requested from the Kodik API per call. */
    private static final int LIMIT = 100;
    /** Kodik API flag: 1 = include material data in the response. */
    private static final int WITH_MATERIAL_DATA = 1;

    private final KodikAPI kodikAPI;
    private final KodikAPITokenProvider tokenProvider;
    private final KodikSearchFilterService searchFilterService;

    public SearchResource(KodikAPI kodikAPI, KodikAPITokenProvider tokenProvider,
            KodikSearchFilterService searchFilterService) {
        this.kodikAPI = kodikAPI;
        this.tokenProvider = tokenProvider;
        this.searchFilterService = searchFilterService;
    }

    /** Lists titles (first page of the Kodik /list endpoint). */
    @GET
    @Path("/list")
    public KodikResponse list() throws ServiceUnavailableException {
        return execute(kodikAPI.list(tokenProvider.getKodikToken(), LIMIT, WITH_MATERIAL_DATA),
                (code, message) -> LOG.errorv("failed list request with response code {0}, message {1}",
                        code, message));
    }

    /** Full-text title search. */
    @GET
    @Path("/search")
    public KodikResponse search(@QueryParam("title") String title) throws ServiceUnavailableException {
        return execute(kodikAPI.search(tokenProvider.getKodikToken(), title, LIMIT, WITH_MATERIAL_DATA),
                (code, message) -> LOG.errorv("failed search request with title {0}, response code {1}, message {2}",
                        title, code, message));
    }

    /** Lookup by Kodik's own id. */
    @GET
    @Path("/id/{id}")
    public KodikResponse findByKodikId(@PathParam("id") String id) throws ServiceUnavailableException {
        return execute(kodikAPI.findByKodikID(tokenProvider.getKodikToken(), id, LIMIT, WITH_MATERIAL_DATA),
                (code, message) -> LOG.errorv("failed find by kodik id {0}, response code {1}, message {2}",
                        id, code, message));
    }

    /** Lookup by Shikimori id. */
    @GET
    @Path("/shikimori/{id}")
    public KodikResponse findByShikimoriId(@PathParam("id") String id) throws ServiceUnavailableException {
        return execute(kodikAPI.findByShikimoriID(tokenProvider.getKodikToken(), id, LIMIT, WITH_MATERIAL_DATA),
                (code, message) -> LOG.errorv("failed find by shikimori id {0}, response code {1}, message {2}",
                        id, code, message));
    }

    /** Lookup by Kinopoisk id. */
    @GET
    @Path("/kinopoisk/{id}")
    public KodikResponse findByKinopoiskId(@PathParam("id") String id) throws ServiceUnavailableException {
        return execute(kodikAPI.findByKinopoiskID(tokenProvider.getKodikToken(), id, LIMIT, WITH_MATERIAL_DATA),
                (code, message) -> LOG.errorv("failed find by kinopoisk id {0}, response code {1}, message {2}",
                        id, code, message));
    }

    /** Lookup by IMDb id. */
    @GET
    @Path("/imdb/{id}")
    public KodikResponse findByImdbId(@PathParam("id") String id) throws ServiceUnavailableException {
        return execute(kodikAPI.findByImdbID(tokenProvider.getKodikToken(), id, LIMIT, WITH_MATERIAL_DATA),
                (code, message) -> LOG.errorv("failed find by imdb id {0}, response code {1}, message {2}",
                        id, code, message));
    }

    /** Callback used to log a failed (non-2xx) Kodik API response. */
    @FunctionalInterface
    private interface FailureLogger {
        void log(int code, String message);
    }

    /**
     * Executes a Kodik call synchronously and applies the shared error handling:
     * non-2xx responses are logged via {@code onFailure} and mapped to 400;
     * transport errors are mapped to 503.
     *
     * @throws BadRequestException          on a non-successful HTTP response
     * @throws ServiceUnavailableException  on an I/O error talking to Kodik
     */
    private KodikResponse execute(Call<KodikResponse> call, FailureLogger onFailure)
            throws ServiceUnavailableException {
        try {
            Response<KodikResponse> response = call.execute();
            if (!response.isSuccessful()) {
                onFailure.log(response.code(), response.message());
                throw new BadRequestException("bad response, code: " + response.code());
            }
            return filterResults(response.body());
        } catch (IOException e) {
            LOG.warn("i/o error", e);
            throw new ServiceUnavailableException("i/o error");
        }
    }

    /** Applies the type/duplicate filter in place; tolerates a null/empty body. */
    private KodikResponse filterResults(KodikResponse response) {
        // Defensive: a 2xx response can still carry no body or no results.
        if (response == null || response.results == null) {
            return response;
        }
        response.results = searchFilterService.filter(response.results);
        return response;
    }
}

View File

@@ -0,0 +1,15 @@
package com.backend.metadata.kodik.service;
import jakarta.enterprise.context.ApplicationScoped;
import org.eclipse.microprofile.config.inject.ConfigProperty;
/**
 * Supplies the kodikapi.com access token, read once from configuration.
 */
@ApplicationScoped
public class KodikAPITokenProvider {

    // Injected from the "kodik.token" config property (package-private for CDI).
    @ConfigProperty(name = "kodik.token")
    String token;

    /**
     * Returns the configured Kodik API token.
     *
     * @return the token value from the {@code kodik.token} property
     */
    public String getKodikToken() {
        return token;
    }
}

View File

@@ -0,0 +1,69 @@
package com.backend.metadata.kodik.service;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.backend.metadata.kodik.api.model.KodikResponse;
import jakarta.enterprise.context.ApplicationScoped;
/**
 * Post-processes raw Kodik search results: keeps only anime types,
 * deduplicates by a stable identifier, and collapses duplicate entries
 * (same title, different translation) into one result carrying all
 * distinct translations.
 */
@ApplicationScoped
public class KodikSearchFilterService {

    /** Kodik media types exposed to clients; everything else is dropped. */
    private static final Set<String> ALLOWED_TYPES = Set.of("anime-serial", "anime");

    /** Filters and deduplicates {@code target} with no prior pages to exclude. */
    public List<KodikResponse.Result> filter(List<KodikResponse.Result> target) {
        return filter(target, Collections.emptyList());
    }

    /**
     * Filters and deduplicates {@code target}, additionally excluding every
     * identifier already present in {@code fullSeen} (results from previously
     * fetched pages). {@code fullSeen} participates in grouping so that new
     * translations of an already-seen title are not re-emitted.
     */
    public List<KodikResponse.Result> filter(List<KodikResponse.Result> target, List<KodikResponse.Result> fullSeen) {
        Set<String> seenIds = fullSeen.stream()
                .map(this::identifier)
                .collect(Collectors.toSet());
        // LinkedHashMap keeps groups in first-encounter order.
        Map<String, List<KodikResponse.Result>> grouped = Stream.concat(fullSeen.stream(), target.stream())
                .filter(this::isAllowedType)
                .collect(Collectors.groupingBy(this::identifier, LinkedHashMap::new, Collectors.toList()));
        return grouped.entrySet().stream()
                .filter(entry -> !seenIds.contains(entry.getKey()))
                .map(entry -> {
                    List<KodikResponse.Result> group = entry.getValue();
                    // Representative result for the group, enriched with every
                    // distinct translation found across the duplicates.
                    KodikResponse.Result first = group.get(0);
                    first.translations = group.stream()
                            .map(result -> result.translation)
                            .filter(Objects::nonNull)
                            .distinct()
                            .collect(Collectors.toList());
                    return first;
                })
                .collect(Collectors.toList());
    }

    private boolean isAllowedType(KodikResponse.Result result) {
        return result != null && ALLOWED_TYPES.contains(result.type);
    }

    /**
     * Builds a stable identifier for a result: the first non-blank external id
     * (shikimori, kinopoisk, imdb, world-art link) or Kodik's own id as a
     * fallback, suffixed with the season (minimum 1, since movies report 0).
     * Idiom fix: {@code Stream.of(...)} replaces {@code Arrays.asList(...).stream()}.
     */
    public String identifier(KodikResponse.Result result) {
        if (result == null) {
            return "null-0";
        }
        String primaryId = Stream.of(
                        result.shikimoriId,
                        result.kinopoiskId,
                        result.imdbId,
                        result.worldartLink)
                .filter(id -> id != null && !id.isBlank())
                .findFirst()
                .orElse(result.id);
        return primaryId + "-" + Math.max(result.lastSeason, 1);
    }
}

View File

@@ -1,27 +0,0 @@
package com.backend.search.kodik.service.anyame_backend;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@SpringBootApplication
public class AnyameBackendApplication {
public static void main(String[] args) {
SpringApplication.run(AnyameBackendApplication.class, args);
}
// TODO: Research if this is good approach or not?
@Bean
public WebMvcConfigurer corsConfigurer() {
return new WebMvcConfigurer() {
@Override
public void addCorsMappings(CorsRegistry registry) {
registry.addMapping("/**").allowedOrigins("http://localhost:3000", "http://localhost:3001");
}
};
}
}

View File

@@ -1,20 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.api;
import com.backend.search.kodik.service.anyame_backend.api.model.KodikResponse;
import retrofit2.Call;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.POST;
public interface KodikAPI {
@FormUrlEncoded
@POST("search")
Call<KodikResponse> search(
@Field("token") String token,
@Field("title") String title,
@Field("limit") int limit,
@Field("with_material_data") int withMaterialData);
}

View File

@@ -1,15 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.component;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Component
public class KodikAPITokenProvider {
@Value("${KODIK_TOKEN}")
private String kodikToken;
public String getKodikToken() {
return kodikToken;
}
}

View File

@@ -1,29 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.backend.search.kodik.service.anyame_backend.api.KodikAPI;
import retrofit2.Retrofit;
import retrofit2.converter.jackson.JacksonConverterFactory;
@Configuration
public class APIConfig {
private static final String KODIK_API_URL = "https://kodikapi.com/";
@Bean
public Retrofit retrofit() {
return new Retrofit.Builder()
.baseUrl(KODIK_API_URL)
.addConverterFactory(JacksonConverterFactory.create())
.build();
}
@Bean
public KodikAPI kodikAPIService(Retrofit retrofit) {
return retrofit.create(KodikAPI.class);
}
}

View File

@@ -1,48 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.controller;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ResponseStatusException;
import com.backend.search.kodik.service.anyame_backend.api.KodikAPI;
import com.backend.search.kodik.service.anyame_backend.api.model.KodikResponse;
import com.backend.search.kodik.service.anyame_backend.component.KodikAPITokenProvider;
import retrofit2.Response;
@RestController
public class SearchController {
private static final Logger log = LoggerFactory.getLogger(SearchController.class);
private final KodikAPI kodikAPI;
private final KodikAPITokenProvider tokenProvider;
public SearchController(KodikAPI kodikAPI, KodikAPITokenProvider tokenProvider) {
this.kodikAPI = kodikAPI;
this.tokenProvider = tokenProvider;
}
@GetMapping("/search")
public KodikResponse search(@RequestParam("title") String title) {
try {
Response<KodikResponse> response = kodikAPI.search(tokenProvider.getKodikToken(), title, 100, 1).execute();
if (!response.isSuccessful()) {
log.info("failed search request with title {}, response code {}, message {}", title, response.code(),
response.message());
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "bad response, code: " + response.code());
}
log.info("search result {}", response.body());
return response.body();
} catch (IOException e) {
log.warn("i/o error", e);
throw new ResponseStatusException(HttpStatus.SERVICE_UNAVAILABLE, "i/o error");
}
}
}

View File

@@ -1,2 +1,5 @@
spring.application.name=anyame-backend quarkus.application.name=kodik-metadata-service
server.error.include-message=always kodik.api.url=https://kodik-api.com/
quarkus.stork.kodik-metadata-service.service-registrar.type=consul

View File

@@ -0,0 +1,8 @@
package com.backend.metadata.kodik.service;
import io.quarkus.test.junit.QuarkusIntegrationTest;
// Runs the test suite against the packaged application (jar/native image)
// instead of dev mode.
// NOTE(review): the name still references "GreetingResource" from the Quarkus
// starter template; the actual resource here is SearchResource — consider
// renaming to SearchResourceIT.
@QuarkusIntegrationTest
class GreetingResourceIT {
// Execute the same tests but in packaged mode.
}

View File

@@ -1,13 +0,0 @@
package com.backend.search.service.anyame_backend;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class AnyameBackendApplicationTests {
@Test
void contextLoads() {
}
}

3
startup.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Build the project and start the JVM docker-compose stack.
# Steps (chained with && so any failure aborts the rest):
#   1. jar:jar install:install — presumably installs the current artifact into the
#      local Maven repo without a full rebuild (TODO confirm intent).
#   2. clean package — produces the Quarkus runner consumed by the Docker build.
#   3. docker compose up -d --build — rebuilds the image and starts the stack detached.
./mvnw jar:jar install:install && ./mvnw clean package && docker compose -f src/main/docker-compose/jvm.yml up -d --build