Migrate to quarkus

This commit is contained in:
2026-01-27 00:41:46 +05:00
parent 1e42f24e3a
commit e154e232df
32 changed files with 751 additions and 388 deletions

View File

@@ -0,0 +1,98 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
#
# Before building the container image run:
#
# ./mvnw package
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/code-with-quarkus-jvm .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus-jvm
#
# If you want to include the debug port into your docker image
# you will have to expose the debug port (5005 by default) like this: EXPOSE 8080 5005.
# Additionally you will have to set -e JAVA_DEBUG=true and -e JAVA_DEBUG_PORT=*:5005
# when running the container
#
# Then run the container using :
#
# docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus-jvm
#
# This image uses the `run-java.sh` script to run the application.
# This script computes the command line to execute your Java application, and
# includes memory/GC tuning.
# You can configure the behavior using the following environment properties:
# - JAVA_OPTS: JVM options passed to the `java` command (example: "-verbose:class") - Be aware that this will override
# the default JVM options, use `JAVA_OPTS_APPEND` to append options
# - JAVA_OPTS_APPEND: User specified Java options to be appended to generated options
# in JAVA_OPTS (example: "-Dsome.property=foo")
# - JAVA_MAX_MEM_RATIO: Is used when no `-Xmx` option is given in JAVA_OPTS. This is
# used to calculate a default maximal heap memory based on a containers restriction.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xmx` is set to a ratio
# of the container available memory as set here. The default is `50` which means 50%
# of the available memory is used as an upper boundary. You can skip this mechanism by
# setting this value to `0` in which case no `-Xmx` option is added.
# - JAVA_INITIAL_MEM_RATIO: Is used when no `-Xms` option is given in JAVA_OPTS. This
# is used to calculate a default initial heap memory based on the maximum heap memory.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xms` is set to a ratio
# of the `-Xmx` memory as set here. The default is `25` which means 25% of the `-Xmx`
# is used as the initial heap size. You can skip this mechanism by setting this value
# to `0` in which case no `-Xms` option is added (example: "25")
# - JAVA_MAX_INITIAL_MEM: Is used when no `-Xms` option is given in JAVA_OPTS.
# This is used to calculate the maximum value of the initial heap memory. If used in
# a container without any memory constraints for the container then this option has
# no effect. If there is a memory constraint then `-Xms` is limited to the value set
# here. The default is 4096MB which means the calculated value of `-Xms` never will
# be greater than 4096MB. The value of this variable is expressed in MB (example: "4096")
# - JAVA_DIAGNOSTICS: Set this to get some diagnostics information to standard output
# when things are happening. This option, if set to true, will set
# `-XX:+UnlockDiagnosticVMOptions`. Disabled by default (example: "true").
# - JAVA_DEBUG: If set remote debugging will be switched on. Disabled by default (example:
# "true").
# - JAVA_DEBUG_PORT: Port used for remote debugging. Defaults to 5005 (example: "8787").
# - CONTAINER_CORE_LIMIT: A calculated core limit as described in
# https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt. (example: "2")
# - CONTAINER_MAX_MEMORY: Memory limit given to the container (example: "1024").
# - GC_MIN_HEAP_FREE_RATIO: Minimum percentage of heap free after GC to avoid expansion.
# (example: "20")
# - GC_MAX_HEAP_FREE_RATIO: Maximum percentage of heap free after GC to avoid shrinking.
# (example: "40")
# - GC_TIME_RATIO: Specifies the ratio of the time spent outside the garbage collection.
# (example: "4")
# - GC_ADAPTIVE_SIZE_POLICY_WEIGHT: The weighting given to the current GC time versus
# previous GC times. (example: "90")
# - GC_METASPACE_SIZE: The initial metaspace size. (example: "20")
# - GC_MAX_METASPACE_SIZE: The maximum metaspace size. (example: "100")
# - GC_CONTAINER_OPTIONS: Specify Java GC to use. The value of this variable should
# contain the necessary JRE command-line options to specify the required GC, which
# will override the default of `-XX:+UseParallelGC` (example: -XX:+UseG1GC).
# - HTTPS_PROXY: The location of the https proxy. (example: "myuser@127.0.0.1:8080")
# - HTTP_PROXY: The location of the http proxy. (example: "myuser@127.0.0.1:8080")
# - NO_PROXY: A comma separated lists of hosts, IP addresses or domains that can be
# accessed directly. (example: "foo.example.com,bar.example.com")
#
###
FROM registry.access.redhat.com/ubi9/openjdk-21:1.23
ENV LANGUAGE='en_US:en'
# We make four distinct layers so if there are application changes the library layers can be re-used
COPY --chown=185 target/quarkus-app/lib/ /deployments/lib/
COPY --chown=185 target/quarkus-app/*.jar /deployments/
COPY --chown=185 target/quarkus-app/app/ /deployments/app/
COPY --chown=185 target/quarkus-app/quarkus/ /deployments/quarkus/
EXPOSE 8080
# Run as the non-root user 185 (matches the --chown of the copied files above).
USER 185
# Bind Quarkus to all interfaces and route JUL logging through JBoss LogManager.
ENV JAVA_OPTS_APPEND="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
ENV JAVA_APP_JAR="/deployments/quarkus-run.jar"
# run-java.sh (provided by the base image) builds the java command line and
# applies the memory/GC tuning documented in the header above.
ENTRYPOINT [ "/opt/jboss/container/java/run/run-java.sh" ]

View File

@@ -0,0 +1,94 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode
#
# Before building the container image run:
#
# ./mvnw package -Dquarkus.package.jar.type=legacy-jar
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.legacy-jar -t quarkus/code-with-quarkus-legacy-jar .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus-legacy-jar
#
# If you want to include the debug port into your docker image
# you will have to expose the debug port (5005 by default) like this: EXPOSE 8080 5005.
# Additionally you will have to set -e JAVA_DEBUG=true and -e JAVA_DEBUG_PORT=*:5005
# when running the container
#
# Then run the container using :
#
# docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus-legacy-jar
#
# This image uses the `run-java.sh` script to run the application.
# This script computes the command line to execute your Java application, and
# includes memory/GC tuning.
# You can configure the behavior using the following environment properties:
# - JAVA_OPTS: JVM options passed to the `java` command (example: "-verbose:class") - Be aware that this will override
# the default JVM options, use `JAVA_OPTS_APPEND` to append options
# - JAVA_OPTS_APPEND: User specified Java options to be appended to generated options
# in JAVA_OPTS (example: "-Dsome.property=foo")
# - JAVA_MAX_MEM_RATIO: Is used when no `-Xmx` option is given in JAVA_OPTS. This is
# used to calculate a default maximal heap memory based on a containers restriction.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xmx` is set to a ratio
# of the container available memory as set here. The default is `50` which means 50%
# of the available memory is used as an upper boundary. You can skip this mechanism by
# setting this value to `0` in which case no `-Xmx` option is added.
# - JAVA_INITIAL_MEM_RATIO: Is used when no `-Xms` option is given in JAVA_OPTS. This
# is used to calculate a default initial heap memory based on the maximum heap memory.
# If used in a container without any memory constraints for the container then this
# option has no effect. If there is a memory constraint then `-Xms` is set to a ratio
# of the `-Xmx` memory as set here. The default is `25` which means 25% of the `-Xmx`
# is used as the initial heap size. You can skip this mechanism by setting this value
# to `0` in which case no `-Xms` option is added (example: "25")
# - JAVA_MAX_INITIAL_MEM: Is used when no `-Xms` option is given in JAVA_OPTS.
# This is used to calculate the maximum value of the initial heap memory. If used in
# a container without any memory constraints for the container then this option has
# no effect. If there is a memory constraint then `-Xms` is limited to the value set
# here. The default is 4096MB which means the calculated value of `-Xms` never will
# be greater than 4096MB. The value of this variable is expressed in MB (example: "4096")
# - JAVA_DIAGNOSTICS: Set this to get some diagnostics information to standard output
# when things are happening. This option, if set to true, will set
# `-XX:+UnlockDiagnosticVMOptions`. Disabled by default (example: "true").
# - JAVA_DEBUG: If set remote debugging will be switched on. Disabled by default (example:
# "true").
# - JAVA_DEBUG_PORT: Port used for remote debugging. Defaults to 5005 (example: "8787").
# - CONTAINER_CORE_LIMIT: A calculated core limit as described in
# https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt. (example: "2")
# - CONTAINER_MAX_MEMORY: Memory limit given to the container (example: "1024").
# - GC_MIN_HEAP_FREE_RATIO: Minimum percentage of heap free after GC to avoid expansion.
# (example: "20")
# - GC_MAX_HEAP_FREE_RATIO: Maximum percentage of heap free after GC to avoid shrinking.
# (example: "40")
# - GC_TIME_RATIO: Specifies the ratio of the time spent outside the garbage collection.
# (example: "4")
# - GC_ADAPTIVE_SIZE_POLICY_WEIGHT: The weighting given to the current GC time versus
# previous GC times. (example: "90")
# - GC_METASPACE_SIZE: The initial metaspace size. (example: "20")
# - GC_MAX_METASPACE_SIZE: The maximum metaspace size. (example: "100")
# - GC_CONTAINER_OPTIONS: Specify Java GC to use. The value of this variable should
# contain the necessary JRE command-line options to specify the required GC, which
# will override the default of `-XX:+UseParallelGC` (example: -XX:+UseG1GC).
# - HTTPS_PROXY: The location of the https proxy. (example: "myuser@127.0.0.1:8080")
# - HTTP_PROXY: The location of the http proxy. (example: "myuser@127.0.0.1:8080")
# - NO_PROXY: A comma separated lists of hosts, IP addresses or domains that can be
# accessed directly. (example: "foo.example.com,bar.example.com")
#
###
FROM registry.access.redhat.com/ubi9/openjdk-21:1.23
ENV LANGUAGE='en_US:en'
# Legacy-jar layout: dependencies go to lib/, the app is a single runner jar.
COPY target/lib/* /deployments/lib/
COPY target/*-runner.jar /deployments/quarkus-run.jar
EXPOSE 8080
# Run as the non-root user 185.
USER 185
# Bind Quarkus to all interfaces and route JUL logging through JBoss LogManager.
ENV JAVA_OPTS_APPEND="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
ENV JAVA_APP_JAR="/deployments/quarkus-run.jar"
# run-java.sh (provided by the base image) builds the java command line and
# applies the memory/GC tuning documented in the header above.
ENTRYPOINT [ "/opt/jboss/container/java/run/run-java.sh" ]

View File

@@ -0,0 +1,29 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode.
#
# Before building the container image run:
#
# ./mvnw package -Dnative
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.native -t quarkus/code-with-quarkus .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus
#
# The `registry.access.redhat.com/ubi9/ubi-minimal:9.7` base image is based on UBI 9.
# To use UBI 8, switch to `quay.io/ubi8/ubi-minimal:8.10`.
###
FROM registry.access.redhat.com/ubi9/ubi-minimal:9.7
WORKDIR /work/
# Make /work writable by the root group so the container also works when run
# with an arbitrary (OpenShift-style) uid.
RUN chown 1001 /work \
&& chmod "g+rwX" /work \
&& chown 1001:root /work
# Copy the native executable produced by `./mvnw package -Dnative`.
COPY --chown=1001:root --chmod=0755 target/*-runner /work/application
EXPOSE 8080
# Run as the non-root user 1001.
USER 1001
ENTRYPOINT ["./application", "-Dquarkus.http.host=0.0.0.0"]

View File

@@ -0,0 +1,32 @@
####
# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode.
# It uses a micro base image, tuned for Quarkus native executables.
# It reduces the size of the resulting container image.
# Check https://quarkus.io/guides/quarkus-runtime-base-image for further information about this image.
#
# Before building the container image run:
#
# ./mvnw package -Dnative
#
# Then, build the image with:
#
# docker build -f src/main/docker/Dockerfile.native-micro -t quarkus/code-with-quarkus .
#
# Then run the container using:
#
# docker run -i --rm -p 8080:8080 quarkus/code-with-quarkus
#
# The `quay.io/quarkus/ubi9-quarkus-micro-image:2.0` base image is based on UBI 9.
# To use UBI 8, switch to `quay.io/quarkus/quarkus-micro-image:2.0`.
###
FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0
WORKDIR /work/
# Make /work writable by the root group so the container also works when run
# with an arbitrary (OpenShift-style) uid.
RUN chown 1001 /work \
&& chmod "g+rwX" /work \
&& chown 1001:root /work
# Copy the native executable produced by `./mvnw package -Dnative`.
COPY --chown=1001:root --chmod=0755 target/*-runner /work/application
EXPOSE 8080
# Run as the non-root user 1001.
USER 1001
ENTRYPOINT ["./application", "-Dquarkus.http.host=0.0.0.0"]

View File

@@ -1,27 +0,0 @@
package com.backend.search.kodik.service.anyame_backend;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * Spring Boot entry point for the anyame search backend.
 * Also registers a permissive CORS policy for the local frontend dev servers.
 */
@SpringBootApplication
public class AnyameBackendApplication {

    /** Origins allowed to call this API (local frontend dev servers). */
    private static final String[] DEV_ORIGINS = {
            "http://localhost:3000",
            "http://localhost:3001"
    };

    public static void main(String[] args) {
        SpringApplication.run(AnyameBackendApplication.class, args);
    }

    // TODO: Research whether a global WebMvcConfigurer is the right place for CORS.
    @Bean
    public WebMvcConfigurer corsConfigurer() {
        return new WebMvcConfigurer() {
            @Override
            public void addCorsMappings(CorsRegistry registry) {
                registry.addMapping("/**").allowedOrigins(DEV_ORIGINS);
            }
        };
    }
}

View File

@@ -1,15 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.component;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/**
 * Exposes the Kodik API token configured via the {@code kodik.token} property.
 */
@Component
public class KodikAPITokenProvider {

    // Injected by Spring from the kodik.token configuration property.
    @Value("${kodik.token}")
    private String token;

    /** @return the configured Kodik API token */
    public String getKodikToken() {
        return token;
    }
}

View File

@@ -1,29 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.backend.search.kodik.service.anyame_backend.api.KodikAPI;
import retrofit2.Retrofit;
import retrofit2.converter.jackson.JacksonConverterFactory;
/**
 * Wires the Retrofit client used to talk to the Kodik HTTP API.
 */
@Configuration
public class APIConfig {

    /** Base URL of the Kodik search API. */
    private static final String KODIK_API_URL = "https://kodikapi.com/";

    /** Retrofit instance configured with Jackson for JSON (de)serialization. */
    @Bean
    public Retrofit retrofit() {
        Retrofit.Builder builder = new Retrofit.Builder();
        builder.baseUrl(KODIK_API_URL);
        builder.addConverterFactory(JacksonConverterFactory.create());
        return builder.build();
    }

    /** Typed Kodik API client generated from the {@link KodikAPI} interface. */
    @Bean
    public KodikAPI kodikAPIService(Retrofit retrofit) {
        return retrofit.create(KodikAPI.class);
    }
}

View File

@@ -1,50 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.controller;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ResponseStatusException;
import com.backend.search.kodik.service.anyame_backend.api.KodikAPI;
import com.backend.search.kodik.service.anyame_backend.api.model.KodikResponse;
import com.backend.search.kodik.service.anyame_backend.component.KodikAPITokenProvider;
import com.backend.search.kodik.service.anyame_backend.service.KodikSearchFilterService;
import retrofit2.Response;
/**
 * REST endpoint that proxies title searches to the Kodik API and filters the
 * results through {@link KodikSearchFilterService}.
 */
@RestController
public class SearchController {

    // Per-class constant logger instead of a mutable instance field.
    private static final Logger log = LoggerFactory.getLogger(SearchController.class);

    private final KodikAPI kodikAPI;
    private final KodikAPITokenProvider tokenProvider;
    private final KodikSearchFilterService searchFilterService;

    public SearchController(KodikAPI kodikAPI, KodikAPITokenProvider tokenProvider,
            KodikSearchFilterService searchFilterService) {
        this.kodikAPI = kodikAPI;
        this.tokenProvider = tokenProvider;
        this.searchFilterService = searchFilterService;
    }

    /**
     * Searches Kodik for the given title and returns the filtered response.
     *
     * @param title title to search for
     * @return filtered Kodik search results
     * @throws ResponseStatusException 400 on an unsuccessful upstream response,
     *         502 on an empty upstream body, 503 on I/O failure
     */
    @GetMapping("/search")
    public KodikResponse search(@RequestParam("title") String title) {
        try {
            Response<KodikResponse> response = kodikAPI.search(tokenProvider.getKodikToken(), title, 100, 1).execute();
            if (!response.isSuccessful()) {
                // A failed upstream call is a problem, not routine traffic: warn, not info.
                log.warn("failed search request with title {}, response code {}, message {}", title, response.code(),
                        response.message());
                throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "bad response, code: " + response.code());
            }
            KodikResponse body = response.body();
            if (body == null) {
                // Retrofit can return a successful response with no body; guard
                // against the NPE that filter(null) would otherwise produce.
                log.warn("empty response body for title {}", title);
                throw new ResponseStatusException(HttpStatus.BAD_GATEWAY, "empty response body");
            }
            return searchFilterService.filter(body);
        } catch (IOException e) {
            log.warn("i/o error", e);
            throw new ResponseStatusException(HttpStatus.SERVICE_UNAVAILABLE, "i/o error");
        }
    }
}

View File

@@ -1,44 +0,0 @@
package com.backend.search.kodik.service.anyame_backend.service;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.backend.search.kodik.service.anyame_backend.api.model.KodikResponse;
import com.backend.search.kodik.service.anyame_backend.api.model.KodikResponse.Result;
/**
 * Filters Kodik search results down to anime entries, deduplicated by a
 * stable cross-source identifier.
 */
@Service
public class KodikSearchFilterService {

    private static final Logger log = LoggerFactory.getLogger(KodikSearchFilterService.class);

    /** Only these Kodik media types are kept in the results. */
    private static final List<String> ALLOWED_TYPES = Arrays.asList("anime-serial", "anime");

    /**
     * Removes non-anime entries and duplicates from the response, in place.
     *
     * @param body upstream Kodik response; its {@code results} list is replaced
     * @return the same response instance with filtered results
     */
    public KodikResponse filter(KodikResponse body) {
        Set<String> seenIds = new HashSet<>();
        List<Result> filteredResults = body.results.stream()
                .filter(result -> ALLOWED_TYPES.contains(result.type))
                // Set.add returns false for duplicates, so this keeps first occurrences.
                .filter(result -> seenIds.add(identifier(result)))
                .toList();
        body.results = filteredResults;
        return body;
    }

    /**
     * Builds a deduplication key from the first available external id plus the
     * last season. The "-" separator prevents collisions between e.g.
     * (id "x1", season 2) and (id "x", season 12), which the previous
     * delimiter-less concatenation would have merged.
     */
    public String identifier(Result result) {
        List<String> identifiers = Arrays.asList(result.kinopoiskId,
                result.imdbId,
                result.shikimoriId,
                result.worldartLink);
        return identifiers.stream().filter(identifier -> identifier != null && !identifier.isBlank()).findFirst()
                .orElse(result.id) + "-" + result.lastSeason;
    }
}

View File

@@ -0,0 +1,19 @@
package com.backend.search.kodik.service.api;
import jakarta.enterprise.context.ApplicationScoped;
import io.github.easyretrofit.core.builder.BaseConverterFactoryBuilder;
import retrofit2.Converter;
import retrofit2.converter.jackson.JacksonConverterFactory;
/**
* @author liuziyuan
*/
/**
 * Converter-factory builder that plugs Jackson JSON handling into the
 * easy-retrofit integration.
 *
 * @author liuziyuan
 */
@ApplicationScoped
public class JacksonConvertFactoryBuilder extends BaseConverterFactoryBuilder {

    /** @return a Jackson-backed Retrofit converter factory */
    @Override
    public Converter.Factory buildConverterFactory() {
        Converter.Factory jacksonFactory = JacksonConverterFactory.create();
        return jacksonFactory;
    }
}

View File

@@ -1,14 +1,15 @@
package com.backend.search.kodik.service.anyame_backend.api;
package com.backend.search.kodik.service.api;
import com.backend.search.kodik.service.anyame_backend.api.model.KodikResponse;
import com.backend.search.kodik.service.api.model.KodikResponse;
import io.github.easyretrofit.core.annotation.RetrofitBuilder;
import retrofit2.Call;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.POST;
@RetrofitBuilder(baseUrl = "${kodik.api.url}", addConverterFactory = { JacksonConvertFactoryBuilder.class })
public interface KodikAPI {
@FormUrlEncoded
@POST("search")
Call<KodikResponse> search(

View File

@@ -1,10 +1,10 @@
package com.backend.search.kodik.service.anyame_backend.api.model;
package com.backend.search.kodik.service.api.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public class KodikResponse {
public int total;

View File

@@ -1,10 +1,10 @@
package com.backend.search.kodik.service.anyame_backend.api.model;
package com.backend.search.kodik.service.api.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public class MaterialData {

View File

@@ -0,0 +1,53 @@
package com.backend.search.kodik.service.resource;
import java.io.IOException;
import javax.naming.ServiceUnavailableException;
import org.jboss.logging.Logger;
import com.backend.search.kodik.service.api.KodikAPI;
import com.backend.search.kodik.service.api.model.KodikResponse;
import com.backend.search.kodik.service.service.KodikAPITokenProvider;
import com.backend.search.kodik.service.service.KodikSearchFilterService;
import io.quarkiverse.retrofit.runtime.EnableRetrofit;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.QueryParam;
import retrofit2.Response;
/**
 * JAX-RS resource that proxies title searches to the Kodik API and filters the
 * results through {@link KodikSearchFilterService}.
 */
@EnableRetrofit("com.backend.search.kodik.service.api")
@Path("/search")
public class SearchResource {

    private static final Logger LOG = Logger.getLogger(SearchResource.class);

    private final KodikAPI kodikAPI;
    private final KodikAPITokenProvider tokenProvider;
    private final KodikSearchFilterService searchFilterService;

    public SearchResource(KodikAPI kodikAPI, KodikAPITokenProvider tokenProvider,
            KodikSearchFilterService searchFilterService) {
        this.kodikAPI = kodikAPI;
        this.tokenProvider = tokenProvider;
        this.searchFilterService = searchFilterService;
    }

    /**
     * Searches Kodik for the given title and returns the filtered response.
     *
     * @param title title to search for
     * @return filtered Kodik search results
     * @throws BadRequestException when the upstream call returns a non-2xx code
     * @throws ServiceUnavailableException on I/O failure or an empty upstream body
     */
    @GET
    public KodikResponse search(@QueryParam("title") String title) throws ServiceUnavailableException {
        try {
            Response<KodikResponse> response = kodikAPI.search(tokenProvider.getKodikToken(), title, 100, 1).execute();
            if (!response.isSuccessful()) {
                LOG.errorv("failed search request with title {0}, response code {1}, message {2}", title,
                        response.code(),
                        response.message());
                throw new BadRequestException("bad response, code: " + response.code());
            }
            KodikResponse body = response.body();
            if (body == null) {
                // Retrofit can return a successful response with no body;
                // KodikSearchFilterService.filter rejects null, so fail explicitly here.
                LOG.errorv("empty response body for title {0}", title);
                throw new ServiceUnavailableException("empty response body");
            }
            return searchFilterService.filter(body);
        } catch (IOException e) {
            LOG.warn("i/o error", e);
            throw new ServiceUnavailableException("i/o error");
        }
    }
}

View File

@@ -0,0 +1,15 @@
package com.backend.search.kodik.service.service;
import jakarta.enterprise.context.ApplicationScoped;
import org.eclipse.microprofile.config.inject.ConfigProperty;
/**
 * CDI bean exposing the Kodik API token read from the {@code kodik.token}
 * configuration property.
 */
@ApplicationScoped
public class KodikAPITokenProvider {

    // Injected by MicroProfile Config; package-private visibility for injection.
    @ConfigProperty(name = "kodik.token")
    String token;

    /** @return the configured Kodik API token */
    public String getKodikToken() {
        return token;
    }
}

View File

@@ -0,0 +1,62 @@
package com.backend.search.kodik.service.service;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.jboss.logging.Logger;
import com.backend.search.kodik.service.api.model.KodikResponse;
/**
 * Filters Kodik search results down to anime entries and removes duplicates
 * that share the same cross-source identifier.
 */
@ApplicationScoped
public class KodikSearchFilterService {

    private static final Logger LOG = Logger.getLogger(KodikSearchFilterService.class);

    /** Kodik media types that are kept; everything else is dropped. */
    private static final Set<String> ALLOWED_TYPES = Set.of("anime-serial", "anime");

    /**
     * Removes non-anime and duplicate entries from the response, in place.
     *
     * @param body upstream Kodik response; must be non-null with non-null results
     * @return the same response instance with its results filtered
     * @throws NullPointerException when body or body.results is null
     */
    public KodikResponse filter(KodikResponse body) {
        Objects.requireNonNull(body, "KodikResponse body cannot be null");
        Objects.requireNonNull(body.results, "KodikResponse.results cannot be null");
        Set<String> seen = new HashSet<>();
        List<KodikResponse.Result> kept = new ArrayList<>(body.results);
        // Keep an entry only if its type is allowed AND its identifier is new.
        // Short-circuiting means identifier() is never computed for disallowed
        // types, matching the original control flow.
        kept.removeIf(entry -> !(isAllowedType(entry) && seen.add(identifier(entry))));
        body.results = kept;
        return body;
    }

    /** True when the entry is non-null and of a type we serve. */
    private boolean isAllowedType(KodikResponse.Result entry) {
        return entry != null && ALLOWED_TYPES.contains(entry.type);
    }

    /**
     * Builds a deduplication key: the first non-blank external id (falling back
     * to the Kodik id), joined with the last season by a "-" separator.
     */
    public String identifier(KodikResponse.Result result) {
        if (result == null) {
            return "null-0";
        }
        String chosen = result.id;
        for (String candidate : Arrays.asList(result.kinopoiskId, result.imdbId,
                result.shikimoriId, result.worldartLink)) {
            if (candidate != null && !candidate.isBlank()) {
                chosen = candidate;
                break;
            }
        }
        return chosen + "-" + result.lastSeason;
    }
}

View File

@@ -1,3 +1 @@
spring.application.name=anyame-kodik-search-backend
kodik.token=${KODIK_TOKEN}
eureka.client.serviceUrl.defaultZone: ${EUREKA_SCHEMA}://${EUREKA_HOST}/eureka/
kodik.api.url=https://kodikapi.com/

View File

@@ -1,11 +0,0 @@
<configuration>
<!-- Send all log events as JSON to a Logstash instance over TCP. -->
<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>logstash:5044</destination>
<!-- JSON encoder understood by the ELK stack. -->
<encoder class="net.logstash.logback.encoder.LogstashEncoder" />
</appender>
<!-- Everything at INFO and above goes to Logstash only; no console appender. -->
<root level="INFO">
<appender-ref ref="LOGSTASH" />
</root>
</configuration>

View File

@@ -1,13 +0,0 @@
package com.backend.search.service.anyame_backend;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class AnyameBackendApplicationTests {
// Smoke test: passes only if the full Spring application context starts.
@Test
void contextLoads() {
}
}

View File

@@ -0,0 +1,8 @@
package org.acme;
import io.quarkus.test.junit.QuarkusIntegrationTest;
@QuarkusIntegrationTest
class GreetingResourceIT extends GreetingResourceTest {
// Execute the same tests but in packaged mode.
// NOTE(review): looks like Quarkus starter-template boilerplate; confirm a
// /hello GreetingResource actually exists in this service, or remove the test.
}

View File

@@ -0,0 +1,20 @@
package org.acme;
import io.quarkus.test.junit.QuarkusTest;
import org.junit.jupiter.api.Test;
import static io.restassured.RestAssured.given;
import static org.hamcrest.CoreMatchers.is;
@QuarkusTest
class GreetingResourceTest {
// NOTE(review): starter-template test for the default /hello endpoint; this
// migration only shows a /search resource, so confirm /hello exists or drop this.
@Test
void testHelloEndpoint() {
given()
.when().get("/hello")
.then()
.statusCode(200)
.body(is("Hello from Quarkus REST"));
}
}