Mirror of https://github.com/imgfloat/server.git, synced 2026-02-05 03:39:26 +00:00
Add server validation
@@ -1,18 +1,45 @@
 package com.imgfloat.app.model;

+import jakarta.validation.constraints.DecimalMax;
+import jakarta.validation.constraints.DecimalMin;
+import jakarta.validation.constraints.Positive;
+import jakarta.validation.constraints.PositiveOrZero;

 public class TransformRequest {
     private double x;
     private double y;

+    @Positive(message = "Width must be greater than 0")
     private double width;

+    @Positive(message = "Height must be greater than 0")
     private double height;

     private double rotation;

+    @DecimalMin(value = "0.0", message = "Playback speed cannot be negative")
+    @DecimalMax(value = "4.0", message = "Playback speed cannot exceed 4.0")
     private Double speed;

     private Boolean muted;

+    @Positive(message = "zIndex must be at least 1")
     private Integer zIndex;
     private Boolean audioLoop;

+    @PositiveOrZero(message = "Audio delay must be zero or greater")
     private Integer audioDelayMillis;

+    @DecimalMin(value = "0.1", message = "Audio speed must be at least 0.1x")
+    @DecimalMax(value = "4.0", message = "Audio speed cannot exceed 4.0x")
     private Double audioSpeed;

+    @DecimalMin(value = "0.5", message = "Audio pitch must be at least 0.5x")
+    @DecimalMax(value = "2.0", message = "Audio pitch cannot exceed 2.0x")
     private Double audioPitch;

+    @DecimalMin(value = "0.0", message = "Audio volume cannot be negative")
+    @DecimalMax(value = "1.0", message = "Audio volume cannot exceed 1.0")
     private Double audioVolume;

     public double getX() {
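The annotations above are standard Jakarta Bean Validation constraints; by themselves they only take effect when the request body is validated at the web boundary. The controller is not shown in this commit, so the sketch below uses a hypothetical class name and route and only illustrates how @Valid on the @RequestBody parameter would have Spring reject an invalid TransformRequest with a 400 before the service method runs.

import com.imgfloat.app.model.TransformRequest;
import jakarta.validation.Valid;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical controller sketch -- the real mapping is not part of this commit.
// Imports for ChannelDirectoryService and AssetView are omitted because their packages
// are not shown in this diff.
// @Valid runs the constraint annotations on TransformRequest before the method body;
// a violation becomes a 400 response via Spring's default MethodArgumentNotValidException handling.
@RestController
@RequestMapping("/api/channels/{broadcaster}/assets")
class TransformControllerSketch {

    private final ChannelDirectoryService service;

    TransformControllerSketch(ChannelDirectoryService service) {
        this.service = service;
    }

    @PutMapping("/{assetId}/transform")
    ResponseEntity<AssetView> updateTransform(@PathVariable String broadcaster,
                                              @PathVariable String assetId,
                                              @Valid @RequestBody TransformRequest request) {
        // Delegates to the service shown below; an empty Optional maps to 404.
        return service.updateTransform(broadcaster, assetId, request)
                .map(ResponseEntity::ok)
                .orElseGet(() -> ResponseEntity.notFound().build());
    }
}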
@@ -23,6 +23,7 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.messaging.simp.SimpMessagingTemplate;
 import org.springframework.stereotype.Service;
 import org.springframework.web.multipart.MultipartFile;
+import org.springframework.web.server.ResponseStatusException;

 import java.awt.image.BufferedImage;
 import java.io.ByteArrayInputStream;
@@ -54,10 +55,18 @@ import javax.imageio.stream.ImageInputStream;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;

+import static org.springframework.http.HttpStatus.BAD_REQUEST;
+
 @Service
 public class ChannelDirectoryService {
     private static final int MIN_GIF_DELAY_MS = 20;
     private static final String PREVIEW_MEDIA_TYPE = "image/png";
+    private static final double MAX_SPEED = 4.0;
+    private static final double MIN_AUDIO_SPEED = 0.1;
+    private static final double MAX_AUDIO_SPEED = 4.0;
+    private static final double MIN_AUDIO_PITCH = 0.5;
+    private static final double MAX_AUDIO_PITCH = 2.0;
+    private static final double MAX_AUDIO_VOLUME = 1.0;
     private static final Logger logger = LoggerFactory.getLogger(ChannelDirectoryService.class);
     private final ChannelRepository channelRepository;
     private final AssetRepository assetRepository;
@@ -180,6 +189,7 @@ public class ChannelDirectoryService {
         return assetRepository.findById(assetId)
                 .filter(asset -> normalized.equals(asset.getBroadcaster()))
                 .map(asset -> {
+                    validateTransform(request);
                     asset.setX(request.getX());
                     asset.setY(request.getY());
                     asset.setWidth(request.getWidth());
@@ -188,7 +198,7 @@ public class ChannelDirectoryService {
                     if (request.getZIndex() != null) {
                         asset.setZIndex(request.getZIndex());
                     }
-                    if (request.getSpeed() != null && request.getSpeed() >= 0) {
+                    if (request.getSpeed() != null) {
                         asset.setSpeed(request.getSpeed());
                     }
                     if (request.getMuted() != null && asset.isVideo()) {
@@ -197,18 +207,17 @@ public class ChannelDirectoryService {
                     if (request.getAudioLoop() != null) {
                         asset.setAudioLoop(request.getAudioLoop());
                     }
-                    if (request.getAudioDelayMillis() != null && request.getAudioDelayMillis() >= 0) {
+                    if (request.getAudioDelayMillis() != null) {
                         asset.setAudioDelayMillis(request.getAudioDelayMillis());
                     }
-                    if (request.getAudioSpeed() != null && request.getAudioSpeed() >= 0) {
+                    if (request.getAudioSpeed() != null) {
                         asset.setAudioSpeed(request.getAudioSpeed());
                     }
-                    if (request.getAudioPitch() != null && request.getAudioPitch() > 0) {
+                    if (request.getAudioPitch() != null) {
                         asset.setAudioPitch(request.getAudioPitch());
                     }
-                    if (request.getAudioVolume() != null && request.getAudioVolume() >= 0) {
-                        double clamped = Math.max(0.0, Math.min(2.0, request.getAudioVolume()));
-                        asset.setAudioVolume(clamped);
+                    if (request.getAudioVolume() != null) {
+                        asset.setAudioVolume(request.getAudioVolume());
                     }
                     assetRepository.save(asset);
                     AssetView view = AssetView.from(normalized, asset);
@@ -218,6 +227,33 @@ public class ChannelDirectoryService {
                 });
     }

+    private void validateTransform(TransformRequest request) {
+        if (request.getWidth() <= 0) {
+            throw new ResponseStatusException(BAD_REQUEST, "Width must be greater than 0");
+        }
+        if (request.getHeight() <= 0) {
+            throw new ResponseStatusException(BAD_REQUEST, "Height must be greater than 0");
+        }
+        if (request.getSpeed() != null && (request.getSpeed() < 0 || request.getSpeed() > MAX_SPEED)) {
+            throw new ResponseStatusException(BAD_REQUEST, "Playback speed must be between 0 and " + MAX_SPEED);
+        }
+        if (request.getZIndex() != null && request.getZIndex() < 1) {
+            throw new ResponseStatusException(BAD_REQUEST, "zIndex must be at least 1");
+        }
+        if (request.getAudioDelayMillis() != null && request.getAudioDelayMillis() < 0) {
+            throw new ResponseStatusException(BAD_REQUEST, "Audio delay must be zero or greater");
+        }
+        if (request.getAudioSpeed() != null && (request.getAudioSpeed() < MIN_AUDIO_SPEED || request.getAudioSpeed() > MAX_AUDIO_SPEED)) {
+            throw new ResponseStatusException(BAD_REQUEST, "Audio speed must be between " + MIN_AUDIO_SPEED + " and " + MAX_AUDIO_SPEED + "x");
+        }
+        if (request.getAudioPitch() != null && (request.getAudioPitch() < MIN_AUDIO_PITCH || request.getAudioPitch() > MAX_AUDIO_PITCH)) {
+            throw new ResponseStatusException(BAD_REQUEST, "Audio pitch must be between " + MIN_AUDIO_PITCH + " and " + MAX_AUDIO_PITCH + "x");
+        }
+        if (request.getAudioVolume() != null && (request.getAudioVolume() < 0 || request.getAudioVolume() > MAX_AUDIO_VOLUME)) {
+            throw new ResponseStatusException(BAD_REQUEST, "Audio volume must be between 0 and " + MAX_AUDIO_VOLUME);
+        }
+    }
+
     public Optional<AssetView> triggerPlayback(String broadcaster, String assetId, PlaybackRequest request) {
         String normalized = normalize(broadcaster);
         return assetRepository.findById(assetId)
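With validateTransform in place, out-of-range values are rejected up front instead of being silently skipped or clamped (the removed audio-volume branch, for example, clamped the value into range rather than failing). A minimal sketch of the failure path as a caller sees it, assuming a ChannelDirectoryService instance named service and an existing assetId are in scope:

import org.springframework.web.server.ResponseStatusException;

// Assumes a ChannelDirectoryService named "service" and an "assetId" from surrounding code.
TransformRequest request = new TransformRequest();
request.setWidth(200);
request.setHeight(150);
request.setAudioVolume(1.5); // above MAX_AUDIO_VOLUME (1.0)

try {
    service.updateTransform("caster", assetId, request);
} catch (ResponseStatusException ex) {
    // With Spring Framework 6, getStatusCode() reports 400 BAD_REQUEST and
    // getReason() carries "Audio volume must be between 0 and 1.0".
    System.out.println(ex.getStatusCode() + " " + ex.getReason());
}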
@@ -13,6 +13,7 @@ import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.springframework.messaging.simp.SimpMessagingTemplate;
 import org.springframework.mock.web.MockMultipartFile;
+import org.springframework.web.server.ResponseStatusException;

 import java.awt.image.BufferedImage;
 import java.io.ByteArrayOutputStream;
@@ -28,6 +29,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import javax.imageio.ImageIO;

 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.doAnswer;
@@ -69,12 +71,7 @@ class ChannelDirectoryServiceTest {
         String channel = "caster";
         String id = service.createAsset(channel, file).orElseThrow().id();

-        TransformRequest transform = new TransformRequest();
-        transform.setX(10);
-        transform.setY(20);
-        transform.setWidth(200);
-        transform.setHeight(150);
-        transform.setRotation(45);
+        TransformRequest transform = validTransform();

         assertThat(service.updateTransform(channel, id, transform)).isPresent();
@@ -83,6 +80,62 @@ class ChannelDirectoryServiceTest {
         assertThat(service.updateVisibility(channel, id, visibilityRequest)).isPresent();
     }

+    @Test
+    void rejectsInvalidTransformDimensions() throws Exception {
+        String channel = "caster";
+        String id = createSampleAsset(channel);
+
+        TransformRequest transform = validTransform();
+        transform.setWidth(0);
+
+        assertThatThrownBy(() -> service.updateTransform(channel, id, transform))
+                .isInstanceOf(ResponseStatusException.class)
+                .hasMessageContaining("Width must be greater than 0");
+    }
+
+    @Test
+    void rejectsOutOfRangePlaybackValues() throws Exception {
+        String channel = "caster";
+        String id = createSampleAsset(channel);
+
+        TransformRequest speedTransform = validTransform();
+        speedTransform.setSpeed(5.0);
+
+        assertThatThrownBy(() -> service.updateTransform(channel, id, speedTransform))
+                .isInstanceOf(ResponseStatusException.class)
+                .hasMessageContaining("Playback speed must be between 0 and 4.0");
+
+        TransformRequest volumeTransform = validTransform();
+        volumeTransform.setAudioVolume(1.5);
+
+        assertThatThrownBy(() -> service.updateTransform(channel, id, volumeTransform))
+                .isInstanceOf(ResponseStatusException.class)
+                .hasMessageContaining("Audio volume must be between 0 and 1.0");
+    }
+
+    @Test
+    void appliesBoundaryValues() throws Exception {
+        String channel = "caster";
+        String id = createSampleAsset(channel);
+
+        TransformRequest transform = validTransform();
+        transform.setSpeed(0.0);
+        transform.setAudioSpeed(0.1);
+        transform.setAudioPitch(0.5);
+        transform.setAudioVolume(1.0);
+        transform.setAudioDelayMillis(0);
+        transform.setZIndex(1);
+
+        AssetView view = service.updateTransform(channel, id, transform).orElseThrow();
+
+        assertThat(view.speed()).isEqualTo(0.0);
+        assertThat(view.audioSpeed()).isEqualTo(0.1);
+        assertThat(view.audioPitch()).isEqualTo(0.5);
+        assertThat(view.audioVolume()).isEqualTo(1.0);
+        assertThat(view.audioDelayMillis()).isEqualTo(0);
+        assertThat(view.zIndex()).isEqualTo(1);
+    }
+
     private byte[] samplePng() throws IOException {
         BufferedImage image = new BufferedImage(2, 2, BufferedImage.TYPE_INT_ARGB);
         ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -90,6 +143,21 @@ class ChannelDirectoryServiceTest {
         return out.toByteArray();
     }

+    private String createSampleAsset(String channel) throws Exception {
+        MockMultipartFile file = new MockMultipartFile("file", "image.png", "image/png", samplePng());
+        return service.createAsset(channel, file).orElseThrow().id();
+    }
+
+    private TransformRequest validTransform() {
+        TransformRequest transform = new TransformRequest();
+        transform.setX(10);
+        transform.setY(20);
+        transform.setWidth(200);
+        transform.setHeight(150);
+        transform.setRotation(45);
+        return transform;
+    }
+
     private void setupInMemoryPersistence() {
         Map<String, Channel> channels = new ConcurrentHashMap<>();
         Map<String, Asset> assets = new ConcurrentHashMap<>();
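The tests above exercise the service-level checks directly. A web-layer test could additionally confirm that the bean-validation annotations on TransformRequest surface as 400 responses; the sketch below is illustrative only and assumes a controller mapping, request path, and unsecured test context that are not part of this commit.

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

// Hedged sketch: assumes a controller (not in this diff) that maps
// PUT /api/channels/{broadcaster}/assets/{id}/transform and validates the body with @Valid.
@SpringBootTest
@AutoConfigureMockMvc
class TransformValidationWebTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    void rejectsNegativePlaybackSpeedAtTheWebBoundary() throws Exception {
        // speed of -1.0 violates @DecimalMin("0.0") on TransformRequest, so the request
        // should fail validation before any service code runs.
        String body = """
                {"x":10,"y":20,"width":200,"height":150,"rotation":0,"speed":-1.0}
                """;

        mockMvc.perform(put("/api/channels/caster/assets/some-id/transform")
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(body))
                .andExpect(status().isBadRequest());
    }
}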