1 Commit

SHA1: 49bee944d8
Message: openapi: rethink integers
Date: 2025-07-25 19:15:20 -07:00
Checks: continuous-integration/drone/push build is passing (all checks were successful)
178 changed files with 4967 additions and 33576 deletions

.drone.yml

@@ -7,45 +7,7 @@ platform:
arch: amd64
steps:
- name: build-backend
image: golang:1.24.0
environment:
GIT_USER:
from_secret: GIT_USER
GIT_PASS:
from_secret: GIT_PASS
commands:
- echo "machine git.itzana.me login $${GIT_USER} password $${GIT_PASS}" > ~/.netrc
- chmod 600 ~/.netrc
- make build-backend
when:
branch:
- master
- staging
- name: build-validator
image: clux/muslrust:1.91.0-stable
commands:
- make build-validator
when:
branch:
- master
- staging
- name: build-frontend
image: oven/bun:1.3.3
commands:
- apt-get update
- apt-get install make
- make build-frontend
when:
branch:
- master
- staging
event:
- pull_request
- name: image-backend
- name: api
image: plugins/docker
settings:
registry: registry.itzana.me
@@ -57,10 +19,8 @@ steps:
from_secret: REGISTRY_USER
password:
from_secret: REGISTRY_PASS
dockerfile: Dockerfile
dockerfile: Containerfile
context: .
depends_on:
- build-backend
when:
branch:
- master
@@ -68,7 +28,7 @@ steps:
event:
- push
- name: image-frontend
- name: frontend
image: plugins/docker
settings:
registry: registry.itzana.me
@@ -89,7 +49,7 @@ steps:
event:
- push
- name: image-validator
- name: validator
image: plugins/docker
settings:
registry: registry.itzana.me
@@ -102,9 +62,7 @@ steps:
password:
from_secret: REGISTRY_PASS
dockerfile: validation/Containerfile
context: .
depends_on:
- build-validator
context: validation
when:
branch:
- master
@@ -125,9 +83,9 @@ steps:
PASSWORD:
from_secret: ARGO_PASS
depends_on:
- image-backend
- image-frontend
- image-validator
- api
- frontend
- validator
when:
branch:
- master
@@ -136,19 +94,64 @@ steps:
- push
# pr dry run
- name: api-pr
image: plugins/docker
settings:
registry: registry.itzana.me
repo: registry.itzana.me/strafesnet/maptest-validator
tags:
- ${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
- ${DRONE_BRANCH}
dockerfile: Containerfile
context: .
dry_run: true
when:
event:
- pull_request
- name: frontend-pr
image: plugins/docker
settings:
registry: registry.itzana.me
repo: registry.itzana.me/strafesnet/maptest-validator
tags:
- ${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
- ${DRONE_BRANCH}
dockerfile: web/Containerfile
context: web
dry_run: true
when:
event:
- pull_request
- name: validator-pr
image: plugins/docker
settings:
registry: registry.itzana.me
repo: registry.itzana.me/strafesnet/maptest-validator
tags:
- ${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
- ${DRONE_BRANCH}
dockerfile: validation/Containerfile
context: validation
dry_run: true
when:
event:
- pull_request
- name: build-pr
image: alpine
commands:
- echo "Success!"
depends_on:
- build-backend
- build-validator
- build-frontend
- api-pr
- frontend-pr
- validator-pr
when:
event:
- pull_request
---
kind: signature
hmac: 6de9d4b91f14b30561856daf275d1fd523e1ce7a5a3651b660f0d8907b4692fb
hmac: 11e6d7f1eb839d3798fdcb642ca5523c011bd14c1f3a0343a9c3106bab9ef142
...

.gitignore (vendored, 1 line changed)

@@ -1,3 +1,2 @@
build
.idea
/target

Cargo.lock (generated, 1225 lines changed): file diff suppressed because it is too large

Cargo.toml

@@ -1,6 +1,6 @@
[workspace]
members = [
"validation",
"submissions-api-rs",
"validation/api",
]
resolver = "2"

Containerfile (new file, 33 lines)

@@ -0,0 +1,33 @@
# Stage 1: Build
FROM registry.itzana.me/docker-proxy/golang:1.24 AS builder
# Set the working directory in the container
WORKDIR /app
# Copy go.mod and go.sum files
COPY go.mod go.sum ./
# Copy the entire project
COPY . .
# Build the Go application
RUN CGO_ENABLED=0 GOOS=linux go build -o service ./cmd/maps-service/service.go
# Stage 2: Run
FROM registry.itzana.me/docker-proxy/alpine:3.21
# Set up a non-root user for security
RUN addgroup -S appgroup && adduser -S appuser -G appgroup
USER appuser
# Set the working directory in the container
WORKDIR /home/appuser
# Copy the built application from the builder stage
COPY --from=builder /app/service .
# Expose application port (adjust if needed)
EXPOSE 8081
# Command to run the application
ENTRYPOINT ["./service"]

Dockerfile (deleted)

@@ -1,3 +0,0 @@
FROM alpine
COPY build/server /
ENTRYPOINT ["/server"]

Makefile

@@ -1,41 +1,12 @@
clean:
rm -rf build
rm -rf web/build
submissions:
DOCKER_BUILDKIT=1 docker build . -f Containerfile -t maps-service-submissions
# build
build-backend:
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o build/server cmd/maps-service/service.go
web:
docker build web -f web/Containerfile -t maps-service-web
build-validator:
cargo build --release --target x86_64-unknown-linux-musl --bin maps-validation
validation:
docker build validation -f validation/Containerfile -t maps-service-validation
build-frontend:
rm -rf web/build
cd web && bun install --frozen-lockfile
cd web && bun run build
all: submissions web validation
build: build-backend build-validator build-frontend
# image
image-backend:
docker build . -t maptest-api
image-validator:
docker build . -f validation/Containerfile -t maptest-validator
image-frontend:
docker build web -f web/Containerfile -t maptest-frontend
# docker
docker-backend:
make build-backend
make image-backend
docker-validator:
make build-validator
make image-validator
docker-frontend:
make image-frontend
docker: docker-backend docker-validator docker-frontend
.PHONY: clean build-backend build-validator build-frontend build image-backend image-validator image-frontend docker-backend docker-validator docker-frontend docker
.PHONY: submissions web validation

README.md

@@ -13,11 +13,11 @@ Prerequisite: golang installed
1. Run `go generate` to ensure the generated API is up-to-date. This project uses [ogen](https://github.com/ogen-go/ogen).
```bash
go generate
go generate -run "go run github.com/ogen-go/ogen/cmd/ogen@latest --target api --clean openapi.yaml"
```
2. Build the project.
```bash
make build-backend
go build git.itzana.me/strafesnet/maps-service
```
By default, the project opens at `localhost:8080`.
@@ -47,16 +47,14 @@ AUTH_HOST="http://localhost:8083/"
Prerequisite: rust installed
1. `cargo run --release -p maps-validation`
1. `cd validation`
2. `cargo run --release`
Environment Variables:
- ROBLOX_GROUP_ID
- RBXCOOKIE
- RBX_API_KEY
- API_HOST_INTERNAL
- NATS_HOST
- LOAD_ASSET_VERSION_PLACE_ID
- LOAD_ASSET_VERSION_UNIVERSE_ID
#### License
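
The README hunk above notes that the project opens at `localhost:8080` by default. A minimal smoke-test sketch in Go; the root path `/` is an assumption, since the exact endpoint is not specified in this diff:
```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

// Hit the locally running maps-service on the default port noted in the
// README (localhost:8080). The "/" path is a placeholder; swap in a real
// endpoint, e.g. something under /public-api/v1, if needed.
func main() {
	resp, err := http.Get("http://localhost:8080/")
	if err != nil {
		log.Fatalf("service not reachable: %v", err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```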

cmd/maps-service/service.go

@@ -7,16 +7,6 @@ import (
"os"
)
// @title StrafesNET Maps API
// @version 1.0
// @description Obtain an api key at https://dev.strafes.net
// @description Requires Maps:Read permission
// @BasePath /public-api/v1
// @securityDefinitions.apikey ApiKeyAuth
// @in header
// @name X-API-Key
func main() {
log.SetLevel(log.InfoLevel)
log.SetFormatter(&log.JSONFormatter{})
@@ -25,8 +15,6 @@ func main() {
app := cmds.NewApp()
app.Commands = []*cli.Command{
cmds.NewServeCommand(),
cmds.NewApiCommand(),
cmds.NewAORCommand(),
}
if err := app.Run(os.Args); err != nil {


@@ -13,7 +13,7 @@ services:
submissions:
image:
maptest-api
maps-service-submissions
container_name: submissions
command: [
# debug
@@ -34,7 +34,7 @@ services:
"--data-rpc-host","dataservice:9000",
]
env_file:
- /home/quat/auth-compose/strafesnet_staging.env
- ../auth-compose/strafesnet_staging.env
depends_on:
- authrpc
- nats
@@ -45,7 +45,7 @@ services:
web:
image:
maptest-frontend
maps-service-web
networks:
- maps-service-network
ports:
@@ -56,16 +56,14 @@ services:
validation:
image:
maptest-validator
maps-service-validation
container_name: validation
env_file:
- /home/quat/auth-compose/strafesnet_staging.env
- ../auth-compose/strafesnet_staging.env
environment:
- ROBLOX_GROUP_ID=17032139 # "None" is special case string value
- API_HOST_INTERNAL=http://submissions:8083/v1
- NATS_HOST=nats:4222
- LOAD_ASSET_VERSION_PLACE_ID=14001440964
- LOAD_ASSET_VERSION_UNIVERSE_ID=4850603885
depends_on:
- nats
# note: this races the submissions which creates a nats stream
@@ -105,7 +103,7 @@ services:
- REDIS_ADDR=authredis:6379
- RBX_GROUP_ID=17032139
env_file:
- /home/quat/auth-compose/auth-service.env
- ../auth-compose/auth-service.env
depends_on:
- authredis
networks:
@@ -119,7 +117,7 @@ services:
environment:
- REDIS_ADDR=authredis:6379
env_file:
- /home/quat/auth-compose/auth-service.env
- ../auth-compose/auth-service.env
depends_on:
- authredis
networks:
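
The compose file above passes the validation service a `NATS_HOST` such as `nats:4222`, and go.mod lists `github.com/nats-io/nats.go` for the Go side. A rough connection sketch, assuming only a plain Connect call; the stream setup hinted at in the compose comment is not shown in this diff:
```go
package main

import (
	"log"
	"os"

	"github.com/nats-io/nats.go"
)

func main() {
	// NATS_HOST comes from the compose environment, e.g. "nats:4222".
	host := os.Getenv("NATS_HOST")
	if host == "" {
		host = "localhost:4222" // assumed local fallback
	}
	nc, err := nats.Connect("nats://" + host)
	if err != nil {
		log.Fatalf("connect to NATS at %s: %v", host, err)
	}
	defer nc.Close()
	log.Println("connected to", nc.ConnectedUrl())
}
```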

docs/docs.go (deleted)

@@ -1,242 +0,0 @@
// Package docs Code generated by swaggo/swag. DO NOT EDIT
package docs
import "github.com/swaggo/swag"
const docTemplate = `{
"schemes": {{ marshal .Schemes }},
"swagger": "2.0",
"info": {
"description": "{{escape .Description}}",
"title": "{{.Title}}",
"contact": {},
"version": "{{.Version}}"
},
"host": "{{.Host}}",
"basePath": "{{.BasePath}}",
"paths": {
"/map": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Get a list of maps",
"produces": [
"application/json"
],
"tags": [
"maps"
],
"summary": "List maps",
"parameters": [
{
"maximum": 100,
"minimum": 1,
"type": "integer",
"default": 10,
"description": "Page size (max 100)",
"name": "page_size",
"in": "query"
},
{
"minimum": 1,
"type": "integer",
"default": 1,
"description": "Page number",
"name": "page_number",
"in": "query"
},
{
"type": "integer",
"name": "game_id",
"in": "query"
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/PagedResponse-Map"
}
},
"default": {
"description": "General error response",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
}
},
"/map/{id}": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Get a specific map by its ID",
"produces": [
"application/json"
],
"tags": [
"maps"
],
"summary": "Get map by ID",
"parameters": [
{
"type": "integer",
"description": "Map ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/Response-Map"
}
},
"404": {
"description": "Map not found",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "General error response",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
}
}
},
"definitions": {
"Error": {
"type": "object",
"properties": {
"error": {
"type": "string"
}
}
},
"Map": {
"type": "object",
"properties": {
"asset_version": {
"type": "integer"
},
"created_at": {
"type": "string"
},
"creator": {
"type": "string"
},
"date": {
"type": "string"
},
"display_name": {
"type": "string"
},
"game_id": {
"type": "integer"
},
"id": {
"type": "integer"
},
"load_count": {
"type": "integer"
},
"modes": {
"type": "integer"
},
"submitter": {
"type": "integer"
},
"thumbnail": {
"type": "integer"
},
"updated_at": {
"type": "string"
}
}
},
"PagedResponse-Map": {
"type": "object",
"properties": {
"data": {
"description": "Data contains the actual response payload",
"type": "array",
"items": {
"$ref": "#/definitions/Map"
}
},
"pagination": {
"description": "Pagination contains information about paging",
"allOf": [
{
"$ref": "#/definitions/Pagination"
}
]
}
}
},
"Pagination": {
"type": "object",
"properties": {
"page": {
"description": "Current page number",
"type": "integer"
},
"page_size": {
"description": "Number of items per page",
"type": "integer"
}
}
},
"Response-Map": {
"type": "object",
"properties": {
"data": {
"description": "Data contains the actual response payload",
"allOf": [
{
"$ref": "#/definitions/Map"
}
]
}
}
}
},
"securityDefinitions": {
"ApiKeyAuth": {
"type": "apiKey",
"name": "X-API-Key",
"in": "header"
}
}
}`
// SwaggerInfo holds exported Swagger Info so clients can modify it
var SwaggerInfo = &swag.Spec{
Version: "1.0",
Host: "",
BasePath: "/public-api/v1",
Schemes: []string{},
Title: "StrafesNET Maps API",
Description: "Obtain an api key at https://dev.strafes.net\nRequires Maps:Read permission",
InfoInstanceName: "swagger",
SwaggerTemplate: docTemplate,
LeftDelim: "{{",
RightDelim: "}}",
}
func init() {
swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo)
}

docs/swagger.json (deleted)

@@ -1,217 +0,0 @@
{
"swagger": "2.0",
"info": {
"description": "Obtain an api key at https://dev.strafes.net\nRequires Maps:Read permission",
"title": "StrafesNET Maps API",
"contact": {},
"version": "1.0"
},
"basePath": "/public-api/v1",
"paths": {
"/map": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Get a list of maps",
"produces": [
"application/json"
],
"tags": [
"maps"
],
"summary": "List maps",
"parameters": [
{
"maximum": 100,
"minimum": 1,
"type": "integer",
"default": 10,
"description": "Page size (max 100)",
"name": "page_size",
"in": "query"
},
{
"minimum": 1,
"type": "integer",
"default": 1,
"description": "Page number",
"name": "page_number",
"in": "query"
},
{
"type": "integer",
"name": "game_id",
"in": "query"
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/PagedResponse-Map"
}
},
"default": {
"description": "General error response",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
}
},
"/map/{id}": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Get a specific map by its ID",
"produces": [
"application/json"
],
"tags": [
"maps"
],
"summary": "Get map by ID",
"parameters": [
{
"type": "integer",
"description": "Map ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/Response-Map"
}
},
"404": {
"description": "Map not found",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "General error response",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
}
}
},
"definitions": {
"Error": {
"type": "object",
"properties": {
"error": {
"type": "string"
}
}
},
"Map": {
"type": "object",
"properties": {
"asset_version": {
"type": "integer"
},
"created_at": {
"type": "string"
},
"creator": {
"type": "string"
},
"date": {
"type": "string"
},
"display_name": {
"type": "string"
},
"game_id": {
"type": "integer"
},
"id": {
"type": "integer"
},
"load_count": {
"type": "integer"
},
"modes": {
"type": "integer"
},
"submitter": {
"type": "integer"
},
"thumbnail": {
"type": "integer"
},
"updated_at": {
"type": "string"
}
}
},
"PagedResponse-Map": {
"type": "object",
"properties": {
"data": {
"description": "Data contains the actual response payload",
"type": "array",
"items": {
"$ref": "#/definitions/Map"
}
},
"pagination": {
"description": "Pagination contains information about paging",
"allOf": [
{
"$ref": "#/definitions/Pagination"
}
]
}
}
},
"Pagination": {
"type": "object",
"properties": {
"page": {
"description": "Current page number",
"type": "integer"
},
"page_size": {
"description": "Number of items per page",
"type": "integer"
}
}
},
"Response-Map": {
"type": "object",
"properties": {
"data": {
"description": "Data contains the actual response payload",
"allOf": [
{
"$ref": "#/definitions/Map"
}
]
}
}
}
},
"securityDefinitions": {
"ApiKeyAuth": {
"type": "apiKey",
"name": "X-API-Key",
"in": "header"
}
}
}

docs/swagger.yaml (deleted)

@@ -1,141 +0,0 @@
basePath: /public-api/v1
definitions:
Error:
properties:
error:
type: string
type: object
Map:
properties:
asset_version:
type: integer
created_at:
type: string
creator:
type: string
date:
type: string
display_name:
type: string
game_id:
type: integer
id:
type: integer
load_count:
type: integer
modes:
type: integer
submitter:
type: integer
thumbnail:
type: integer
updated_at:
type: string
type: object
PagedResponse-Map:
properties:
data:
description: Data contains the actual response payload
items:
$ref: '#/definitions/Map'
type: array
pagination:
allOf:
- $ref: '#/definitions/Pagination'
description: Pagination contains information about paging
type: object
Pagination:
properties:
page:
description: Current page number
type: integer
page_size:
description: Number of items per page
type: integer
type: object
Response-Map:
properties:
data:
allOf:
- $ref: '#/definitions/Map'
description: Data contains the actual response payload
type: object
info:
contact: {}
description: |-
Obtain an api key at https://dev.strafes.net
Requires Maps:Read permission
title: StrafesNET Maps API
version: "1.0"
paths:
/map:
get:
description: Get a list of maps
parameters:
- default: 10
description: Page size (max 100)
in: query
maximum: 100
minimum: 1
name: page_size
type: integer
- default: 1
description: Page number
in: query
minimum: 1
name: page_number
type: integer
- in: query
name: game_id
type: integer
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/PagedResponse-Map'
default:
description: General error response
schema:
$ref: '#/definitions/Error'
security:
- ApiKeyAuth: []
summary: List maps
tags:
- maps
/map/{id}:
get:
description: Get a specific map by its ID
parameters:
- description: Map ID
in: path
name: id
required: true
type: integer
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/Response-Map'
"404":
description: Map not found
schema:
$ref: '#/definitions/Error'
default:
description: General error response
schema:
$ref: '#/definitions/Error'
security:
- ApiKeyAuth: []
summary: Get map by ID
tags:
- maps
securityDefinitions:
ApiKeyAuth:
in: header
name: X-API-Key
type: apiKey
swagger: "2.0"
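
The deleted docs.go, swagger.json, and swagger.yaml above all describe the same public schema. A rough Go sketch of the corresponding response shapes, with JSON field names taken from the removed definitions; the integer widths, struct names, and package name are assumptions:
```go
package publicapi

// Error mirrors the removed "Error" definition.
type Error struct {
	Error string `json:"error"`
}

// Map mirrors the removed "Map" definition.
type Map struct {
	ID           int64  `json:"id"`
	DisplayName  string `json:"display_name"`
	Creator      string `json:"creator"`
	GameID       int32  `json:"game_id"`
	Date         string `json:"date"`
	CreatedAt    string `json:"created_at"`
	UpdatedAt    string `json:"updated_at"`
	AssetVersion int64  `json:"asset_version"`
	LoadCount    int32  `json:"load_count"`
	Modes        int32  `json:"modes"`
	Submitter    int64  `json:"submitter"`
	Thumbnail    int64  `json:"thumbnail"`
}

// Pagination mirrors the removed "Pagination" definition.
type Pagination struct {
	Page     int32 `json:"page"`      // current page number
	PageSize int32 `json:"page_size"` // number of items per page
}

// PagedResponseMap mirrors "PagedResponse-Map".
type PagedResponseMap struct {
	Data       []Map      `json:"data"`       // actual response payload
	Pagination Pagination `json:"pagination"` // paging information
}

// ResponseMap mirrors "Response-Map".
type ResponseMap struct {
	Data Map `json:"data"` // actual response payload
}
```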


@@ -1,4 +1,3 @@
package main
//go:generate swag init -g ./cmd/maps-service/service.go
//go:generate go run github.com/ogen-go/ogen/cmd/ogen@latest --target pkg/api --clean openapi.yaml

go.mod (97 lines changed)

@@ -1,105 +1,64 @@
module git.itzana.me/strafesnet/maps-service
go 1.24.0
go 1.22
toolchain go1.24.5
toolchain go1.23.3
require (
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed
git.itzana.me/strafesnet/go-grpc v0.0.0-20250815013325-1c84f73bdcb1
git.itzana.me/strafesnet/go-grpc v0.0.0-20250724030029-845bea991815
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9
github.com/dchest/siphash v1.2.3
github.com/gin-gonic/gin v1.10.1
github.com/go-faster/errors v0.7.1
github.com/go-faster/jx v1.2.0
github.com/go-faster/jx v1.1.0
github.com/nats-io/nats.go v1.37.0
github.com/ogen-go/ogen v1.18.0
github.com/redis/go-redis/v9 v9.10.0
github.com/ogen-go/ogen v1.2.1
github.com/sirupsen/logrus v1.9.3
github.com/swaggo/files v1.0.1
github.com/swaggo/gin-swagger v1.6.0
github.com/swaggo/swag v1.16.6
github.com/urfave/cli/v2 v2.27.6
go.opentelemetry.io/otel v1.39.0
go.opentelemetry.io/otel/metric v1.39.0
go.opentelemetry.io/otel/trace v1.39.0
github.com/urfave/cli/v2 v2.27.5
go.opentelemetry.io/otel v1.32.0
go.opentelemetry.io/otel/metric v1.32.0
go.opentelemetry.io/otel/trace v1.32.0
google.golang.org/grpc v1.48.0
gorm.io/driver/postgres v1.6.0
gorm.io/gorm v1.25.12
gorm.io/driver/postgres v1.5.10
gorm.io/gorm v1.25.10
)
require (
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/PuerkitoBio/purell v1.1.1 // indirect
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
github.com/bytedance/sonic v1.11.6 // indirect
github.com/bytedance/sonic/loader v0.1.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.19.6 // indirect
github.com/go-openapi/spec v0.20.4 // indirect
github.com/go-openapi/swag v0.19.15 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.20.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgx/v5 v5.6.0 // indirect
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.5.5 // indirect
github.com/jackc/puddle/v2 v2.2.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.18.1 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mailru/easyjson v0.7.6 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/klauspost/compress v1.17.6 // indirect
github.com/nats-io/nkeys v0.4.7 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
golang.org/x/arch v0.8.0 // indirect
golang.org/x/crypto v0.46.0 // indirect
golang.org/x/mod v0.31.0 // indirect
golang.org/x/tools v0.40.0 // indirect
golang.org/x/crypto v0.23.0 // indirect
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 // indirect
google.golang.org/protobuf v1.34.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
google.golang.org/protobuf v1.28.0 // indirect
)
require (
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/dlclark/regexp2 v1.11.0 // indirect
github.com/fatih/color v1.17.0 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/go-faster/yaml v0.4.6 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
// github.com/golang/protobuf v1.5.4 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/segmentio/asm v1.2.1 // indirect
github.com/segmentio/asm v1.2.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.uber.org/zap v1.27.1 // indirect
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 // indirect
golang.org/x/net v0.48.0 // indirect
golang.org/x/sync v0.19.0 // indirect
golang.org/x/sys v0.39.0 // indirect
golang.org/x/text v0.32.0 // indirect
go.uber.org/zap v1.27.0 // indirect
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.20.0 // indirect
golang.org/x/text v0.15.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)

go.sum (250 lines changed)

@@ -1,36 +1,14 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed h1:eGWIQx2AOrSsLC2dieuSs8MCliRE60tvpZnmxsTBtKc=
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed/go.mod h1:KJal0K++M6HEzSry6JJ2iDPZtOQn5zSstNlDbU3X4Jg=
git.itzana.me/strafesnet/go-grpc v0.0.0-20250815013325-1c84f73bdcb1 h1:imXibfeYcae6og0TTDUFRQ3CQtstGjIoLbCn+pezD2o=
git.itzana.me/strafesnet/go-grpc v0.0.0-20250815013325-1c84f73bdcb1/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
git.itzana.me/strafesnet/go-grpc v0.0.0-20250724030029-845bea991815 h1:hkuOnehphRXUq/2z2UYgoqTq5MJj1GsWfshyc7bXda8=
git.itzana.me/strafesnet/go-grpc v0.0.0-20250724030029-845bea991815/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9 h1:7lU6jyR7S7Rhh1dnUp7GyIRHUTBXZagw8F4n4hOyxLw=
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9/go.mod h1:uyYerSieEt4v0MJCdPLppG0LtJ4Yj035vuTetWGsxjY=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
@@ -39,18 +17,13 @@ github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dchest/siphash v1.2.3 h1:QXwFc8cFOR2dSa/gE6o/HokBMWtLUaNDVd+22aKHeEA=
github.com/dchest/siphash v1.2.3/go.mod h1:0NvQU092bT0ipiFN++/rXm69QG9tVxLAlQHIXMPAkHc=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -59,51 +32,19 @@ github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4=
github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo=
github.com/go-faster/jx v1.1.0 h1:ZsW3wD+snOdmTDy9eIVgQdjUpXRRV4rqW8NS3t+20bg=
github.com/go-faster/jx v1.1.0/go.mod h1:vKDNikrKoyUmpzaJ0OkIkRQClNHFX/nF3dnTJZb3skg=
github.com/go-faster/jx v1.2.0 h1:T2YHJPrFaYu21fJtUxC9GzmluKu8rVIFDwwGBKTDseI=
github.com/go-faster/jx v1.2.0/go.mod h1:UWLOVDmMG597a5tBFPLIWJdUxz5/2emOpfsj9Neg0PE=
github.com/go-faster/yaml v0.4.6 h1:lOK/EhI04gCpPgPhgt0bChS6bvw7G3WwI8xxVe0sw9I=
github.com/go-faster/yaml v0.4.6/go.mod h1:390dRIvV4zbnO7qC9FGo6YYutc+wyyUSHBgbXL52eXk=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M=
github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@@ -127,9 +68,8 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@@ -137,125 +77,66 @@ github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY=
github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.5.5 h1:amBjrZVmksIdNjxGW/IiIMzxMKZFelXbUoPNb+8sjQw=
github.com/jackc/pgx/v5 v5.5.5/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.2/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.18.1 h1:bcSGx7UbpBqMChDtsF28Lw6v/G94LPrrbMbdC3JH2co=
github.com/klauspost/compress v1.18.1/go.mod h1:ZQFFVG+MdnR0P+l6wpXgIL4NTtwiKIdBnrBd8Nrxr+0=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/nats-io/nats.go v1.37.0 h1:07rauXbVnnJvv1gfIyghFEo6lUcYRY0WXc3x7x0vUxE=
github.com/nats-io/nats.go v1.37.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/ogen-go/ogen v1.2.1 h1:C5A0lvUMu2wl+eWIxnpXMWnuOJ26a2FyzR1CIC2qG0M=
github.com/ogen-go/ogen v1.2.1/go.mod h1:P2zQdEu8UqaVRfD5GEFvl+9q63VjMLvDquq1wVbyInM=
github.com/ogen-go/ogen v1.18.0 h1:6RQ7lFBjOeNaUWu4getfqIh4GJbEY4hqKuzDtec/g60=
github.com/ogen-go/ogen v1.18.0/go.mod h1:dHFr2Wf6cA7tSxMI+zPC21UR5hAlDw8ZYUkK3PziURY=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/redis/go-redis/v9 v9.10.0 h1:FxwK3eV8p/CQa0Ch276C7u2d0eNC9kCmAYQ7mCXCzVs=
github.com/redis/go-redis/v9 v9.10.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE=
github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg=
github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M=
github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo=
github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI=
github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/urfave/cli/v2 v2.27.6 h1:VdRdS98FNhKZ8/Az8B7MTyGQmpIr36O1EHybx/LaZ4g=
github.com/urfave/cli/v2 v2.27.6/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w=
github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U=
go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg=
go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M=
go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8=
go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM=
go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
@@ -263,103 +144,55 @@ go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
go.uber.org/zap v1.27.1 h1:08RqriUEv8+ArZRYSTXy1LeBScaMpVSTBhCeaZYfMYc=
go.uber.org/zap v1.27.1/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc h1:O9NuF4s+E/PvMIy+9IUZB9znFwUIXEWSstNjek6VpVg=
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
@@ -389,11 +222,9 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@@ -401,15 +232,12 @@ gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4=
gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo=
gorm.io/driver/postgres v1.5.10 h1:7Lggqempgy496c0WfHXsYWxk3Th+ZcW66/21QhVFdeE=
gorm.io/driver/postgres v1.5.10/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/gorm v1.21.11/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0=
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
gorm.io/gorm v1.25.10 h1:dQpO+33KalOA+aFYGlK+EfxcI5MbO7EP2yYygwh9h+s=
gorm.io/gorm v1.25.10/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@@ -1,493 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Configuration
CLUSTER_NAME="${KIND_CLUSTER_NAME:-maps-service-local}"
INFRA_PATH="${INFRA_PATH:-$HOME/Documents/Projects/infra}"
NAMESPACE="${NAMESPACE:-default}"
REGISTRY_NAME="kind-registry"
REGISTRY_PORT="5001"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
log_info() {
echo -e "${GREEN}[INFO]${NC} $1"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
# Check dependencies
check_dependencies() {
log_info "Checking dependencies..."
local deps=("kind" "kubectl" "docker")
for dep in "${deps[@]}"; do
if ! command -v "$dep" &> /dev/null; then
log_error "$dep is not installed. Please install it first."
exit 1
fi
done
log_info "All dependencies are installed"
}
# Create local container registry
create_registry() {
if [ "$(docker ps -q -f name=${REGISTRY_NAME})" ]; then
log_info "Registry ${REGISTRY_NAME} already running"
return 0
fi
if [ "$(docker ps -aq -f name=${REGISTRY_NAME})" ]; then
log_info "Starting existing registry ${REGISTRY_NAME}"
docker start ${REGISTRY_NAME}
return 0
fi
log_info "Creating local registry ${REGISTRY_NAME}..."
docker run -d --restart=always -p "127.0.0.1:${REGISTRY_PORT}:5000" --name "${REGISTRY_NAME}" registry:2
}
# Create KIND cluster with registry
create_cluster() {
if kind get clusters | grep -q "^${CLUSTER_NAME}$"; then
log_warn "Cluster ${CLUSTER_NAME} already exists"
read -p "Do you want to delete and recreate it? (y/N): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
log_info "Deleting existing cluster..."
kind delete cluster --name "${CLUSTER_NAME}"
else
log_info "Using existing cluster"
kubectl config use-context "kind-${CLUSTER_NAME}"
return 0
fi
fi
log_info "Creating KIND cluster ${CLUSTER_NAME}..."
cat <<EOF | kind create cluster --name "${CLUSTER_NAME}" --config=-
kind: Cluster
apiVersion: kind.x-k8s.io/v1alpha4
containerdConfigPatches:
- |-
[plugins."io.containerd.grpc.v1.cri".registry.mirrors."localhost:${REGISTRY_PORT}"]
endpoint = ["http://${REGISTRY_NAME}:5000"]
nodes:
- role: control-plane
kubeadmConfigPatches:
- |
kind: InitConfiguration
nodeRegistration:
kubeletExtraArgs:
node-labels: "ingress-ready=true"
extraPortMappings:
- containerPort: 80
hostPort: 80
protocol: TCP
- containerPort: 443
hostPort: 443
protocol: TCP
- containerPort: 8080
hostPort: 8080
protocol: TCP
- containerPort: 3000
hostPort: 3000
protocol: TCP
EOF
# Connect the registry to the cluster network
if [ "$(docker inspect -f='{{json .NetworkSettings.Networks.kind}}' "${REGISTRY_NAME}")" = 'null' ]; then
log_info "Connecting registry to cluster network..."
docker network connect "kind" "${REGISTRY_NAME}"
fi
# Document the local registry
kubectl apply -f - <<EOF
apiVersion: v1
kind: ConfigMap
metadata:
name: local-registry-hosting
namespace: kube-public
data:
localRegistryHosting.v1: |
host: "localhost:${REGISTRY_PORT}"
help: "https://kind.sigs.k8s.io/docs/user/local-registry/"
EOF
log_info "KIND cluster created successfully"
}
# Build Docker images
build_images() {
log_info "Building Docker images..."
log_info "Building backend..."
make build-backend
docker build -t localhost:${REGISTRY_PORT}/maptest-api:local .
docker push localhost:${REGISTRY_PORT}/maptest-api:local
log_info "Building validator..."
make build-validator
docker build -f validation/Containerfile -t localhost:${REGISTRY_PORT}/maptest-validator:local .
docker push localhost:${REGISTRY_PORT}/maptest-validator:local
log_info "Building frontend..."
docker build -f web/Containerfile -t localhost:${REGISTRY_PORT}/maptest-frontend:local web
docker push localhost:${REGISTRY_PORT}/maptest-frontend:local
log_info "All images built and pushed to local registry"
}
# Create secrets
create_secrets() {
log_info "Creating Kubernetes secrets..."
# Create dummy secrets for local development
kubectl create secret generic cockroach-qtdb \
--from-literal=HOST=data-postgres \
--from-literal=PORT=5432 \
--from-literal=USER=postgres \
--from-literal=PASS=localpassword \
--dry-run=client -o yaml | kubectl apply -f -
kubectl create secret generic maptest-cookie \
--from-literal=api=dummy-api-key \
--dry-run=client -o yaml | kubectl apply -f -
kubectl create secret generic auth-service-secrets \
--from-literal=DISCORD_CLIENT_ID=dummy \
--from-literal=DISCORD_CLIENT_SECRET=dummy \
--from-literal=RBX_API_KEY=dummy \
--dry-run=client -o yaml | kubectl apply -f -
log_info "Secrets created"
}
# Deploy dependencies
deploy_dependencies() {
log_info "Deploying dependencies..."
# Deploy PostgreSQL (manual deployment)
log_info "Deploying PostgreSQL..."
kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
name: data-postgres
spec:
ports:
- port: 5432
targetPort: 5432
selector:
app: data-postgres
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: data-postgres
spec:
replicas: 1
selector:
matchLabels:
app: data-postgres
template:
metadata:
labels:
app: data-postgres
spec:
containers:
- name: postgres
image: postgres:15
ports:
- containerPort: 5432
env:
- name: POSTGRES_USER
value: postgres
- name: POSTGRES_PASSWORD
value: localpassword
- name: POSTGRES_DB
value: postgres
volumeMounts:
- name: postgres-storage
mountPath: /var/lib/postgresql/data
volumes:
- name: postgres-storage
emptyDir: {}
EOF
# Deploy Redis (using a simple deployment)
log_info "Deploying Redis..."
kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
name: redis-master
spec:
ports:
- port: 6379
targetPort: 6379
selector:
app: redis
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: redis
spec:
replicas: 1
selector:
matchLabels:
app: redis
template:
metadata:
labels:
app: redis
spec:
containers:
- name: redis
image: redis:latest
ports:
- containerPort: 6379
command: ["redis-server", "--appendonly", "yes"]
EOF
# Deploy NATS
log_info "Deploying NATS..."
kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
name: nats
spec:
ports:
- port: 4222
targetPort: 4222
selector:
app: nats
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: nats
spec:
replicas: 1
selector:
matchLabels:
app: nats
template:
metadata:
labels:
app: nats
spec:
containers:
- name: nats
image: nats:latest
args: ["-js"]
ports:
- containerPort: 4222
EOF
# Deploy Auth Service (if needed)
if [ -d "${INFRA_PATH}/applications/auth-service/base" ]; then
log_info "Deploying auth-service..."
kubectl apply -k "${INFRA_PATH}/applications/auth-service/base" || log_warn "Auth service deployment failed, continuing..."
fi
# Deploy Data Service (if needed)
if [ -d "${INFRA_PATH}/applications/data-service/base" ]; then
log_info "Deploying data-service..."
kubectl apply -k "${INFRA_PATH}/applications/data-service/base" || log_warn "Data service deployment failed, continuing..."
fi
log_info "Waiting for dependencies to be ready..."
kubectl wait --for=condition=ready pod -l app=data-postgres --timeout=120s || log_warn "PostgreSQL not ready yet"
kubectl wait --for=condition=ready pod -l app=nats --timeout=60s || log_warn "NATS not ready yet"
}
# Deploy maps-service
deploy_maps_service() {
log_info "Deploying maps-service..."
# Create a local overlay for development
local temp_dir=$(mktemp -d)
trap "rm -rf ${temp_dir}" EXIT
cp -r "${INFRA_PATH}/applications/maps-services/base" "${temp_dir}/"
# Create a custom kustomization for local development
cat > "${temp_dir}/base/kustomization.yaml" <<EOF
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
commonLabels:
service: maps-service
resources:
- api.yaml
- configmap.yaml
- frontend.yaml
- validator.yaml
images:
- name: registry.itzana.me/strafesnet/maptest-api
newName: localhost:${REGISTRY_PORT}/maptest-api
newTag: local
- name: registry.itzana.me/strafesnet/maptest-frontend
newName: localhost:${REGISTRY_PORT}/maptest-frontend
newTag: local
- name: registry.itzana.me/strafesnet/maptest-validator
newName: localhost:${REGISTRY_PORT}/maptest-validator
newTag: local
patches:
- target:
kind: Deployment
patch: |-
- op: remove
path: /spec/template/spec/imagePullSecrets
EOF
kubectl apply -k "${temp_dir}/base" || {
log_error "Failed to deploy maps-service"
return 1
}
log_info "Waiting for maps-service to be ready..."
kubectl wait --for=condition=ready pod -l app=maptest-api --timeout=120s || log_warn "API not ready yet"
kubectl wait --for=condition=ready pod -l app=maptest-frontend --timeout=120s || log_warn "Frontend not ready yet"
kubectl wait --for=condition=ready pod -l app=maptest-validator --timeout=120s || log_warn "Validator not ready yet"
}
# Port forwarding
setup_port_forwarding() {
log_info "Setting up port forwarding..."
log_info "Port forwarding for API (8080)..."
kubectl port-forward svc/maptest-api 8080:8080 &
log_info "Port forwarding for Frontend (3000)..."
kubectl port-forward svc/maptest-frontend 3000:3000 &
log_info "Port forwarding setup complete"
log_info "You may need to manually manage these port-forwards or run them in separate terminals"
}
# Display cluster info
display_info() {
log_info "======================================"
log_info "KIND Cluster Setup Complete!"
log_info "======================================"
echo
log_info "Cluster name: ${CLUSTER_NAME}"
log_info "Local registry: localhost:${REGISTRY_PORT}"
echo
log_info "Services:"
kubectl get svc
echo
log_info "Pods:"
kubectl get pods
echo
log_info "Access your application:"
log_info " - Frontend: http://localhost:3000"
log_info " - API: http://localhost:8080"
echo
log_info "Useful commands:"
log_info " - View logs: kubectl logs -f <pod-name>"
log_info " - Get pods: kubectl get pods"
log_info " - Delete cluster: kind delete cluster --name ${CLUSTER_NAME}"
log_info " - Rebuild and redeploy: ./kind-setup.sh --rebuild"
}
# Cleanup function
cleanup() {
log_info "Cleaning up..."
kind delete cluster --name "${CLUSTER_NAME}"
docker stop ${REGISTRY_NAME} && docker rm ${REGISTRY_NAME}
log_info "Cleanup complete"
}
# Main function
main() {
local rebuild=false
local cleanup_only=false
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
--rebuild)
rebuild=true
shift
;;
--cleanup)
cleanup_only=true
shift
;;
--infra-path)
INFRA_PATH="$2"
shift 2
;;
--help)
echo "Usage: $0 [OPTIONS]"
echo "Options:"
echo " --rebuild Rebuild and push Docker images"
echo " --cleanup Delete the cluster and registry"
echo " --infra-path PATH Path to infra directory (default: ~/Documents/Projects/infra)"
echo " --help Show this help message"
exit 0
;;
*)
log_error "Unknown option: $1"
exit 1
;;
esac
done
if [ "$cleanup_only" = true ]; then
cleanup
exit 0
fi
# Validate infra path
if [ ! -d "$INFRA_PATH" ]; then
log_error "Infra path does not exist: $INFRA_PATH"
log_error "Please provide a valid path using --infra-path"
exit 1
fi
if [ ! -d "$INFRA_PATH/applications/maps-services" ]; then
log_error "maps-services not found in infra path: $INFRA_PATH/applications/maps-services"
exit 1
fi
log_info "Using infra path: $INFRA_PATH"
check_dependencies
create_registry
create_cluster
if [ "$rebuild" = true ]; then
build_images
fi
create_secrets
deploy_dependencies
deploy_maps_service
display_info
log_info "Setup complete! Press Ctrl+C to stop port forwarding and exit."
log_warn "Note: You may want to set up port-forwarding manually in separate terminals:"
log_info " kubectl port-forward svc/maptest-api 8080:8080"
log_info " kubectl port-forward svc/maptest-frontend 3000:3000"
}
# Run main function
main "$@"

File diff suppressed because it is too large

View File

@@ -5,14 +5,14 @@ package api
import (
"net/http"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/trace"
ht "github.com/ogen-go/ogen/http"
"github.com/ogen-go/ogen/middleware"
"github.com/ogen-go/ogen/ogenerrors"
"github.com/ogen-go/ogen/otelogen"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/trace"
)
var (
@@ -32,7 +32,6 @@ type otelConfig struct {
Tracer trace.Tracer
MeterProvider metric.MeterProvider
Meter metric.Meter
Attributes []attribute.KeyValue
}
func (cfg *otelConfig) initOTEL() {
@@ -216,13 +215,6 @@ func WithMeterProvider(provider metric.MeterProvider) Option {
})
}
// WithAttributes specifies default otel attributes.
func WithAttributes(attributes ...attribute.KeyValue) Option {
return otelOptionFunc(func(cfg *otelConfig) {
cfg.Attributes = attributes
})
}
// WithClient specifies http client to use.
func WithClient(client ht.Client) ClientOption {
return optionFunc[clientConfig](func(cfg *clientConfig) {

File diff suppressed because it is too large

View File

@@ -1,19 +0,0 @@
// Code generated by ogen, DO NOT EDIT.
package api
// setDefaults set default value of fields.
func (s *BatchAssetThumbnailsReq) setDefaults() {
{
val := BatchAssetThumbnailsReqSize("420x420")
s.Size.SetTo(val)
}
}
// setDefaults set default value of fields.
func (s *BatchUserThumbnailsReq) setDefaults() {
{
val := BatchUserThumbnailsReqSize("150x150")
s.Size.SetTo(val)
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -12,12 +12,10 @@ const (
ActionMapfixResetSubmittingOperation OperationName = "ActionMapfixResetSubmitting"
ActionMapfixRetryValidateOperation OperationName = "ActionMapfixRetryValidate"
ActionMapfixRevokeOperation OperationName = "ActionMapfixRevoke"
ActionMapfixTriggerReleaseOperation OperationName = "ActionMapfixTriggerRelease"
ActionMapfixTriggerSubmitOperation OperationName = "ActionMapfixTriggerSubmit"
ActionMapfixTriggerSubmitUncheckedOperation OperationName = "ActionMapfixTriggerSubmitUnchecked"
ActionMapfixTriggerUploadOperation OperationName = "ActionMapfixTriggerUpload"
ActionMapfixTriggerValidateOperation OperationName = "ActionMapfixTriggerValidate"
ActionMapfixUploadedOperation OperationName = "ActionMapfixUploaded"
ActionMapfixValidatedOperation OperationName = "ActionMapfixValidated"
ActionSubmissionAcceptedOperation OperationName = "ActionSubmissionAccepted"
ActionSubmissionRejectOperation OperationName = "ActionSubmissionReject"
@@ -30,9 +28,6 @@ const (
ActionSubmissionTriggerUploadOperation OperationName = "ActionSubmissionTriggerUpload"
ActionSubmissionTriggerValidateOperation OperationName = "ActionSubmissionTriggerValidate"
ActionSubmissionValidatedOperation OperationName = "ActionSubmissionValidated"
BatchAssetThumbnailsOperation OperationName = "BatchAssetThumbnails"
BatchUserThumbnailsOperation OperationName = "BatchUserThumbnails"
BatchUsernamesOperation OperationName = "BatchUsernames"
CreateMapfixOperation OperationName = "CreateMapfix"
CreateMapfixAuditCommentOperation OperationName = "CreateMapfixAuditComment"
CreateScriptOperation OperationName = "CreateScript"
@@ -40,41 +35,31 @@ const (
CreateSubmissionOperation OperationName = "CreateSubmission"
CreateSubmissionAdminOperation OperationName = "CreateSubmissionAdmin"
CreateSubmissionAuditCommentOperation OperationName = "CreateSubmissionAuditComment"
CreateSubmissionReviewOperation OperationName = "CreateSubmissionReview"
DeleteScriptOperation OperationName = "DeleteScript"
DeleteScriptPolicyOperation OperationName = "DeleteScriptPolicy"
DownloadMapAssetOperation OperationName = "DownloadMapAsset"
GetAOREventOperation OperationName = "GetAOREvent"
GetAOREventSubmissionsOperation OperationName = "GetAOREventSubmissions"
GetActiveAOREventOperation OperationName = "GetActiveAOREvent"
GetAssetThumbnailOperation OperationName = "GetAssetThumbnail"
GetMapOperation OperationName = "GetMap"
GetMapfixOperation OperationName = "GetMapfix"
GetOperationOperation OperationName = "GetOperation"
GetScriptOperation OperationName = "GetScript"
GetScriptPolicyOperation OperationName = "GetScriptPolicy"
GetStatsOperation OperationName = "GetStats"
GetSubmissionOperation OperationName = "GetSubmission"
GetUserThumbnailOperation OperationName = "GetUserThumbnail"
ListAOREventsOperation OperationName = "ListAOREvents"
ListMapfixAuditEventsOperation OperationName = "ListMapfixAuditEvents"
ListMapfixesOperation OperationName = "ListMapfixes"
ListMapsOperation OperationName = "ListMaps"
ListScriptPolicyOperation OperationName = "ListScriptPolicy"
ListScriptsOperation OperationName = "ListScripts"
ListSubmissionAuditEventsOperation OperationName = "ListSubmissionAuditEvents"
ListSubmissionReviewsOperation OperationName = "ListSubmissionReviews"
ListSubmissionsOperation OperationName = "ListSubmissions"
MigrateMapsOperation OperationName = "MigrateMaps"
ReleaseSubmissionsOperation OperationName = "ReleaseSubmissions"
SessionRolesOperation OperationName = "SessionRoles"
SessionUserOperation OperationName = "SessionUser"
SessionValidateOperation OperationName = "SessionValidate"
SetMapfixCompletedOperation OperationName = "SetMapfixCompleted"
SetSubmissionCompletedOperation OperationName = "SetSubmissionCompleted"
UpdateMapfixDescriptionOperation OperationName = "UpdateMapfixDescription"
UpdateMapfixModelOperation OperationName = "UpdateMapfixModel"
UpdateScriptOperation OperationName = "UpdateScript"
UpdateScriptPolicyOperation OperationName = "UpdateScriptPolicy"
UpdateSubmissionModelOperation OperationName = "UpdateSubmissionModel"
UpdateSubmissionReviewOperation OperationName = "UpdateSubmissionReview"
)

File diff suppressed because it is too large

View File

@@ -3,7 +3,6 @@
package api
import (
"bytes"
"fmt"
"io"
"mime"
@@ -11,250 +10,13 @@ import (
"github.com/go-faster/errors"
"github.com/go-faster/jx"
"github.com/ogen-go/ogen/ogenerrors"
"github.com/ogen-go/ogen/validate"
)
func (s *Server) decodeBatchAssetThumbnailsRequest(r *http.Request) (
req *BatchAssetThumbnailsReq,
rawBody []byte,
close func() error,
rerr error,
) {
var closers []func() error
close = func() error {
var merr error
// Close in reverse order, to match defer behavior.
for i := len(closers) - 1; i >= 0; i-- {
c := closers[i]
merr = errors.Join(merr, c())
}
return merr
}
defer func() {
if rerr != nil {
rerr = errors.Join(rerr, close())
}
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request BatchAssetThumbnailsReq
if err := func() error {
if err := request.Decode(d); err != nil {
return err
}
if err := d.Skip(); err != io.EOF {
return errors.New("unexpected trailing data")
}
return nil
}(); err != nil {
err = &ogenerrors.DecodeBodyError{
ContentType: ct,
Body: buf,
Err: err,
}
return req, rawBody, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
return err
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeBatchUserThumbnailsRequest(r *http.Request) (
req *BatchUserThumbnailsReq,
rawBody []byte,
close func() error,
rerr error,
) {
var closers []func() error
close = func() error {
var merr error
// Close in reverse order, to match defer behavior.
for i := len(closers) - 1; i >= 0; i-- {
c := closers[i]
merr = errors.Join(merr, c())
}
return merr
}
defer func() {
if rerr != nil {
rerr = errors.Join(rerr, close())
}
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request BatchUserThumbnailsReq
if err := func() error {
if err := request.Decode(d); err != nil {
return err
}
if err := d.Skip(); err != io.EOF {
return errors.New("unexpected trailing data")
}
return nil
}(); err != nil {
err = &ogenerrors.DecodeBodyError{
ContentType: ct,
Body: buf,
Err: err,
}
return req, rawBody, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
return err
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeBatchUsernamesRequest(r *http.Request) (
req *BatchUsernamesReq,
rawBody []byte,
close func() error,
rerr error,
) {
var closers []func() error
close = func() error {
var merr error
// Close in reverse order, to match defer behavior.
for i := len(closers) - 1; i >= 0; i-- {
c := closers[i]
merr = errors.Join(merr, c())
}
return merr
}
defer func() {
if rerr != nil {
rerr = errors.Join(rerr, close())
}
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request BatchUsernamesReq
if err := func() error {
if err := request.Decode(d); err != nil {
return err
}
if err := d.Skip(); err != io.EOF {
return errors.New("unexpected trailing data")
}
return nil
}(); err != nil {
err = &ogenerrors.DecodeBodyError{
ContentType: ct,
Body: buf,
Err: err,
}
return req, rawBody, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
return err
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
req *MapfixTriggerCreate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -275,29 +37,22 @@ func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request MapfixTriggerCreate
@@ -315,7 +70,7 @@ func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -323,17 +78,16 @@ func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateMapfixAuditCommentRequest(r *http.Request) (
req CreateMapfixAuditCommentReq,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -354,21 +108,20 @@ func (s *Server) decodeCreateMapfixAuditCommentRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "text/plain":
reader := r.Body
request := CreateMapfixAuditCommentReq{Data: reader}
return request, rawBody, close, nil
return request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateScriptRequest(r *http.Request) (
req *ScriptCreate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -389,29 +142,22 @@ func (s *Server) decodeCreateScriptRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request ScriptCreate
@@ -429,7 +175,7 @@ func (s *Server) decodeCreateScriptRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -437,17 +183,16 @@ func (s *Server) decodeCreateScriptRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
req *ScriptPolicyCreate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -468,29 +213,22 @@ func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request ScriptPolicyCreate
@@ -508,7 +246,7 @@ func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -516,17 +254,16 @@ func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
req *SubmissionTriggerCreate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -547,29 +284,22 @@ func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request SubmissionTriggerCreate
@@ -587,7 +317,7 @@ func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -595,17 +325,16 @@ func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
req *SubmissionTriggerCreate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -626,29 +355,22 @@ func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request SubmissionTriggerCreate
@@ -666,7 +388,7 @@ func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -674,17 +396,16 @@ func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateSubmissionAuditCommentRequest(r *http.Request) (
req CreateSubmissionAuditCommentReq,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -705,100 +426,20 @@ func (s *Server) decodeCreateSubmissionAuditCommentRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "text/plain":
reader := r.Body
request := CreateSubmissionAuditCommentReq{Data: reader}
return request, rawBody, close, nil
return request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeCreateSubmissionReviewRequest(r *http.Request) (
req *SubmissionReviewCreate,
rawBody []byte,
close func() error,
rerr error,
) {
var closers []func() error
close = func() error {
var merr error
// Close in reverse order, to match defer behavior.
for i := len(closers) - 1; i >= 0; i-- {
c := closers[i]
merr = errors.Join(merr, c())
}
return merr
}
defer func() {
if rerr != nil {
rerr = errors.Join(rerr, close())
}
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request SubmissionReviewCreate
if err := func() error {
if err := request.Decode(d); err != nil {
return err
}
if err := d.Skip(); err != io.EOF {
return errors.New("unexpected trailing data")
}
return nil
}(); err != nil {
err = &ogenerrors.DecodeBodyError{
ContentType: ct,
Body: buf,
Err: err,
}
return req, rawBody, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
return err
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
req []ReleaseInfo,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -819,29 +460,22 @@ func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request []ReleaseInfo
@@ -867,7 +501,7 @@ func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if request == nil {
@@ -900,52 +534,16 @@ func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return request, rawBody, close, nil
return request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeUpdateMapfixDescriptionRequest(r *http.Request) (
req UpdateMapfixDescriptionReq,
rawBody []byte,
close func() error,
rerr error,
) {
var closers []func() error
close = func() error {
var merr error
// Close in reverse order, to match defer behavior.
for i := len(closers) - 1; i >= 0; i-- {
c := closers[i]
merr = errors.Join(merr, c())
}
return merr
}
defer func() {
if rerr != nil {
rerr = errors.Join(rerr, close())
}
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "text/plain":
reader := r.Body
request := UpdateMapfixDescriptionReq{Data: reader}
return request, rawBody, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
req *ScriptUpdate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -966,29 +564,22 @@ func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request ScriptUpdate
@@ -1006,7 +597,7 @@ func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -1014,17 +605,16 @@ func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
req *ScriptPolicyUpdate,
rawBody []byte,
close func() error,
rerr error,
) {
@@ -1045,29 +635,22 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
return req, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
return req, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
return req, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request ScriptPolicyUpdate
@@ -1085,7 +668,7 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
Body: buf,
Err: err,
}
return req, rawBody, close, err
return req, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
@@ -1093,89 +676,10 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
return req, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
return &request, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
}
}
func (s *Server) decodeUpdateSubmissionReviewRequest(r *http.Request) (
req *SubmissionReviewCreate,
rawBody []byte,
close func() error,
rerr error,
) {
var closers []func() error
close = func() error {
var merr error
// Close in reverse order, to match defer behavior.
for i := len(closers) - 1; i >= 0; i-- {
c := closers[i]
merr = errors.Join(merr, c())
}
return merr
}
defer func() {
if rerr != nil {
rerr = errors.Join(rerr, close())
}
}()
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
if err != nil {
return req, rawBody, close, errors.Wrap(err, "parse media type")
}
switch {
case ct == "application/json":
if r.ContentLength == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
buf, err := io.ReadAll(r.Body)
defer func() {
_ = r.Body.Close()
}()
if err != nil {
return req, rawBody, close, err
}
// Reset the body to allow for downstream reading.
r.Body = io.NopCloser(bytes.NewBuffer(buf))
if len(buf) == 0 {
return req, rawBody, close, validate.ErrBodyRequired
}
rawBody = append(rawBody, buf...)
d := jx.DecodeBytes(buf)
var request SubmissionReviewCreate
if err := func() error {
if err := request.Decode(d); err != nil {
return err
}
if err := d.Skip(); err != io.EOF {
return errors.New("unexpected trailing data")
}
return nil
}(); err != nil {
err = &ogenerrors.DecodeBodyError{
ContentType: ct,
Body: buf,
Err: err,
}
return req, rawBody, close, err
}
if err := func() error {
if err := request.Validate(); err != nil {
return err
}
return nil
}(); err != nil {
return req, rawBody, close, errors.Wrap(err, "validate")
}
return &request, rawBody, close, nil
default:
return req, rawBody, close, validate.InvalidContentType(ct)
return req, close, validate.InvalidContentType(ct)
}
}

View File

@@ -7,51 +7,10 @@ import (
"net/http"
"github.com/go-faster/jx"
ht "github.com/ogen-go/ogen/http"
)
func encodeBatchAssetThumbnailsRequest(
req *BatchAssetThumbnailsReq,
r *http.Request,
) error {
const contentType = "application/json"
e := new(jx.Encoder)
{
req.Encode(e)
}
encoded := e.Bytes()
ht.SetBody(r, bytes.NewReader(encoded), contentType)
return nil
}
func encodeBatchUserThumbnailsRequest(
req *BatchUserThumbnailsReq,
r *http.Request,
) error {
const contentType = "application/json"
e := new(jx.Encoder)
{
req.Encode(e)
}
encoded := e.Bytes()
ht.SetBody(r, bytes.NewReader(encoded), contentType)
return nil
}
func encodeBatchUsernamesRequest(
req *BatchUsernamesReq,
r *http.Request,
) error {
const contentType = "application/json"
e := new(jx.Encoder)
{
req.Encode(e)
}
encoded := e.Bytes()
ht.SetBody(r, bytes.NewReader(encoded), contentType)
return nil
}
func encodeCreateMapfixRequest(
req *MapfixTriggerCreate,
r *http.Request,
@@ -142,20 +101,6 @@ func encodeCreateSubmissionAuditCommentRequest(
return nil
}
func encodeCreateSubmissionReviewRequest(
req *SubmissionReviewCreate,
r *http.Request,
) error {
const contentType = "application/json"
e := new(jx.Encoder)
{
req.Encode(e)
}
encoded := e.Bytes()
ht.SetBody(r, bytes.NewReader(encoded), contentType)
return nil
}
func encodeReleaseSubmissionsRequest(
req []ReleaseInfo,
r *http.Request,
@@ -174,16 +119,6 @@ func encodeReleaseSubmissionsRequest(
return nil
}
func encodeUpdateMapfixDescriptionRequest(
req UpdateMapfixDescriptionReq,
r *http.Request,
) error {
const contentType = "text/plain"
body := req
ht.SetBody(r, body, contentType)
return nil
}
func encodeUpdateScriptRequest(
req *ScriptUpdate,
r *http.Request,
@@ -211,17 +146,3 @@ func encodeUpdateScriptPolicyRequest(
ht.SetBody(r, bytes.NewReader(encoded), contentType)
return nil
}
func encodeUpdateSubmissionReviewRequest(
req *SubmissionReviewCreate,
r *http.Request,
) error {
const contentType = "application/json"
e := new(jx.Encoder)
{
req.Encode(e)
}
encoded := e.Bytes()
ht.SetBody(r, bytes.NewReader(encoded), contentType)
return nil
}

File diff suppressed because it is too large

View File

@@ -8,11 +8,10 @@ import (
"github.com/go-faster/errors"
"github.com/go-faster/jx"
"github.com/ogen-go/ogen/conv"
ht "github.com/ogen-go/ogen/http"
"github.com/ogen-go/ogen/uri"
"go.opentelemetry.io/otel/codes"
"go.opentelemetry.io/otel/trace"
ht "github.com/ogen-go/ogen/http"
)
func encodeActionMapfixAcceptedResponse(response *ActionMapfixAcceptedNoContent, w http.ResponseWriter, span trace.Span) error {
@@ -57,13 +56,6 @@ func encodeActionMapfixRevokeResponse(response *ActionMapfixRevokeNoContent, w h
return nil
}
func encodeActionMapfixTriggerReleaseResponse(response *ActionMapfixTriggerReleaseNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
return nil
}
func encodeActionMapfixTriggerSubmitResponse(response *ActionMapfixTriggerSubmitNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
@@ -92,13 +84,6 @@ func encodeActionMapfixTriggerValidateResponse(response *ActionMapfixTriggerVali
return nil
}
func encodeActionMapfixUploadedResponse(response *ActionMapfixUploadedNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
return nil
}
func encodeActionMapfixValidatedResponse(response *ActionMapfixValidatedNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
@@ -183,48 +168,6 @@ func encodeActionSubmissionValidatedResponse(response *ActionSubmissionValidated
return nil
}
func encodeBatchAssetThumbnailsResponse(response *BatchAssetThumbnailsOK, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeBatchUserThumbnailsResponse(response *BatchUserThumbnailsOK, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeBatchUsernamesResponse(response *BatchUsernamesOK, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeCreateMapfixResponse(response *OperationID, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(201)
@@ -309,20 +252,6 @@ func encodeCreateSubmissionAuditCommentResponse(response *CreateSubmissionAuditC
return nil
}
func encodeCreateSubmissionReviewResponse(response *SubmissionReview, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeDeleteScriptResponse(response *DeleteScriptNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
@@ -353,78 +282,6 @@ func encodeDownloadMapAssetResponse(response DownloadMapAssetOK, w http.Response
return nil
}
func encodeGetAOREventResponse(response *AOREvent, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeGetAOREventSubmissionsResponse(response []Submission, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
e.ArrStart()
for _, elem := range response {
elem.Encode(e)
}
e.ArrEnd()
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeGetActiveAOREventResponse(response *AOREvent, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeGetAssetThumbnailResponse(response *GetAssetThumbnailFound, w http.ResponseWriter, span trace.Span) error {
// Encoding response headers.
{
h := uri.NewHeaderEncoder(w.Header())
// Encode "Location" header.
{
cfg := uri.HeaderParameterEncodingConfig{
Name: "Location",
Explode: false,
}
if err := h.EncodeParam(cfg, func(e uri.Encoder) error {
if val, ok := response.Location.Get(); ok {
return e.EncodeValue(conv.StringToString(val))
}
return nil
}); err != nil {
return errors.Wrap(err, "encode Location header")
}
}
}
w.WriteHeader(302)
span.SetStatus(codes.Ok, http.StatusText(302))
return nil
}
func encodeGetMapResponse(response *Map, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
@@ -495,20 +352,6 @@ func encodeGetScriptPolicyResponse(response *ScriptPolicy, w http.ResponseWriter
return nil
}
func encodeGetStatsResponse(response *Stats, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeGetSubmissionResponse(response *Submission, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
@@ -523,50 +366,6 @@ func encodeGetSubmissionResponse(response *Submission, w http.ResponseWriter, sp
return nil
}
func encodeGetUserThumbnailResponse(response *GetUserThumbnailFound, w http.ResponseWriter, span trace.Span) error {
// Encoding response headers.
{
h := uri.NewHeaderEncoder(w.Header())
// Encode "Location" header.
{
cfg := uri.HeaderParameterEncodingConfig{
Name: "Location",
Explode: false,
}
if err := h.EncodeParam(cfg, func(e uri.Encoder) error {
if val, ok := response.Location.Get(); ok {
return e.EncodeValue(conv.StringToString(val))
}
return nil
}); err != nil {
return errors.Wrap(err, "encode Location header")
}
}
}
w.WriteHeader(302)
span.SetStatus(codes.Ok, http.StatusText(302))
return nil
}
func encodeListAOREventsResponse(response []AOREvent, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
e.ArrStart()
for _, elem := range response {
elem.Encode(e)
}
e.ArrEnd()
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeListMapfixAuditEventsResponse(response []AuditEvent, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
@@ -671,24 +470,6 @@ func encodeListSubmissionAuditEventsResponse(response []AuditEvent, w http.Respo
return nil
}
func encodeListSubmissionReviewsResponse(response []SubmissionReview, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
e.ArrStart()
for _, elem := range response {
elem.Encode(e)
}
e.ArrEnd()
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeListSubmissionsResponse(response *Submissions, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
@@ -703,17 +484,17 @@ func encodeListSubmissionsResponse(response *Submissions, w http.ResponseWriter,
return nil
}
func encodeReleaseSubmissionsResponse(response *OperationID, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
func encodeMigrateMapsResponse(response *MigrateMapsOK, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
return nil
}
func encodeReleaseSubmissionsResponse(response *ReleaseSubmissionsCreated, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(201)
span.SetStatus(codes.Ok, http.StatusText(201))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
@@ -773,13 +554,6 @@ func encodeSetSubmissionCompletedResponse(response *SetSubmissionCompletedNoCont
return nil
}
func encodeUpdateMapfixDescriptionResponse(response *UpdateMapfixDescriptionNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
return nil
}
func encodeUpdateMapfixModelResponse(response *UpdateMapfixModelNoContent, w http.ResponseWriter, span trace.Span) error {
w.WriteHeader(204)
span.SetStatus(codes.Ok, http.StatusText(204))
@@ -808,20 +582,6 @@ func encodeUpdateSubmissionModelResponse(response *UpdateSubmissionModelNoConten
return nil
}
func encodeUpdateSubmissionReviewResponse(response *SubmissionReview, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(200)
span.SetStatus(codes.Ok, http.StatusText(200))
e := new(jx.Encoder)
response.Encode(e)
if _, err := e.WriteTo(w); err != nil {
return errors.Wrap(err, "write")
}
return nil
}
func encodeErrorResponse(response *ErrorStatusCode, w http.ResponseWriter, span trace.Span) error {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
code := response.StatusCode

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -8,6 +8,7 @@ import (
"strings"
"github.com/go-faster/errors"
"github.com/ogen-go/ogen/ogenerrors"
)
@@ -39,12 +40,10 @@ var operationRolesCookieAuth = map[string][]string{
ActionMapfixResetSubmittingOperation: []string{},
ActionMapfixRetryValidateOperation: []string{},
ActionMapfixRevokeOperation: []string{},
ActionMapfixTriggerReleaseOperation: []string{},
ActionMapfixTriggerSubmitOperation: []string{},
ActionMapfixTriggerSubmitUncheckedOperation: []string{},
ActionMapfixTriggerUploadOperation: []string{},
ActionMapfixTriggerValidateOperation: []string{},
ActionMapfixUploadedOperation: []string{},
ActionMapfixValidatedOperation: []string{},
ActionSubmissionAcceptedOperation: []string{},
ActionSubmissionRejectOperation: []string{},
@@ -64,24 +63,21 @@ var operationRolesCookieAuth = map[string][]string{
CreateSubmissionOperation: []string{},
CreateSubmissionAdminOperation: []string{},
CreateSubmissionAuditCommentOperation: []string{},
CreateSubmissionReviewOperation: []string{},
DeleteScriptOperation: []string{},
DeleteScriptPolicyOperation: []string{},
DownloadMapAssetOperation: []string{},
GetOperationOperation: []string{},
ListSubmissionReviewsOperation: []string{},
MigrateMapsOperation: []string{},
ReleaseSubmissionsOperation: []string{},
SessionRolesOperation: []string{},
SessionUserOperation: []string{},
SessionValidateOperation: []string{},
SetMapfixCompletedOperation: []string{},
SetSubmissionCompletedOperation: []string{},
UpdateMapfixDescriptionOperation: []string{},
UpdateMapfixModelOperation: []string{},
UpdateScriptOperation: []string{},
UpdateScriptPolicyOperation: []string{},
UpdateSubmissionModelOperation: []string{},
UpdateSubmissionReviewOperation: []string{},
}
func (s *Server) securityCookieAuth(ctx context.Context, operationName OperationName, req *http.Request) (context.Context, bool, error) {

View File

@@ -45,12 +45,6 @@ type Handler interface {
//
// POST /mapfixes/{MapfixID}/status/revoke
ActionMapfixRevoke(ctx context.Context, params ActionMapfixRevokeParams) error
// ActionMapfixTriggerRelease implements actionMapfixTriggerRelease operation.
//
// Role MapfixUpload changes status from Uploaded -> Releasing.
//
// POST /mapfixes/{MapfixID}/status/trigger-release
ActionMapfixTriggerRelease(ctx context.Context, params ActionMapfixTriggerReleaseParams) error
// ActionMapfixTriggerSubmit implements actionMapfixTriggerSubmit operation.
//
// Role Submitter changes status from UnderConstruction|ChangesRequested -> Submitting.
@@ -65,7 +59,7 @@ type Handler interface {
ActionMapfixTriggerSubmitUnchecked(ctx context.Context, params ActionMapfixTriggerSubmitUncheckedParams) error
// ActionMapfixTriggerUpload implements actionMapfixTriggerUpload operation.
//
// Role MapfixUpload changes status from Validated -> Uploading.
// Role Admin changes status from Validated -> Uploading.
//
// POST /mapfixes/{MapfixID}/status/trigger-upload
ActionMapfixTriggerUpload(ctx context.Context, params ActionMapfixTriggerUploadParams) error
@@ -75,15 +69,9 @@ type Handler interface {
//
// POST /mapfixes/{MapfixID}/status/trigger-validate
ActionMapfixTriggerValidate(ctx context.Context, params ActionMapfixTriggerValidateParams) error
// ActionMapfixUploaded implements actionMapfixUploaded operation.
//
// Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded.
//
// POST /mapfixes/{MapfixID}/status/reset-releasing
ActionMapfixUploaded(ctx context.Context, params ActionMapfixUploadedParams) error
// ActionMapfixValidated implements actionMapfixValidated operation.
//
// Role MapfixUpload manually resets uploading softlock and changes status from Uploading -> Validated.
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
//
// POST /mapfixes/{MapfixID}/status/reset-uploading
ActionMapfixValidated(ctx context.Context, params ActionMapfixValidatedParams) error
@@ -138,7 +126,7 @@ type Handler interface {
ActionSubmissionTriggerSubmitUnchecked(ctx context.Context, params ActionSubmissionTriggerSubmitUncheckedParams) error
// ActionSubmissionTriggerUpload implements actionSubmissionTriggerUpload operation.
//
// Role SubmissionUpload changes status from Validated -> Uploading.
// Role Admin changes status from Validated -> Uploading.
//
// POST /submissions/{SubmissionID}/status/trigger-upload
ActionSubmissionTriggerUpload(ctx context.Context, params ActionSubmissionTriggerUploadParams) error
@@ -150,29 +138,10 @@ type Handler interface {
ActionSubmissionTriggerValidate(ctx context.Context, params ActionSubmissionTriggerValidateParams) error
// ActionSubmissionValidated implements actionSubmissionValidated operation.
//
// Role SubmissionUpload manually resets uploading softlock and changes status from Uploading ->
// Validated.
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
//
// POST /submissions/{SubmissionID}/status/reset-uploading
ActionSubmissionValidated(ctx context.Context, params ActionSubmissionValidatedParams) error
// BatchAssetThumbnails implements batchAssetThumbnails operation.
//
// Batch fetch asset thumbnails.
//
// POST /thumbnails/assets
BatchAssetThumbnails(ctx context.Context, req *BatchAssetThumbnailsReq) (*BatchAssetThumbnailsOK, error)
// BatchUserThumbnails implements batchUserThumbnails operation.
//
// Batch fetch user avatar thumbnails.
//
// POST /thumbnails/users
BatchUserThumbnails(ctx context.Context, req *BatchUserThumbnailsReq) (*BatchUserThumbnailsOK, error)
// BatchUsernames implements batchUsernames operation.
//
// Batch fetch usernames.
//
// POST /usernames
BatchUsernames(ctx context.Context, req *BatchUsernamesReq) (*BatchUsernamesOK, error)
// CreateMapfix implements createMapfix operation.
//
// Trigger the validator to create a mapfix.
@@ -215,12 +184,6 @@ type Handler interface {
//
// POST /submissions/{SubmissionID}/comment
CreateSubmissionAuditComment(ctx context.Context, req CreateSubmissionAuditCommentReq, params CreateSubmissionAuditCommentParams) error
// CreateSubmissionReview implements createSubmissionReview operation.
//
// Create a review for a submission.
//
// POST /submissions/{SubmissionID}/reviews
CreateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params CreateSubmissionReviewParams) (*SubmissionReview, error)
// DeleteScript implements deleteScript operation.
//
// Delete the specified script by ID.
@@ -239,30 +202,6 @@ type Handler interface {
//
// GET /maps/{MapID}/download
DownloadMapAsset(ctx context.Context, params DownloadMapAssetParams) (DownloadMapAssetOK, error)
// GetAOREvent implements getAOREvent operation.
//
// Get a specific AOR event.
//
// GET /aor-events/{AOREventID}
GetAOREvent(ctx context.Context, params GetAOREventParams) (*AOREvent, error)
// GetAOREventSubmissions implements getAOREventSubmissions operation.
//
// Get all submissions for a specific AOR event.
//
// GET /aor-events/{AOREventID}/submissions
GetAOREventSubmissions(ctx context.Context, params GetAOREventSubmissionsParams) ([]Submission, error)
// GetActiveAOREvent implements getActiveAOREvent operation.
//
// Get the currently active AOR event.
//
// GET /aor-events/active
GetActiveAOREvent(ctx context.Context) (*AOREvent, error)
// GetAssetThumbnail implements getAssetThumbnail operation.
//
// Get single asset thumbnail.
//
// GET /thumbnails/asset/{AssetID}
GetAssetThumbnail(ctx context.Context, params GetAssetThumbnailParams) (*GetAssetThumbnailFound, error)
// GetMap implements getMap operation.
//
// Retrieve map with ID.
@@ -293,30 +232,12 @@ type Handler interface {
//
// GET /script-policy/{ScriptPolicyID}
GetScriptPolicy(ctx context.Context, params GetScriptPolicyParams) (*ScriptPolicy, error)
// GetStats implements getStats operation.
//
// Get aggregate statistics.
//
// GET /stats
GetStats(ctx context.Context) (*Stats, error)
// GetSubmission implements getSubmission operation.
//
// Retrieve map with ID.
//
// GET /submissions/{SubmissionID}
GetSubmission(ctx context.Context, params GetSubmissionParams) (*Submission, error)
// GetUserThumbnail implements getUserThumbnail operation.
//
// Get single user avatar thumbnail.
//
// GET /thumbnails/user/{UserID}
GetUserThumbnail(ctx context.Context, params GetUserThumbnailParams) (*GetUserThumbnailFound, error)
// ListAOREvents implements listAOREvents operation.
//
// Get list of AOR events.
//
// GET /aor-events
ListAOREvents(ctx context.Context, params ListAOREventsParams) ([]AOREvent, error)
// ListMapfixAuditEvents implements listMapfixAuditEvents operation.
//
// Retrieve a list of audit events.
@@ -353,24 +274,24 @@ type Handler interface {
//
// GET /submissions/{SubmissionID}/audit-events
ListSubmissionAuditEvents(ctx context.Context, params ListSubmissionAuditEventsParams) ([]AuditEvent, error)
// ListSubmissionReviews implements listSubmissionReviews operation.
//
// Get all reviews for a submission.
//
// GET /submissions/{SubmissionID}/reviews
ListSubmissionReviews(ctx context.Context, params ListSubmissionReviewsParams) ([]SubmissionReview, error)
// ListSubmissions implements listSubmissions operation.
//
// Get list of submissions.
//
// GET /submissions
ListSubmissions(ctx context.Context, params ListSubmissionsParams) (*Submissions, error)
// MigrateMaps implements migrateMaps operation.
//
// Perform maps migration.
//
// POST /migrate-maps
MigrateMaps(ctx context.Context) error
// ReleaseSubmissions implements releaseSubmissions operation.
//
// Release a set of uploaded maps. Role SubmissionRelease.
// Release a set of uploaded maps.
//
// POST /release-submissions
ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) (*OperationID, error)
ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) error
// SessionRoles implements sessionRoles operation.
//
// Get list of roles for the current session.
@@ -401,12 +322,6 @@ type Handler interface {
//
// POST /submissions/{SubmissionID}/completed
SetSubmissionCompleted(ctx context.Context, params SetSubmissionCompletedParams) error
// UpdateMapfixDescription implements updateMapfixDescription operation.
//
// Update description (submitter only).
//
// PATCH /mapfixes/{MapfixID}/description
UpdateMapfixDescription(ctx context.Context, req UpdateMapfixDescriptionReq, params UpdateMapfixDescriptionParams) error
// UpdateMapfixModel implements updateMapfixModel operation.
//
// Update model following role restrictions.
@@ -431,12 +346,6 @@ type Handler interface {
//
// POST /submissions/{SubmissionID}/model
UpdateSubmissionModel(ctx context.Context, params UpdateSubmissionModelParams) error
// UpdateSubmissionReview implements updateSubmissionReview operation.
//
// Update an existing review.
//
// PATCH /submissions/{SubmissionID}/reviews/{ReviewID}
UpdateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params UpdateSubmissionReviewParams) (*SubmissionReview, error)
// NewError creates *ErrorStatusCode from error returned by handler.
//
// Used for common default response.

View File

@@ -68,15 +68,6 @@ func (UnimplementedHandler) ActionMapfixRevoke(ctx context.Context, params Actio
return ht.ErrNotImplemented
}
// ActionMapfixTriggerRelease implements actionMapfixTriggerRelease operation.
//
// Role MapfixUpload changes status from Uploaded -> Releasing.
//
// POST /mapfixes/{MapfixID}/status/trigger-release
func (UnimplementedHandler) ActionMapfixTriggerRelease(ctx context.Context, params ActionMapfixTriggerReleaseParams) error {
return ht.ErrNotImplemented
}
// ActionMapfixTriggerSubmit implements actionMapfixTriggerSubmit operation.
//
// Role Submitter changes status from UnderConstruction|ChangesRequested -> Submitting.
@@ -97,7 +88,7 @@ func (UnimplementedHandler) ActionMapfixTriggerSubmitUnchecked(ctx context.Conte
// ActionMapfixTriggerUpload implements actionMapfixTriggerUpload operation.
//
// Role MapfixUpload changes status from Validated -> Uploading.
// Role Admin changes status from Validated -> Uploading.
//
// POST /mapfixes/{MapfixID}/status/trigger-upload
func (UnimplementedHandler) ActionMapfixTriggerUpload(ctx context.Context, params ActionMapfixTriggerUploadParams) error {
@@ -113,18 +104,9 @@ func (UnimplementedHandler) ActionMapfixTriggerValidate(ctx context.Context, par
return ht.ErrNotImplemented
}
// ActionMapfixUploaded implements actionMapfixUploaded operation.
//
// Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded.
//
// POST /mapfixes/{MapfixID}/status/reset-releasing
func (UnimplementedHandler) ActionMapfixUploaded(ctx context.Context, params ActionMapfixUploadedParams) error {
return ht.ErrNotImplemented
}
// ActionMapfixValidated implements actionMapfixValidated operation.
//
// Role MapfixUpload manually resets uploading softlock and changes status from Uploading -> Validated.
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
//
// POST /mapfixes/{MapfixID}/status/reset-uploading
func (UnimplementedHandler) ActionMapfixValidated(ctx context.Context, params ActionMapfixValidatedParams) error {
@@ -206,7 +188,7 @@ func (UnimplementedHandler) ActionSubmissionTriggerSubmitUnchecked(ctx context.C
// ActionSubmissionTriggerUpload implements actionSubmissionTriggerUpload operation.
//
// Role SubmissionUpload changes status from Validated -> Uploading.
// Role Admin changes status from Validated -> Uploading.
//
// POST /submissions/{SubmissionID}/status/trigger-upload
func (UnimplementedHandler) ActionSubmissionTriggerUpload(ctx context.Context, params ActionSubmissionTriggerUploadParams) error {
@@ -224,41 +206,13 @@ func (UnimplementedHandler) ActionSubmissionTriggerValidate(ctx context.Context,
// ActionSubmissionValidated implements actionSubmissionValidated operation.
//
// Role SubmissionUpload manually resets uploading softlock and changes status from Uploading ->
// Validated.
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
//
// POST /submissions/{SubmissionID}/status/reset-uploading
func (UnimplementedHandler) ActionSubmissionValidated(ctx context.Context, params ActionSubmissionValidatedParams) error {
return ht.ErrNotImplemented
}
// BatchAssetThumbnails implements batchAssetThumbnails operation.
//
// Batch fetch asset thumbnails.
//
// POST /thumbnails/assets
func (UnimplementedHandler) BatchAssetThumbnails(ctx context.Context, req *BatchAssetThumbnailsReq) (r *BatchAssetThumbnailsOK, _ error) {
return r, ht.ErrNotImplemented
}
// BatchUserThumbnails implements batchUserThumbnails operation.
//
// Batch fetch user avatar thumbnails.
//
// POST /thumbnails/users
func (UnimplementedHandler) BatchUserThumbnails(ctx context.Context, req *BatchUserThumbnailsReq) (r *BatchUserThumbnailsOK, _ error) {
return r, ht.ErrNotImplemented
}
// BatchUsernames implements batchUsernames operation.
//
// Batch fetch usernames.
//
// POST /usernames
func (UnimplementedHandler) BatchUsernames(ctx context.Context, req *BatchUsernamesReq) (r *BatchUsernamesOK, _ error) {
return r, ht.ErrNotImplemented
}
// CreateMapfix implements createMapfix operation.
//
// Trigger the validator to create a mapfix.
@@ -322,15 +276,6 @@ func (UnimplementedHandler) CreateSubmissionAuditComment(ctx context.Context, re
return ht.ErrNotImplemented
}
// CreateSubmissionReview implements createSubmissionReview operation.
//
// Create a review for a submission.
//
// POST /submissions/{SubmissionID}/reviews
func (UnimplementedHandler) CreateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params CreateSubmissionReviewParams) (r *SubmissionReview, _ error) {
return r, ht.ErrNotImplemented
}
// DeleteScript implements deleteScript operation.
//
// Delete the specified script by ID.
@@ -358,42 +303,6 @@ func (UnimplementedHandler) DownloadMapAsset(ctx context.Context, params Downloa
return r, ht.ErrNotImplemented
}
// GetAOREvent implements getAOREvent operation.
//
// Get a specific AOR event.
//
// GET /aor-events/{AOREventID}
func (UnimplementedHandler) GetAOREvent(ctx context.Context, params GetAOREventParams) (r *AOREvent, _ error) {
return r, ht.ErrNotImplemented
}
// GetAOREventSubmissions implements getAOREventSubmissions operation.
//
// Get all submissions for a specific AOR event.
//
// GET /aor-events/{AOREventID}/submissions
func (UnimplementedHandler) GetAOREventSubmissions(ctx context.Context, params GetAOREventSubmissionsParams) (r []Submission, _ error) {
return r, ht.ErrNotImplemented
}
// GetActiveAOREvent implements getActiveAOREvent operation.
//
// Get the currently active AOR event.
//
// GET /aor-events/active
func (UnimplementedHandler) GetActiveAOREvent(ctx context.Context) (r *AOREvent, _ error) {
return r, ht.ErrNotImplemented
}
// GetAssetThumbnail implements getAssetThumbnail operation.
//
// Get single asset thumbnail.
//
// GET /thumbnails/asset/{AssetID}
func (UnimplementedHandler) GetAssetThumbnail(ctx context.Context, params GetAssetThumbnailParams) (r *GetAssetThumbnailFound, _ error) {
return r, ht.ErrNotImplemented
}
// GetMap implements getMap operation.
//
// Retrieve map with ID.
@@ -439,15 +348,6 @@ func (UnimplementedHandler) GetScriptPolicy(ctx context.Context, params GetScrip
return r, ht.ErrNotImplemented
}
// GetStats implements getStats operation.
//
// Get aggregate statistics.
//
// GET /stats
func (UnimplementedHandler) GetStats(ctx context.Context) (r *Stats, _ error) {
return r, ht.ErrNotImplemented
}
// GetSubmission implements getSubmission operation.
//
// Retrieve map with ID.
@@ -457,24 +357,6 @@ func (UnimplementedHandler) GetSubmission(ctx context.Context, params GetSubmiss
return r, ht.ErrNotImplemented
}
// GetUserThumbnail implements getUserThumbnail operation.
//
// Get single user avatar thumbnail.
//
// GET /thumbnails/user/{UserID}
func (UnimplementedHandler) GetUserThumbnail(ctx context.Context, params GetUserThumbnailParams) (r *GetUserThumbnailFound, _ error) {
return r, ht.ErrNotImplemented
}
// ListAOREvents implements listAOREvents operation.
//
// Get list of AOR events.
//
// GET /aor-events
func (UnimplementedHandler) ListAOREvents(ctx context.Context, params ListAOREventsParams) (r []AOREvent, _ error) {
return r, ht.ErrNotImplemented
}
// ListMapfixAuditEvents implements listMapfixAuditEvents operation.
//
// Retrieve a list of audit events.
@@ -529,15 +411,6 @@ func (UnimplementedHandler) ListSubmissionAuditEvents(ctx context.Context, param
return r, ht.ErrNotImplemented
}
// ListSubmissionReviews implements listSubmissionReviews operation.
//
// Get all reviews for a submission.
//
// GET /submissions/{SubmissionID}/reviews
func (UnimplementedHandler) ListSubmissionReviews(ctx context.Context, params ListSubmissionReviewsParams) (r []SubmissionReview, _ error) {
return r, ht.ErrNotImplemented
}
// ListSubmissions implements listSubmissions operation.
//
// Get list of submissions.
@@ -547,13 +420,22 @@ func (UnimplementedHandler) ListSubmissions(ctx context.Context, params ListSubm
return r, ht.ErrNotImplemented
}
// MigrateMaps implements migrateMaps operation.
//
// Perform maps migration.
//
// POST /migrate-maps
func (UnimplementedHandler) MigrateMaps(ctx context.Context) error {
return ht.ErrNotImplemented
}
// ReleaseSubmissions implements releaseSubmissions operation.
//
// Release a set of uploaded maps. Role SubmissionRelease.
// Release a set of uploaded maps.
//
// POST /release-submissions
func (UnimplementedHandler) ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) (r *OperationID, _ error) {
return r, ht.ErrNotImplemented
func (UnimplementedHandler) ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) error {
return ht.ErrNotImplemented
}
// SessionRoles implements sessionRoles operation.
@@ -601,15 +483,6 @@ func (UnimplementedHandler) SetSubmissionCompleted(ctx context.Context, params S
return ht.ErrNotImplemented
}
// UpdateMapfixDescription implements updateMapfixDescription operation.
//
// Update description (submitter only).
//
// PATCH /mapfixes/{MapfixID}/description
func (UnimplementedHandler) UpdateMapfixDescription(ctx context.Context, req UpdateMapfixDescriptionReq, params UpdateMapfixDescriptionParams) error {
return ht.ErrNotImplemented
}
// UpdateMapfixModel implements updateMapfixModel operation.
//
// Update model following role restrictions.
@@ -646,15 +519,6 @@ func (UnimplementedHandler) UpdateSubmissionModel(ctx context.Context, params Up
return ht.ErrNotImplemented
}
// UpdateSubmissionReview implements updateSubmissionReview operation.
//
// Update an existing review.
//
// PATCH /submissions/{SubmissionID}/reviews/{ReviewID}
func (UnimplementedHandler) UpdateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params UpdateSubmissionReviewParams) (r *SubmissionReview, _ error) {
return r, ht.ErrNotImplemented
}
// NewError creates *ErrorStatusCode from error returned by handler.
//
// Used for common default response.
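The generated UnimplementedHandler above is intended to be embedded, so a concrete service only overrides the operations it actually serves. A minimal sketch of that pattern (the package name and the generated-package import path are assumptions, not taken from this commit):
package service
import (
"context"
api "git.itzana.me/strafesnet/maps-service/pkg/api" // assumed import path for the ogen-generated package
)
// Handler embeds the generated UnimplementedHandler so every operation
// returns ht.ErrNotImplemented until it is overridden. Illustrative only.
type Handler struct {
api.UnimplementedHandler
}
// ListSubmissions overrides a single generated stub; the empty result is a placeholder.
func (h *Handler) ListSubmissions(ctx context.Context, params api.ListSubmissionsParams) (*api.Submissions, error) {
return &api.Submissions{}, nil
}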

File diff suppressed because it is too large Load Diff

View File

@@ -1,75 +0,0 @@
package cmds
import (
"git.itzana.me/strafesnet/maps-service/pkg/datastore/gormstore"
"git.itzana.me/strafesnet/maps-service/pkg/service"
log "github.com/sirupsen/logrus"
"github.com/urfave/cli/v2"
)
func NewAORCommand() *cli.Command {
return &cli.Command{
Name: "aor",
Usage: "Run AOR (Accept or Reject) event processor",
Action: runAORProcessor,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "pg-host",
Usage: "Host of postgres database",
EnvVars: []string{"PG_HOST"},
Required: true,
},
&cli.IntFlag{
Name: "pg-port",
Usage: "Port of postgres database",
EnvVars: []string{"PG_PORT"},
Required: true,
},
&cli.StringFlag{
Name: "pg-db",
Usage: "Name of database to connect to",
EnvVars: []string{"PG_DB"},
Required: true,
},
&cli.StringFlag{
Name: "pg-user",
Usage: "User to connect with",
EnvVars: []string{"PG_USER"},
Required: true,
},
&cli.StringFlag{
Name: "pg-password",
Usage: "Password to connect with",
EnvVars: []string{"PG_PASSWORD"},
Required: true,
},
&cli.BoolFlag{
Name: "migrate",
Usage: "Run database migrations",
Value: false,
EnvVars: []string{"MIGRATE"},
},
},
}
}
func runAORProcessor(ctx *cli.Context) error {
log.Info("Starting AOR event processor")
// Connect to database
db, err := gormstore.New(ctx)
if err != nil {
log.WithError(err).Fatal("failed to connect database")
return err
}
// Create scheduler and process events
scheduler := service.NewAORScheduler(db)
if err := scheduler.ProcessAOREvents(); err != nil {
log.WithError(err).Error("AOR event processing failed")
return err
}
log.Info("AOR event processor completed successfully")
return nil
}

View File

@@ -1,57 +0,0 @@
package cmds
import (
"git.itzana.me/strafesnet/maps-service/pkg/public_api"
"github.com/urfave/cli/v2"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
)
func NewApiCommand() *cli.Command {
return &cli.Command{
Name: "api",
Usage: "Run api service",
Action: runAPI,
Flags: []cli.Flag{
&cli.IntFlag{
Name: "port",
Usage: "Listen port",
EnvVars: []string{"PORT"},
Value: 8080,
},
&cli.StringFlag{
Name: "dev-rpc-host",
Usage: "Host of dev rpc",
EnvVars: []string{"DEV_RPC_HOST"},
Value: "dev-service:8081",
},
&cli.StringFlag{
Name: "maps-rpc-host",
Usage: "Host of maps rpc",
EnvVars: []string{"MAPS_RPC_HOST"},
Value: "maptest-api:8081",
},
},
}
}
func runAPI(ctx *cli.Context) error {
// Dev service client
devConn, err := grpc.Dial(ctx.String("dev-rpc-host"), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
return err
}
// Data service client
mapsConn, err := grpc.Dial(ctx.String("maps-rpc-host"), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
return err
}
return api.NewRouter(
api.WithContext(ctx),
api.WithPort(ctx.Int("port")),
api.WithDevClient(devConn),
api.WithMapsClient(mapsConn),
)
}

View File

@@ -18,7 +18,6 @@ import (
"git.itzana.me/strafesnet/maps-service/pkg/validator_controller"
"git.itzana.me/strafesnet/maps-service/pkg/web_api"
"github.com/nats-io/nats.go"
"github.com/redis/go-redis/v9"
log "github.com/sirupsen/logrus"
"github.com/urfave/cli/v2"
"google.golang.org/grpc"
@@ -103,24 +102,6 @@ func NewServeCommand() *cli.Command {
EnvVars: []string{"RBX_API_KEY"},
Required: true,
},
&cli.StringFlag{
Name: "redis-host",
Usage: "Host of Redis cache",
EnvVars: []string{"REDIS_HOST"},
Value: "localhost:6379",
},
&cli.StringFlag{
Name: "redis-password",
Usage: "Password for Redis",
EnvVars: []string{"REDIS_PASSWORD"},
Value: "",
},
&cli.IntFlag{
Name: "redis-db",
Usage: "Redis database number",
EnvVars: []string{"REDIS_DB"},
Value: 0,
},
},
}
}
@@ -148,24 +129,6 @@ func serve(ctx *cli.Context) error {
log.WithError(err).Fatal("failed to add stream")
}
// Initialize Redis client
redisClient := redis.NewClient(&redis.Options{
Addr: ctx.String("redis-host"),
Password: ctx.String("redis-password"),
DB: ctx.Int("redis-db"),
})
// Test Redis connection
if err := redisClient.Ping(ctx.Context).Err(); err != nil {
log.WithError(err).Warn("failed to connect to Redis - thumbnails will not be cached")
}
// Initialize Roblox client
robloxClient := &roblox.Client{
HttpClient: http.DefaultClient,
ApiKey: ctx.String("rbx-api-key"),
}
// connect to main game database
conn, err := grpc.Dial(ctx.String("data-rpc-host"), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
@@ -176,15 +139,13 @@ func serve(ctx *cli.Context) error {
js,
maps.NewMapsServiceClient(conn),
users.NewUsersServiceClient(conn),
robloxClient,
redisClient,
)
svc_external := web_api.NewService(
&svc_inner,
roblox.Client{
HttpClient: http.DefaultClient,
ApiKey: ctx.String("rbx-api-key"),
ApiKey: ctx.String("rbx-api-key"),
},
)

View File

@@ -24,14 +24,11 @@ const (
)
type Datastore interface {
AOREvents() AOREvents
AORSubmissions() AORSubmissions
AuditEvents() AuditEvents
Maps() Maps
Mapfixes() Mapfixes
Operations() Operations
Submissions() Submissions
SubmissionReviews() SubmissionReviews
Scripts() Scripts
ScriptPolicy() ScriptPolicy
}
@@ -86,16 +83,6 @@ type Submissions interface {
ListWithTotal(ctx context.Context, filters OptionalMap, page model.Page, sort ListSort) (int64, []model.Submission, error)
}
type SubmissionReviews interface {
Get(ctx context.Context, id int64) (model.SubmissionReview, error)
GetBySubmissionAndReviewer(ctx context.Context, submissionID int64, reviewerID uint64) (model.SubmissionReview, error)
Create(ctx context.Context, review model.SubmissionReview) (model.SubmissionReview, error)
Update(ctx context.Context, id int64, values OptionalMap) error
Delete(ctx context.Context, id int64) error
ListBySubmission(ctx context.Context, submissionID int64) ([]model.SubmissionReview, error)
MarkOutdatedBySubmission(ctx context.Context, submissionID int64) error
}
type Scripts interface {
Get(ctx context.Context, id int64) (model.Script, error)
Create(ctx context.Context, smap model.Script) (model.Script, error)
@@ -112,22 +99,3 @@ type ScriptPolicy interface {
Delete(ctx context.Context, id int64) error
List(ctx context.Context, filters OptionalMap, page model.Page) ([]model.ScriptPolicy, error)
}
type AOREvents interface {
Get(ctx context.Context, id int64) (model.AOREvent, error)
GetActive(ctx context.Context) (model.AOREvent, error)
GetByStatus(ctx context.Context, status model.AOREventStatus) ([]model.AOREvent, error)
Create(ctx context.Context, event model.AOREvent) (model.AOREvent, error)
Update(ctx context.Context, id int64, values OptionalMap) error
Delete(ctx context.Context, id int64) error
List(ctx context.Context, filters OptionalMap, page model.Page) ([]model.AOREvent, error)
}
type AORSubmissions interface {
Get(ctx context.Context, id int64) (model.AORSubmission, error)
GetByAOREvent(ctx context.Context, eventID int64) ([]model.AORSubmission, error)
GetBySubmission(ctx context.Context, submissionID int64) ([]model.AORSubmission, error)
Create(ctx context.Context, aorSubmission model.AORSubmission) (model.AORSubmission, error)
Delete(ctx context.Context, id int64) error
ListWithSubmissions(ctx context.Context, eventID int64) ([]model.Submission, error)
}

View File

@@ -1,89 +0,0 @@
package gormstore
import (
"context"
"errors"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
"gorm.io/gorm"
)
type AOREvents struct {
db *gorm.DB
}
func (env *AOREvents) Get(ctx context.Context, id int64) (model.AOREvent, error) {
var event model.AOREvent
if err := env.db.First(&event, id).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return event, datastore.ErrNotExist
}
return event, err
}
return event, nil
}
func (env *AOREvents) GetActive(ctx context.Context) (model.AOREvent, error) {
var event model.AOREvent
// Get the most recent non-closed event
if err := env.db.Where("status != ?", model.AOREventStatusClosed).
Order("start_date DESC").
First(&event).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return event, datastore.ErrNotExist
}
return event, err
}
return event, nil
}
func (env *AOREvents) GetByStatus(ctx context.Context, status model.AOREventStatus) ([]model.AOREvent, error) {
var events []model.AOREvent
if err := env.db.Where("status = ?", status).Order("start_date DESC").Find(&events).Error; err != nil {
return nil, err
}
return events, nil
}
func (env *AOREvents) Create(ctx context.Context, event model.AOREvent) (model.AOREvent, error) {
if err := env.db.Create(&event).Error; err != nil {
return event, err
}
return event, nil
}
func (env *AOREvents) Update(ctx context.Context, id int64, values datastore.OptionalMap) error {
if err := env.db.Model(&model.AOREvent{}).Where("id = ?", id).Updates(values.Map()).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return datastore.ErrNotExist
}
return err
}
return nil
}
func (env *AOREvents) Delete(ctx context.Context, id int64) error {
if err := env.db.Delete(&model.AOREvent{}, id).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return datastore.ErrNotExist
}
return err
}
return nil
}
func (env *AOREvents) List(ctx context.Context, filters datastore.OptionalMap, page model.Page) ([]model.AOREvent, error) {
var events []model.AOREvent
query := env.db.Where(filters.Map())
if page.Size > 0 {
offset := (page.Number - 1) * page.Size
query = query.Limit(int(page.Size)).Offset(int(offset))
}
if err := query.Order("start_date DESC").Find(&events).Error; err != nil {
return nil, err
}
return events, nil
}

View File

@@ -1,70 +0,0 @@
package gormstore
import (
"context"
"errors"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
"gorm.io/gorm"
)
type AORSubmissions struct {
db *gorm.DB
}
func (env *AORSubmissions) Get(ctx context.Context, id int64) (model.AORSubmission, error) {
var aorSubmission model.AORSubmission
if err := env.db.First(&aorSubmission, id).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return aorSubmission, datastore.ErrNotExist
}
return aorSubmission, err
}
return aorSubmission, nil
}
func (env *AORSubmissions) GetByAOREvent(ctx context.Context, eventID int64) ([]model.AORSubmission, error) {
var aorSubmissions []model.AORSubmission
if err := env.db.Where("aor_event_id = ?", eventID).Order("added_at DESC").Find(&aorSubmissions).Error; err != nil {
return nil, err
}
return aorSubmissions, nil
}
func (env *AORSubmissions) GetBySubmission(ctx context.Context, submissionID int64) ([]model.AORSubmission, error) {
var aorSubmissions []model.AORSubmission
if err := env.db.Where("submission_id = ?", submissionID).Order("added_at DESC").Find(&aorSubmissions).Error; err != nil {
return nil, err
}
return aorSubmissions, nil
}
func (env *AORSubmissions) Create(ctx context.Context, aorSubmission model.AORSubmission) (model.AORSubmission, error) {
if err := env.db.Create(&aorSubmission).Error; err != nil {
return aorSubmission, err
}
return aorSubmission, nil
}
func (env *AORSubmissions) Delete(ctx context.Context, id int64) error {
if err := env.db.Delete(&model.AORSubmission{}, id).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return datastore.ErrNotExist
}
return err
}
return nil
}
func (env *AORSubmissions) ListWithSubmissions(ctx context.Context, eventID int64) ([]model.Submission, error) {
var submissions []model.Submission
if err := env.db.
Joins("JOIN aor_submissions ON aor_submissions.submission_id = submissions.id").
Where("aor_submissions.aor_event_id = ?", eventID).
Order("aor_submissions.added_at DESC").
Find(&submissions).Error; err != nil {
return nil, err
}
return submissions, nil
}
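For context on the removed ListWithSubmissions helper above, here is an illustrative raw-SQL equivalent of the GORM join chain, assuming GORM's default pluralized snake_case table and column names:
package gormstore
import (
"context"
"git.itzana.me/strafesnet/maps-service/pkg/model"
"gorm.io/gorm"
)
// listEventSubmissionsRaw is an illustrative equivalent of ListWithSubmissions
// written as raw SQL; it is not part of the removed code.
func listEventSubmissionsRaw(ctx context.Context, db *gorm.DB, eventID int64) ([]model.Submission, error) {
var submissions []model.Submission
err := db.WithContext(ctx).Raw(`
SELECT submissions.*
FROM submissions
JOIN aor_submissions ON aor_submissions.submission_id = submissions.id
WHERE aor_submissions.aor_event_id = ?
ORDER BY aor_submissions.added_at DESC`, eventID).
Scan(&submissions).Error
return submissions, err
}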

View File

@@ -31,14 +31,11 @@ func New(ctx *cli.Context) (datastore.Datastore, error) {
if ctx.Bool("migrate") {
if err := db.AutoMigrate(
&model.AOREvent{},
&model.AORSubmission{},
&model.AuditEvent{},
&model.Map{},
&model.Mapfix{},
&model.Operation{},
&model.Submission{},
&model.SubmissionReview{},
&model.Script{},
&model.ScriptPolicy{},
); err != nil {

View File

@@ -9,14 +9,6 @@ type Gormstore struct {
db *gorm.DB
}
func (g Gormstore) AOREvents() datastore.AOREvents {
return &AOREvents{db: g.db}
}
func (g Gormstore) AORSubmissions() datastore.AORSubmissions {
return &AORSubmissions{db: g.db}
}
func (g Gormstore) AuditEvents() datastore.AuditEvents {
return &AuditEvents{db: g.db}
}
@@ -37,10 +29,6 @@ func (g Gormstore) Submissions() datastore.Submissions {
return &Submissions{db: g.db}
}
func (g Gormstore) SubmissionReviews() datastore.SubmissionReviews {
return &SubmissionReviews{db: g.db}
}
func (g Gormstore) Scripts() datastore.Scripts {
return &Scripts{db: g.db}
}

View File

@@ -1,83 +0,0 @@
package gormstore
import (
"context"
"errors"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
"gorm.io/gorm"
)
type SubmissionReviews struct {
db *gorm.DB
}
func (env *SubmissionReviews) Get(ctx context.Context, id int64) (model.SubmissionReview, error) {
var review model.SubmissionReview
if err := env.db.First(&review, id).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return review, datastore.ErrNotExist
}
return review, err
}
return review, nil
}
func (env *SubmissionReviews) GetBySubmissionAndReviewer(ctx context.Context, submissionID int64, reviewerID uint64) (model.SubmissionReview, error) {
var review model.SubmissionReview
if err := env.db.Where("submission_id = ? AND reviewer_id = ?", submissionID, reviewerID).First(&review).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return review, datastore.ErrNotExist
}
return review, err
}
return review, nil
}
func (env *SubmissionReviews) Create(ctx context.Context, review model.SubmissionReview) (model.SubmissionReview, error) {
if err := env.db.Create(&review).Error; err != nil {
return review, err
}
return review, nil
}
func (env *SubmissionReviews) Update(ctx context.Context, id int64, values datastore.OptionalMap) error {
if err := env.db.Model(&model.SubmissionReview{}).Where("id = ?", id).Updates(values.Map()).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return datastore.ErrNotExist
}
return err
}
return nil
}
func (env *SubmissionReviews) Delete(ctx context.Context, id int64) error {
if err := env.db.Delete(&model.SubmissionReview{}, id).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return datastore.ErrNotExist
}
return err
}
return nil
}
func (env *SubmissionReviews) ListBySubmission(ctx context.Context, submissionID int64) ([]model.SubmissionReview, error) {
var reviews []model.SubmissionReview
if err := env.db.Where("submission_id = ?", submissionID).Order("created_at DESC").Find(&reviews).Error; err != nil {
return nil, err
}
return reviews, nil
}
func (env *SubmissionReviews) MarkOutdatedBySubmission(ctx context.Context, submissionID int64) error {
if err := env.db.Model(&model.SubmissionReview{}).Where("submission_id = ?", submissionID).Update("outdated", true).Error; err != nil {
return err
}
return nil
}

View File

@@ -1,37 +0,0 @@
package model
import "time"
type AOREventStatus int32
const (
AOREventStatusScheduled AOREventStatus = 0 // Event scheduled, waiting for start
AOREventStatusOpen AOREventStatus = 1 // Event started, accepting submissions (1st of month)
AOREventStatusFrozen AOREventStatus = 2 // Submissions frozen (after 1st of month)
AOREventStatusSelected AOREventStatus = 3 // Submissions selected for AOR (after week 1)
AOREventStatusCompleted AOREventStatus = 4 // Decisions finalized (end of month)
AOREventStatusClosed AOREventStatus = 5 // Event closed/archived
)
// AOREvent represents an Accept or Reject event cycle
// AOR events occur every 4 months (April, August, December)
type AOREvent struct {
ID int64 `gorm:"primaryKey"`
StartDate time.Time `gorm:"index"` // 1st day of AOR month
FreezeDate time.Time // End of 1st day (23:59:59)
SelectionDate time.Time // End of week 1 (7 days after start)
DecisionDate time.Time // End of month (when final decisions are made)
Status AOREventStatus
CreatedAt time.Time
UpdatedAt time.Time
}
// AORSubmission represents a submission that was added to an AOR event
type AORSubmission struct {
ID int64 `gorm:"primaryKey"`
AOREventID int64 `gorm:"index"`
SubmissionID int64 `gorm:"index"`
AddedAt time.Time
CreatedAt time.Time
UpdatedAt time.Time
}
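The date fields encode the cycle described in the comments above (freeze at the end of day 1, selection after week 1, decision at month end). A minimal sketch of deriving them from the AOR month; the helper name is illustrative and not part of the removed code:
package model
import "time"
// newAOREventFor derives the event dates described above from the first day
// of the AOR month containing anyDayInMonth. Illustrative helper only.
func newAOREventFor(anyDayInMonth time.Time) AOREvent {
start := time.Date(anyDayInMonth.Year(), anyDayInMonth.Month(), 1, 0, 0, 0, 0, time.UTC)
return AOREvent{
StartDate:     start,
FreezeDate:    start.Add(24*time.Hour - time.Second),    // end of the 1st day (23:59:59)
SelectionDate: start.AddDate(0, 0, 7),                   // end of week 1
DecisionDate:  start.AddDate(0, 1, 0).Add(-time.Second), // end of the month
Status:        AOREventStatusScheduled,
}
}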

View File

@@ -18,12 +18,10 @@ const (
MapfixStatusValidating MapfixStatus = 5
MapfixStatusValidated MapfixStatus = 6
MapfixStatusUploading MapfixStatus = 7
MapfixStatusUploaded MapfixStatus = 8 // uploaded to the group, but pending release
MapfixStatusReleasing MapfixStatus = 11
// Phase: Final MapfixStatus
MapfixStatusUploaded MapfixStatus = 8 // uploaded to the group, but pending release
MapfixStatusRejected MapfixStatus = 9
MapfixStatusReleased MapfixStatus = 10
)
type Mapfix struct {

View File

@@ -65,29 +65,3 @@ type UploadMapfixRequest struct {
ModelVersion uint64
TargetAssetID uint64
}
type ReleaseSubmissionRequest struct {
// Release schedule
SubmissionID int64
ReleaseDate int64
// Model download info
ModelID uint64
ModelVersion uint64
// MapCreate
UploadedAssetID uint64
DisplayName string
Creator string
GameID uint32
Submitter uint64
}
type BatchReleaseSubmissionsRequest struct {
Submissions []ReleaseSubmissionRequest
OperationID int32
}
type ReleaseMapfixRequest struct {
MapfixID int64
ModelID uint64
ModelVersion uint64
TargetAssetID uint64
}

View File

@@ -17,7 +17,7 @@ type ScriptPolicy struct {
// Hash of the source code that leads to this policy.
// If this is a replacement mapping, the original source may not be pointed to by any policy.
// The original source should still exist in the scripts table, which can be located by the same hash.
FromScriptHash int64 `gorm:"uniqueIndex"` // postgres does not support unsigned integers, so we have to pretend
FromScriptHash int64 // postgres does not support unsigned integers, so we have to pretend
// The ID of the replacement source (ScriptPolicyReplace)
// or verbatim source (ScriptPolicyAllowed)
// or 0 (other)

View File

@@ -26,7 +26,7 @@ func HashParse(hash string) (uint64, error){
type Script struct {
ID int64 `gorm:"primaryKey"`
Name string
Hash int64 `gorm:"uniqueIndex"` // postgres does not support unsigned integers, so we have to pretend
Hash int64 // postgres does not support unsigned integers, so we have to pretend
Source string
ResourceType ResourceType // is this a submission or is it a mapfix
ResourceID int64 // which submission / mapfix did this script first appear in

View File

@@ -1,14 +0,0 @@
package model
import "time"
type SubmissionReview struct {
ID int64 `gorm:"primaryKey"`
SubmissionID int64 `gorm:"index"`
ReviewerID uint64
Recommend bool
Description string
Outdated bool
CreatedAt time.Time
UpdatedAt time.Time
}

View File

@@ -1,47 +0,0 @@
package dto
import (
"git.itzana.me/strafesnet/go-grpc/maps_extended"
"time"
)
type MapFilter struct {
GameID *uint32 `json:"game_id" form:"game_id"`
} // @name MapFilter
type Map struct {
ID int64 `json:"id"`
DisplayName string `json:"display_name"`
Creator string `json:"creator"`
GameID uint32 `json:"game_id"`
Date time.Time `json:"date"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
Submitter uint64 `json:"submitter"`
Thumbnail uint64 `json:"thumbnail"`
AssetVersion uint64 `json:"asset_version"`
LoadCount uint32 `json:"load_count"`
Modes uint32 `json:"modes"`
} // @name Map
// FromGRPC converts a maps.MapResponse protobuf message to a Map domain object
func (m *Map) FromGRPC(resp *maps_extended.MapResponse) *Map {
if resp == nil {
return nil
}
m.ID = resp.ID
m.DisplayName = resp.DisplayName
m.Creator = resp.Creator
m.Date = time.Unix(resp.Date, 0)
m.GameID = resp.GameID
m.CreatedAt = time.Unix(resp.CreatedAt, 0)
m.UpdatedAt = time.Unix(resp.UpdatedAt, 0)
m.Submitter = resp.Submitter
m.Thumbnail = resp.Thumbnail
m.AssetVersion = resp.AssetVersion
m.LoadCount = resp.LoadCount
m.Modes = resp.Modes
return m
}

View File

@@ -1,52 +0,0 @@
package dto
// @Description Generic response
type Response[T any] struct {
// Data contains the actual response payload
Data T `json:"data"`
} // @name Response
type PagedTotalResponse[T any] struct {
// Data contains the actual response payload
Data []T `json:"data"`
// Pagination contains information about paging
Pagination PaginationWithTotal `json:"pagination"`
} // @name PagedTotalResponse
// PaginationWithTotal holds information about the current page, total items, etc.
type PaginationWithTotal struct {
// Current page number
Page int `json:"page"`
// Number of items per page
PageSize int `json:"page_size"`
// Total number of items across all pages
TotalItems int `json:"total_items"`
// Total number of pages
TotalPages int `json:"total_pages"`
} // @name PaginationWithTotal
type PagedResponse[T any] struct {
// Data contains the actual response payload
Data []T `json:"data"`
// Pagination contains information about paging
Pagination Pagination `json:"pagination"`
} // @name PagedResponse
// Pagination holds information about the current page.
type Pagination struct {
// Current page number
Page int `json:"page"`
// Number of items per page
PageSize int `json:"page_size"`
} // @name Pagination
// Error holds error responses
type Error struct {
Error string `json:"error"`
} // @name Error
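PagedTotalResponse leaves the total-pages arithmetic to the caller; a small illustrative helper (not part of the removed package) showing the ceiling division, assuming a positive page size:
package dto
// BuildPagedTotal assembles a PagedTotalResponse and derives TotalPages from
// TotalItems with a ceiling division. Illustrative only; pageSize must be > 0.
func BuildPagedTotal[T any](items []T, page, pageSize, totalItems int) PagedTotalResponse[T] {
totalPages := (totalItems + pageSize - 1) / pageSize
return PagedTotalResponse[T]{
Data: items,
Pagination: PaginationWithTotal{
Page:       page,
PageSize:   pageSize,
TotalItems: totalItems,
TotalPages: totalPages,
},
}
}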

View File

@@ -1,98 +0,0 @@
package handlers
import (
"fmt"
"github.com/gin-gonic/gin"
"google.golang.org/grpc"
"strconv"
)
const (
ErrMsgDataClient = "data client is required"
)
// Handler is a base handler that provides common functionality for all HTTP handlers.
type Handler struct {
mapsClient *grpc.ClientConn
}
// HandlerOption defines a functional option for configuring a Handler
type HandlerOption func(*Handler)
// WithMapsClient sets the data client for the Handler
func WithMapsClient(mapsClient *grpc.ClientConn) HandlerOption {
return func(h *Handler) {
h.mapsClient = mapsClient
}
}
// NewHandler creates a new Handler with the provided options.
// It requires a maps gRPC client to function properly.
func NewHandler(options ...HandlerOption) (*Handler, error) {
handler := &Handler{}
// Apply all provided options
for _, option := range options {
option(handler)
}
// Validate required dependencies
if err := handler.validateDependencies(); err != nil {
return nil, err
}
return handler, nil
}
// validateDependencies ensures all required dependencies are properly set
func (h *Handler) validateDependencies() error {
if h.mapsClient == nil {
return fmt.Errorf(ErrMsgDataClient)
}
return nil
}
// validateRange ensures a value is within the specified range, returning defaultValue if outside
func validateRange(value, min, max, defaultValue int) int {
if value < min {
return defaultValue
}
if value > max {
return max
}
return value
}
// validateMin ensures a value is at least the minimum, returning defaultValue if below
func validateMin(value, min, defaultValue int) int {
if value < min {
return defaultValue
}
return value
}
// getPagination extracts pagination parameters from query string.
// It applies validation rules to ensure parameters are within acceptable ranges.
func getPagination(ctx *gin.Context, defaultPageSize, minPageSize, maxPageSize int) (pageSize, pageNumber int) {
// Get page size from query string, parse to integer
pageSizeStr := ctx.Query("page_size")
if pageSizeStr != "" {
pageSize, _ = strconv.Atoi(pageSizeStr)
} else {
pageSize = defaultPageSize
}
// Get page number from query string, parse to integer
pageNumberStr := ctx.Query("page_number")
if pageNumberStr != "" {
pageNumber, _ = strconv.Atoi(pageNumberStr)
} else {
pageNumber = 1 // Default to first page
}
// Apply validation rules
pageSize = validateRange(pageSize, minPageSize, maxPageSize, defaultPageSize)
pageNumber = validateMin(pageNumber, 1, 1)
return pageSize, pageNumber
}
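A short sketch of the removed getPagination helper in use; the /example route is hypothetical and only demonstrates the default of 10 items per page with a minimum of 1 and a maximum of 100:
package handlers
import (
"net/http"
"github.com/gin-gonic/gin"
)
// registerExampleRoute is an illustrative route (not part of the removed
// package) that echoes the validated pagination parameters back to the caller.
func registerExampleRoute(r *gin.Engine) {
r.GET("/example", func(ctx *gin.Context) {
pageSize, pageNumber := getPagination(ctx, 10, 1, 100)
ctx.JSON(http.StatusOK, gin.H{
"page":      pageNumber,
"page_size": pageSize,
})
})
}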

View File

@@ -1,153 +0,0 @@
package handlers
import (
"git.itzana.me/strafesnet/go-grpc/maps_extended"
"git.itzana.me/strafesnet/maps-service/pkg/public_api/dto"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"net/http"
"strconv"
)
// MapHandler handles HTTP requests related to maps.
type MapHandler struct {
*Handler
}
// NewMapHandler creates a new MapHandler with the provided options.
func NewMapHandler(options ...HandlerOption) (*MapHandler, error) {
baseHandler, err := NewHandler(options...)
if err != nil {
return nil, err
}
return &MapHandler{
Handler: baseHandler,
}, nil
}
// @Summary Get map by ID
// @Description Get a specific map by its ID
// @Tags maps
// @Produce json
// @Security ApiKeyAuth
// @Param id path int true "Map ID"
// @Success 200 {object} dto.Response[dto.Map]
// @Failure 404 {object} dto.Error "Map not found"
// @Failure default {object} dto.Error "General error response"
// @Router /map/{id} [get]
func (h *MapHandler) Get(ctx *gin.Context) {
// Extract map ID from path parameter
id := ctx.Param("id")
mapID, err := strconv.ParseInt(id, 10, 64)
if err != nil {
ctx.JSON(http.StatusBadRequest, dto.Error{
Error: "Invalid map ID format",
})
return
}
// Call the gRPC service
mapData, err := maps_extended.NewMapsServiceClient(h.mapsClient).Get(ctx, &maps_extended.MapId{
ID: mapID,
})
if err != nil {
statusCode := http.StatusInternalServerError
errorMessage := "Failed to get map"
// Check if it's a "not found" error
if status.Code(err) == codes.NotFound {
statusCode = http.StatusNotFound
errorMessage = "Map not found"
}
ctx.JSON(statusCode, dto.Error{
Error: errorMessage,
})
log.WithError(err).Error(
"Failed to get map",
)
return
}
// Convert gRPC MapResponse object to dto.Map object
var mapDto dto.Map
result := mapDto.FromGRPC(mapData)
// Return the map data
ctx.JSON(http.StatusOK, dto.Response[dto.Map]{
Data: *result,
})
}
// @Summary List maps
// @Description Get a list of maps
// @Tags maps
// @Produce json
// @Security ApiKeyAuth
// @Param page_size query int false "Page size (max 100)" default(10) minimum(1) maximum(100)
// @Param page_number query int false "Page number" default(1) minimum(1)
// @Param filter query dto.MapFilter false "Map filter parameters"
// @Success 200 {object} dto.PagedResponse[dto.Map]
// @Failure default {object} dto.Error "General error response"
// @Router /map [get]
func (h *MapHandler) List(ctx *gin.Context) {
// Extract and constrain pagination parameters
query := struct {
PageSize int `form:"page_size,default=10" binding:"min=1,max=100"`
PageNumber int `form:"page_number,default=1" binding:"min=1"`
SortBy int `form:"sort_by,default=0" binding:"min=0,max=3"`
}{}
if err := ctx.ShouldBindQuery(&query); err != nil {
ctx.JSON(http.StatusBadRequest, dto.Error{
Error: err.Error(),
})
return
}
// Get list filter
var filter dto.MapFilter
if err := ctx.ShouldBindQuery(&filter); err != nil {
ctx.JSON(http.StatusBadRequest, dto.Error{
Error: err.Error(),
})
return
}
// Call the gRPC service
mapList, err := maps_extended.NewMapsServiceClient(h.mapsClient).List(ctx, &maps_extended.ListRequest{
Filter: &maps_extended.MapFilter{
GameID: filter.GameID,
},
Page: &maps_extended.Pagination{
Size: uint32(query.PageSize),
Number: uint32(query.PageNumber),
},
})
if err != nil {
ctx.JSON(http.StatusInternalServerError, dto.Error{
Error: "Failed to list maps",
})
log.WithError(err).Error(
"Failed to list maps",
)
return
}
// Convert gRPC MapResponse objects to dto.Map objects
dtoMaps := make([]dto.Map, len(mapList.Maps))
for i, m := range mapList.Maps {
var mapDto dto.Map
dtoMaps[i] = *mapDto.FromGRPC(m)
}
// Return the paged response
ctx.JSON(http.StatusOK, dto.PagedResponse[dto.Map]{
Data: dtoMaps,
Pagination: dto.Pagination{
Page: query.PageNumber,
PageSize: query.PageSize,
},
})
}

View File

@@ -1,159 +0,0 @@
package api
import (
"context"
"errors"
"fmt"
"git.itzana.me/StrafesNET/dev-service/pkg/api/middleware"
"git.itzana.me/strafesnet/maps-service/docs"
"git.itzana.me/strafesnet/maps-service/pkg/public_api/handlers"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
swaggerfiles "github.com/swaggo/files"
ginSwagger "github.com/swaggo/gin-swagger"
"github.com/urfave/cli/v2"
"google.golang.org/grpc"
"net/http"
"time"
)
// Option defines a function that configures a Router
type Option func(*RouterConfig)
// RouterConfig holds all router configuration
type RouterConfig struct {
port int
devClient *grpc.ClientConn
mapsClient *grpc.ClientConn
context *cli.Context
shutdownTimeout time.Duration
}
// WithPort sets the port for the server
func WithPort(port int) Option {
return func(cfg *RouterConfig) {
cfg.port = port
}
}
// WithContext sets the context for the server
func WithContext(ctx *cli.Context) Option {
return func(cfg *RouterConfig) {
cfg.context = ctx
}
}
// WithDevClient sets the dev gRPC client
func WithDevClient(conn *grpc.ClientConn) Option {
return func(cfg *RouterConfig) {
cfg.devClient = conn
}
}
// WithMapsClient sets the data gRPC client
func WithMapsClient(conn *grpc.ClientConn) Option {
return func(cfg *RouterConfig) {
cfg.mapsClient = conn
}
}
// WithShutdownTimeout sets the graceful shutdown timeout
func WithShutdownTimeout(timeout time.Duration) Option {
return func(cfg *RouterConfig) {
cfg.shutdownTimeout = timeout
}
}
func setupRoutes(cfg *RouterConfig) (*gin.Engine, error) {
r := gin.Default()
r.ForwardedByClientIP = true
r.Use(gin.Logger())
r.Use(gin.Recovery())
handlerOptions := []handlers.HandlerOption{
handlers.WithMapsClient(cfg.mapsClient),
}
// Maps handler
mapsHandler, err := handlers.NewMapHandler(handlerOptions...)
if err != nil {
return nil, err
}
docs.SwaggerInfo.BasePath = "/public-api/v1"
public_api := r.Group("/public-api")
{
v1 := public_api.Group("/v1")
{
// Auth middleware
v1.Use(middleware.ValidateRequest("Maps", "Read", cfg.devClient))
// Maps
v1.GET("/map", mapsHandler.List)
v1.GET("/map/:id", mapsHandler.Get)
}
// Docs
public_api.GET("/docs/*any", ginSwagger.WrapHandler(swaggerfiles.Handler))
public_api.GET("/", func(ctx *gin.Context) {
ctx.Redirect(http.StatusPermanentRedirect, "/public-api/docs/index.html")
})
}
return r, nil
}
// NewRouter creates a new router with the given options
func NewRouter(options ...Option) error {
// Default configuration
cfg := &RouterConfig{
port: 8080, // Default port
context: nil,
shutdownTimeout: 5 * time.Second,
}
// Apply options
for _, option := range options {
option(cfg)
}
// Validate configuration
if cfg.context == nil {
return errors.New("context is required")
}
if cfg.devClient == nil {
return errors.New("dev client is required")
}
routes, err := setupRoutes(cfg)
if err != nil {
return err
}
log.Info("Starting server")
return runServer(cfg.context.Context, fmt.Sprint(":", cfg.port), routes, cfg.shutdownTimeout)
}
func runServer(ctx context.Context, addr string, r *gin.Engine, shutdownTimeout time.Duration) error {
srv := &http.Server{
Addr: addr,
Handler: r,
}
// Run the server in a separate goroutine
go func() {
if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
log.WithError(err).Fatal("web server exit")
}
}()
// Wait for a shutdown signal
<-ctx.Done()
// Shutdown server gracefully
ctxShutdown, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
defer cancel()
return srv.Shutdown(ctxShutdown)
}
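runServer above shuts down only when the caller's context is cancelled; in the removed code that context came from the CLI. For illustration, a standalone equivalent driven by OS signals via the standard library's signal.NotifyContext (the address and 5-second timeout simply mirror the defaults above):
package api
import (
"context"
"os"
"os/signal"
"syscall"
"time"
"github.com/gin-gonic/gin"
)
// runWithSignals cancels the context on SIGINT/SIGTERM so runServer performs
// a graceful shutdown. Illustrative entry point, not part of the removed code.
func runWithSignals(r *gin.Engine) error {
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
defer stop()
return runServer(ctx, ":8080", r, 5*time.Second)
}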

View File

@@ -1,160 +0,0 @@
package roblox
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
)
// ThumbnailSize represents valid Roblox thumbnail sizes
type ThumbnailSize string
const (
Size150x150 ThumbnailSize = "150x150"
Size420x420 ThumbnailSize = "420x420"
Size768x432 ThumbnailSize = "768x432"
)
// ThumbnailFormat represents the image format
type ThumbnailFormat string
const (
FormatPng ThumbnailFormat = "Png"
FormatJpeg ThumbnailFormat = "Jpeg"
)
// ThumbnailRequest represents a single thumbnail request
type ThumbnailRequest struct {
RequestID string `json:"requestId,omitempty"`
Type string `json:"type"`
TargetID uint64 `json:"targetId"`
Size string `json:"size,omitempty"`
Format string `json:"format,omitempty"`
}
// ThumbnailData represents a single thumbnail response
type ThumbnailData struct {
TargetID uint64 `json:"targetId"`
State string `json:"state"` // "Completed", "Error", "Pending"
ImageURL string `json:"imageUrl"`
}
// BatchThumbnailsResponse represents the API response
type BatchThumbnailsResponse struct {
Data []ThumbnailData `json:"data"`
}
// GetAssetThumbnails fetches thumbnails for multiple assets in a single batch request
// Roblox allows up to 100 assets per batch
func (c *Client) GetAssetThumbnails(assetIDs []uint64, size ThumbnailSize, format ThumbnailFormat) ([]ThumbnailData, error) {
if len(assetIDs) == 0 {
return []ThumbnailData{}, nil
}
if len(assetIDs) > 100 {
return nil, GetError("batch size cannot exceed 100 assets")
}
// Build request payload - the API expects an array directly, not wrapped in an object
requests := make([]ThumbnailRequest, len(assetIDs))
for i, assetID := range assetIDs {
requests[i] = ThumbnailRequest{
Type: "Asset",
TargetID: assetID,
Size: string(size),
Format: string(format),
}
}
jsonData, err := json.Marshal(requests)
if err != nil {
return nil, GetError("JSONMarshalError: " + err.Error())
}
req, err := http.NewRequest("POST", "https://thumbnails.roblox.com/v1/batch", bytes.NewBuffer(jsonData))
if err != nil {
return nil, GetError("RequestCreationError: " + err.Error())
}
req.Header.Set("Content-Type", "application/json")
resp, err := c.HttpClient.Do(req)
if err != nil {
return nil, GetError("RequestError: " + err.Error())
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return nil, GetError(fmt.Sprintf("ResponseError: status code %d, body: %s", resp.StatusCode, string(body)))
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, GetError("ReadBodyError: " + err.Error())
}
var response BatchThumbnailsResponse
if err := json.Unmarshal(body, &response); err != nil {
return nil, GetError("JSONUnmarshalError: " + err.Error())
}
return response.Data, nil
}
// GetUserAvatarThumbnails fetches avatar thumbnails for multiple users in a single batch request
func (c *Client) GetUserAvatarThumbnails(userIDs []uint64, size ThumbnailSize, format ThumbnailFormat) ([]ThumbnailData, error) {
if len(userIDs) == 0 {
return []ThumbnailData{}, nil
}
if len(userIDs) > 100 {
return nil, GetError("batch size cannot exceed 100 users")
}
// Build request payload - the API expects an array directly, not wrapped in an object
requests := make([]ThumbnailRequest, len(userIDs))
for i, userID := range userIDs {
requests[i] = ThumbnailRequest{
Type: "AvatarHeadShot",
TargetID: userID,
Size: string(size),
Format: string(format),
}
}
jsonData, err := json.Marshal(requests)
if err != nil {
return nil, GetError("JSONMarshalError: " + err.Error())
}
req, err := http.NewRequest("POST", "https://thumbnails.roblox.com/v1/batch", bytes.NewBuffer(jsonData))
if err != nil {
return nil, GetError("RequestCreationError: " + err.Error())
}
req.Header.Set("Content-Type", "application/json")
resp, err := c.HttpClient.Do(req)
if err != nil {
return nil, GetError("RequestError: " + err.Error())
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return nil, GetError(fmt.Sprintf("ResponseError: status code %d, body: %s", resp.StatusCode, string(body)))
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, GetError("ReadBodyError: " + err.Error())
}
var response BatchThumbnailsResponse
if err := json.Unmarshal(body, &response); err != nil {
return nil, GetError("JSONUnmarshalError: " + err.Error())
}
return response.Data, nil
}
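A minimal usage sketch of the batch thumbnail helper above (illustrative only, not part of this diff): it assumes a *roblox.Client has already been constructed elsewhere, uses only the constants and types defined in this file, and passes at most 100 asset IDs per call as the function requires.
package example
import (
	"fmt"
	"git.itzana.me/strafesnet/maps-service/pkg/roblox"
)
// printCompletedAssetThumbnails is a hypothetical helper: it batch-fetches
// thumbnails for up to 100 assets and prints the URLs that completed.
func printCompletedAssetThumbnails(c *roblox.Client, assetIDs []uint64) error {
	thumbs, err := c.GetAssetThumbnails(assetIDs, roblox.Size420x420, roblox.FormatPng)
	if err != nil {
		return err
	}
	for _, t := range thumbs {
		// Only "Completed" entries carry a usable image URL.
		if t.State == "Completed" {
			fmt.Printf("asset %d -> %s\n", t.TargetID, t.ImageURL)
		}
	}
	return nil
}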

View File

@@ -1,72 +0,0 @@
package roblox
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
)
// UserData represents a single user's information
type UserData struct {
ID uint64 `json:"id"`
Name string `json:"name"`
DisplayName string `json:"displayName"`
}
// BatchUsersResponse represents the API response for batch user requests
type BatchUsersResponse struct {
Data []UserData `json:"data"`
}
// GetUsernames fetches usernames for multiple users in a single batch request
// Roblox allows up to 100 users per batch
func (c *Client) GetUsernames(userIDs []uint64) ([]UserData, error) {
if len(userIDs) == 0 {
return []UserData{}, nil
}
if len(userIDs) > 100 {
return nil, GetError("batch size cannot exceed 100 users")
}
// Build request payload
payload := map[string][]uint64{
"userIds": userIDs,
}
jsonData, err := json.Marshal(payload)
if err != nil {
return nil, GetError("JSONMarshalError: " + err.Error())
}
req, err := http.NewRequest("POST", "https://users.roblox.com/v1/users", bytes.NewBuffer(jsonData))
if err != nil {
return nil, GetError("RequestCreationError: " + err.Error())
}
req.Header.Set("Content-Type", "application/json")
resp, err := c.HttpClient.Do(req)
if err != nil {
return nil, GetError("RequestError: " + err.Error())
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return nil, GetError(fmt.Sprintf("ResponseError: status code %d, body: %s", resp.StatusCode, string(body)))
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, GetError("ReadBodyError: " + err.Error())
}
var response BatchUsersResponse
if err := json.Unmarshal(body, &response); err != nil {
return nil, GetError("JSONUnmarshalError: " + err.Error())
}
return response.Data, nil
}
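A similar hedged sketch for the username batch endpoint: the helper name and package are hypothetical, and the caller is again responsible for keeping each batch at or below 100 IDs.
package example
import "git.itzana.me/strafesnet/maps-service/pkg/roblox"
// buildUsernameMap is a hypothetical helper: it turns the batch response
// from GetUsernames into an ID -> username map.
func buildUsernameMap(c *roblox.Client, userIDs []uint64) (map[uint64]string, error) {
	users, err := c.GetUsernames(userIDs) // at most 100 IDs per call
	if err != nil {
		return nil, err
	}
	names := make(map[uint64]string, len(users))
	for _, u := range users {
		names[u.ID] = u.Name
	}
	return names, nil
}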

View File

@@ -1,30 +0,0 @@
package service
import (
"context"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
)
// AOR Event service methods
func (svc *Service) GetAOREvent(ctx context.Context, id int64) (model.AOREvent, error) {
return svc.db.AOREvents().Get(ctx, id)
}
func (svc *Service) GetActiveAOREvent(ctx context.Context) (model.AOREvent, error) {
return svc.db.AOREvents().GetActive(ctx)
}
func (svc *Service) ListAOREvents(ctx context.Context, page model.Page) ([]model.AOREvent, error) {
return svc.db.AOREvents().List(ctx, datastore.Optional(), page)
}
func (svc *Service) GetAORSubmissionsByEvent(ctx context.Context, eventID int64) ([]model.Submission, error) {
return svc.db.AORSubmissions().ListWithSubmissions(ctx, eventID)
}
func (svc *Service) GetAORSubmissionsBySubmission(ctx context.Context, submissionID int64) ([]model.AORSubmission, error) {
return svc.db.AORSubmissions().GetBySubmission(ctx, submissionID)
}

View File

@@ -1,389 +0,0 @@
package service
import (
"context"
"time"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
log "github.com/sirupsen/logrus"
)
// AORScheduler manages AOR events and their lifecycle
type AORScheduler struct {
ds datastore.Datastore
ctx context.Context
}
// NewAORScheduler creates a new AOR scheduler
func NewAORScheduler(ds datastore.Datastore) *AORScheduler {
return &AORScheduler{
ds: ds,
ctx: context.Background(),
}
}
// ProcessAOREvents is the main entry point for the cron job
// It checks and updates AOR event statuses
func (s *AORScheduler) ProcessAOREvents() error {
log.Info("AOR Scheduler: Processing events")
// Initialize: create next AOR event if none exists
if err := s.ensureNextAOREvent(); err != nil {
log.WithError(err).Error("Failed to ensure next AOR event")
return err
}
// Process current active event
if err := s.processAOREvents(); err != nil {
log.WithError(err).Error("Failed to process AOR events")
return err
}
log.Info("AOR Scheduler: Processing completed successfully")
return nil
}
// ensureNextAOREvent creates the next AOR event if one doesn't exist
func (s *AORScheduler) ensureNextAOREvent() error {
// Check if there's an active or scheduled event
_, err := s.ds.AOREvents().GetActive(s.ctx)
if err == nil {
// Event exists, nothing to do
return nil
}
if err != datastore.ErrNotExist {
return err
}
// No active event, create the next one
nextDate := s.calculateNextAORDate(time.Now())
return s.createAOREvent(nextDate)
}
// calculateNextAORDate calculates the next AOR start date
// AOR events are held every 4 months: April, August, December
func (s *AORScheduler) calculateNextAORDate(from time.Time) time.Time {
aorMonths := []time.Month{time.April, time.August, time.December}
currentYear := from.Year()
currentMonth := from.Month()
// Find the next AOR month
for _, month := range aorMonths {
if month > currentMonth {
// Next AOR is this year
return time.Date(currentYear, month, 1, 0, 0, 0, 0, time.UTC)
}
}
// Next AOR is in April of next year
return time.Date(currentYear+1, time.April, 1, 0, 0, 0, 0, time.UTC)
}
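A quick worked example of the date logic above, written as a test sketch (illustrative only, not part of this diff); it would have to live in the same service package because the method is unexported.
package service
import (
	"testing"
	"time"
)
// TestCalculateNextAORDate illustrates the expected schedule.
func TestCalculateNextAORDate(t *testing.T) {
	s := &AORScheduler{}
	// A mid-year input resolves to the next AOR month in the same year.
	got := s.calculateNextAORDate(time.Date(2025, time.May, 15, 0, 0, 0, 0, time.UTC))
	want := time.Date(2025, time.August, 1, 0, 0, 0, 0, time.UTC)
	if !got.Equal(want) {
		t.Fatalf("got %v, want %v", got, want)
	}
	// A December input wraps around to April of the following year.
	got = s.calculateNextAORDate(time.Date(2025, time.December, 20, 0, 0, 0, 0, time.UTC))
	want = time.Date(2026, time.April, 1, 0, 0, 0, 0, time.UTC)
	if !got.Equal(want) {
		t.Fatalf("got %v, want %v", got, want)
	}
}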
// createAOREvent creates a new AOR event with calculated dates
func (s *AORScheduler) createAOREvent(startDate time.Time) error {
freezeDate := startDate.Add(24*time.Hour - time.Second) // End of first day (23:59:59)
selectionDate := startDate.Add(7 * 24 * time.Hour) // 7 days after start
// Decision date is the last day of the month at 23:59:59
// Calculate the first day of next month, then subtract 1 second
year, month, _ := startDate.Date()
firstOfNextMonth := time.Date(year, month+1, 1, 0, 0, 0, 0, time.UTC)
decisionDate := firstOfNextMonth.Add(-time.Second)
event := model.AOREvent{
StartDate: startDate,
FreezeDate: freezeDate,
SelectionDate: selectionDate,
DecisionDate: decisionDate,
Status: model.AOREventStatusScheduled,
}
_, err := s.ds.AOREvents().Create(s.ctx, event)
if err != nil {
return err
}
log.WithFields(log.Fields{
"start_date": startDate,
"freeze_date": freezeDate,
"selection_date": selectionDate,
"decision_date": decisionDate,
}).Info("Created new AOR event")
return nil
}
// processAOREvents checks and updates AOR event statuses
func (s *AORScheduler) processAOREvents() error {
now := time.Now()
// Get active event
event, err := s.ds.AOREvents().GetActive(s.ctx)
if err == datastore.ErrNotExist {
// No active event, ensure one is created
return s.ensureNextAOREvent()
}
if err != nil {
return err
}
// Process event based on current status and dates
switch event.Status {
case model.AOREventStatusScheduled:
// Check if event should start (it's now the 1st of the AOR month)
if now.After(event.StartDate) || now.Equal(event.StartDate) {
if err := s.openAOREvent(event.ID); err != nil {
return err
}
}
case model.AOREventStatusOpen:
// Check if submissions should be frozen (past the freeze date)
if now.After(event.FreezeDate) {
if err := s.freezeAOREvent(event.ID); err != nil {
return err
}
}
case model.AOREventStatusFrozen:
// Check if it's time to select submissions (past selection date)
if now.After(event.SelectionDate) {
if err := s.selectSubmissions(event.ID); err != nil {
return err
}
}
case model.AOREventStatusSelected:
// Check if it's time to finalize decisions (past decision date)
if now.After(event.DecisionDate) {
if err := s.finalizeDecisions(event.ID); err != nil {
return err
}
}
case model.AOREventStatusCompleted:
// Event completed, create next one and close this one
nextDate := s.calculateNextAORDate(event.StartDate)
if err := s.createAOREvent(nextDate); err != nil {
return err
}
if err := s.closeAOREvent(event.ID); err != nil {
return err
}
}
return nil
}
// openAOREvent transitions an event to Open status
func (s *AORScheduler) openAOREvent(eventID int64) error {
err := s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusOpen))
if err != nil {
return err
}
log.WithField("event_id", eventID).Info("AOR event opened - submissions now accepted")
return nil
}
// freezeAOREvent transitions an event to Frozen status
// TODO: lock submissions against updates
func (s *AORScheduler) freezeAOREvent(eventID int64) error {
err := s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusFrozen))
if err != nil {
return err
}
log.WithField("event_id", eventID).Info("AOR event frozen - submissions locked")
return nil
}
// selectSubmissions automatically selects qualifying submissions
func (s *AORScheduler) selectSubmissions(eventID int64) error {
// Get all submissions in Submitted status
submissions, err := s.ds.Submissions().List(s.ctx, datastore.Optional().Add("status_id", model.SubmissionStatusSubmitted), model.Page{Number: 0, Size: 0}, datastore.ListSortDisabled)
if err != nil {
return err
}
selectedCount := 0
for _, submission := range submissions {
// Get all reviews for this submission
reviews, err := s.ds.SubmissionReviews().ListBySubmission(s.ctx, submission.ID)
if err != nil {
log.WithError(err).WithField("submission_id", submission.ID).Error("Failed to get reviews")
continue
}
// Apply selection criteria
if s.shouldAddToAOR(reviews) {
// Add to AOR event
aorSubmission := model.AORSubmission{
AOREventID: eventID,
SubmissionID: submission.ID,
AddedAt: time.Now(),
}
_, err := s.ds.AORSubmissions().Create(s.ctx, aorSubmission)
if err != nil {
log.WithError(err).WithField("submission_id", submission.ID).Error("Failed to add submission to AOR")
continue
}
selectedCount++
log.WithField("submission_id", submission.ID).Info("Added submission to AOR event")
}
}
// Mark event as selected (waiting for end of month to finalize)
err = s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusSelected))
if err != nil {
return err
}
log.WithFields(log.Fields{
"event_id": eventID,
"selected_count": selectedCount,
}).Info("AOR submission selection completed - waiting for end of month to finalize decisions")
return nil
}
// shouldAddToAOR determines if a submission should be added to the AOR event
// Criteria:
// - If there are 0 non-outdated reviews: NOT added
// - If at least half (>=50%) of the non-outdated reviews recommend: added
// TODO: Audit events
func (s *AORScheduler) shouldAddToAOR(reviews []model.SubmissionReview) bool {
// Filter out outdated reviews
var validReviews []model.SubmissionReview
for _, review := range reviews {
if !review.Outdated {
validReviews = append(validReviews, review)
}
}
// If there are 0 valid reviews, don't add
if len(validReviews) == 0 {
return false
}
// Count recommendations
recommendCount := 0
for _, review := range validReviews {
if review.Recommend {
recommendCount++
}
}
// Need at least 50% recommendations (2 accept + 2 deny = 50% = added)
// This means recommendCount * 2 >= len(validReviews)
return recommendCount*2 >= len(validReviews)
}
// shouldAccept determines if a submission should be accepted in final decisions
// Criteria: Must have >50% (strictly greater than) recommendations
func (s *AORScheduler) shouldAccept(reviews []model.SubmissionReview) bool {
// Filter out outdated reviews
var validReviews []model.SubmissionReview
for _, review := range reviews {
if !review.Outdated {
validReviews = append(validReviews, review)
}
}
// If there are 0 valid reviews, don't accept
if len(validReviews) == 0 {
return false
}
// Count recommendations
recommendCount := 0
for _, review := range validReviews {
if review.Recommend {
recommendCount++
}
}
// Need MORE than 50% recommendations (strictly greater)
// This means recommendCount * 2 > len(validReviews)
return recommendCount*2 > len(validReviews)
}
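A worked example of the two cutoffs, again as a test sketch in the same package (illustrative only; field names come from the model.SubmissionReview usage shown above): a 2-2 split clears the selection bar (>=50%) but not the final acceptance bar (strictly >50%).
package service
import (
	"testing"
	"git.itzana.me/strafesnet/maps-service/pkg/model"
)
// TestVoteThresholds illustrates selection vs. final acceptance.
func TestVoteThresholds(t *testing.T) {
	s := &AORScheduler{}
	reviews := []model.SubmissionReview{
		{Recommend: true}, {Recommend: true},
		{Recommend: false}, {Recommend: false},
	}
	if !s.shouldAddToAOR(reviews) {
		t.Error("expected 2 of 4 recommends to qualify for selection (>=50%)")
	}
	if s.shouldAccept(reviews) {
		t.Error("expected 2 of 4 recommends to fail final acceptance (needs >50%)")
	}
}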
// finalizeDecisions makes final accept/reject decisions at end of month
// Submissions in the AOR event with >50% recommends are accepted
// Submissions in the AOR event with <=50% recommends are rejected
// TODO: Implement acceptance logic
// TODO: Query the Roblox group to get the min votes needed for acceptance
// TODO: Audit events
func (s *AORScheduler) finalizeDecisions(eventID int64) error {
// Get all submissions that were selected for this AOR event
aorSubmissions, err := s.ds.AORSubmissions().GetByAOREvent(s.ctx, eventID)
if err != nil {
return err
}
acceptedCount := 0
rejectedCount := 0
// Process each submission in the AOR event
for _, aorSub := range aorSubmissions {
// Get the submission
submission, err := s.ds.Submissions().Get(s.ctx, aorSub.SubmissionID)
if err != nil {
log.WithError(err).WithField("submission_id", aorSub.SubmissionID).Error("Failed to get submission")
continue
}
// Get all reviews for this submission
reviews, err := s.ds.SubmissionReviews().ListBySubmission(s.ctx, aorSub.SubmissionID)
if err != nil {
log.WithError(err).WithField("submission_id", aorSub.SubmissionID).Error("Failed to get reviews")
continue
}
// Check if submission has >50% recommends (strictly greater)
if s.shouldAccept(reviews) {
// This submission has >50% recommends - accept it
// TODO: Implement acceptance logic
// For now, this is a placeholder
log.WithField("submission_id", submission.ID).Info("TODO: Accept submission (placeholder)")
acceptedCount++
} else {
// This submission does not have >50% recommends - reject it
err := s.ds.Submissions().Update(s.ctx, submission.ID, datastore.Optional().Add("status_id", model.SubmissionStatusRejected))
if err != nil {
log.WithError(err).WithField("submission_id", submission.ID).Error("Failed to reject submission")
continue
}
log.WithField("submission_id", submission.ID).Info("Rejected submission")
rejectedCount++
}
}
// Mark event as completed
err = s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusCompleted))
if err != nil {
return err
}
log.WithFields(log.Fields{
"event_id": eventID,
"accepted_count": acceptedCount,
"rejected_count": rejectedCount,
}).Info("AOR decisions finalized")
return nil
}
// closeAOREvent transitions an event to Closed status
func (s *AORScheduler) closeAOREvent(eventID int64) error {
err := s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusClosed))
if err != nil {
return err
}
log.WithField("event_id", eventID).Info("AOR event closed")
return nil
}

View File

@@ -27,7 +27,7 @@ func (update MapUpdate) SetGameID(game_id uint32) {
datastore.OptionalMap(update).Add("game_id", game_id)
}
func (update MapUpdate) SetDate(date int64) {
datastore.OptionalMap(update).Add("date", time.Unix(date, 0))
datastore.OptionalMap(update).Add("date", date)
}
func (update MapUpdate) SetSubmitter(submitter uint64) {
datastore.OptionalMap(update).Add("submitter", submitter)
@@ -47,12 +47,16 @@ func (update MapUpdate) GetDisplayName() (string, bool) {
value, ok := datastore.OptionalMap(update).Map()["display_name"].(string)
return value, ok
}
func (update MapUpdate) GetCreator() (string, bool) {
value, ok := datastore.OptionalMap(update).Map()["creator"].(string)
return value, ok
}
func (update MapUpdate) GetGameID() (uint32, bool) {
value, ok := datastore.OptionalMap(update).Map()["game_id"].(uint32)
return value, ok
}
func (update MapUpdate) GetThumbnail() (uint64, bool) {
value, ok := datastore.OptionalMap(update).Map()["thumbnail"].(uint64)
func (update MapUpdate) GetDate() (int64, bool) {
value, ok := datastore.OptionalMap(update).Map()["date"].(int64)
return value, ok
}
@@ -77,6 +81,35 @@ func (update MapFilter) SetSubmitter(submitter uint64) {
datastore.OptionalMap(update).Add("submitter", submitter)
}
func (svc *Service) TEMP_DoMapsMigration(ctx context.Context) (error) {
// get all maps
maps, err := svc.maps.List(ctx, &maps.ListRequest{})
if err != nil {
return err
}
// create all maps
for _, item := range maps.Maps {
migrated := model.Map{
ID: item.ID,
DisplayName: item.DisplayName,
Creator: item.Creator,
GameID: uint32(item.GameID),
Date: time.Unix(item.Date, 0),
// CreatedAt: time.Time{},
// UpdatedAt: time.Time{},
// Submitter: 0,
// Thumbnail: 0,
// AssetVersion: 0,
// LoadCount: 0,
// Modes: 0,
}
_, err := svc.db.Maps().Create(ctx, migrated)
if err != nil {
return err
}
}
return nil
}
func (svc *Service) CreateMap(ctx context.Context, item model.Map) (int64, error) {
// 2 jobs:
// create map on maps-service
@@ -85,12 +118,14 @@ func (svc *Service) CreateMap(ctx context.Context, item model.Map) (int64, error
return 0, err
}
// create map on data-service
date := item.Date.Unix()
game_id := int32(item.GameID)
_, err = svc.maps.Create(ctx, &maps.MapRequest{
ID: item.ID,
DisplayName: &item.DisplayName,
Creator: &item.Creator,
GameID: &game_id,
Thumbnail: &item.Thumbnail,
Date: &date,
})
if err != nil {
return 0, err
@@ -131,12 +166,15 @@ func (svc *Service) UpdateMap(ctx context.Context, id int64, pmap MapUpdate) err
if display_name, ok := pmap.GetDisplayName(); ok {
update.DisplayName = &display_name
}
if creator, ok := pmap.GetCreator(); ok {
update.Creator = &creator
}
if game_id, ok := pmap.GetGameID(); ok {
game_id_int32 := int32(game_id)
update.GameID = &game_id_int32
}
if thumbnail, ok := pmap.GetThumbnail(); ok {
update.Thumbnail = &thumbnail
if date, ok := pmap.GetDate(); ok {
update.Date = &date
}
_, err = svc.maps.Update(ctx, &update)
if err != nil {

View File

@@ -112,29 +112,3 @@ func (svc *Service) NatsValidateMapfix(
return nil
}
func (svc *Service) NatsReleaseMapfix(
MapfixID int64,
ModelID uint64,
ModelVersion uint64,
TargetAssetID uint64,
) error {
release_fix_request := model.ReleaseMapfixRequest{
MapfixID: MapfixID,
ModelID: ModelID,
ModelVersion: ModelVersion,
TargetAssetID: TargetAssetID,
}
j, err := json.Marshal(release_fix_request)
if err != nil {
return err
}
_, err = svc.nats.Publish("maptest.mapfixes.release", []byte(j))
if err != nil {
return err
}
return nil
}

View File

@@ -88,28 +88,6 @@ func (svc *Service) NatsUploadSubmission(
return nil
}
func (svc *Service) NatsBatchReleaseSubmissions(
Submissions []model.ReleaseSubmissionRequest,
operation int32,
) error {
release_new_request := model.BatchReleaseSubmissionsRequest{
Submissions: Submissions,
OperationID: operation,
}
j, err := json.Marshal(release_new_request)
if err != nil {
return err
}
_, err = svc.nats.Publish("maptest.submissions.batchrelease", []byte(j))
if err != nil {
return err
}
return nil
}
func (svc *Service) NatsValidateSubmission(
SubmissionID int64,
ModelID uint64,

View File

@@ -1,22 +1,17 @@
package service
import (
"context"
"git.itzana.me/strafesnet/go-grpc/maps"
"git.itzana.me/strafesnet/go-grpc/users"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
"github.com/nats-io/nats.go"
"github.com/redis/go-redis/v9"
)
type Service struct {
db datastore.Datastore
nats nats.JetStreamContext
maps maps.MapsServiceClient
users users.UsersServiceClient
thumbnailService *ThumbnailService
db datastore.Datastore
nats nats.JetStreamContext
maps maps.MapsServiceClient
users users.UsersServiceClient
}
func NewService(
@@ -24,44 +19,11 @@ func NewService(
nats nats.JetStreamContext,
maps maps.MapsServiceClient,
users users.UsersServiceClient,
robloxClient *roblox.Client,
redisClient *redis.Client,
) Service {
return Service{
db: db,
nats: nats,
maps: maps,
users: users,
thumbnailService: NewThumbnailService(robloxClient, redisClient),
db: db,
nats: nats,
maps: maps,
users: users,
}
}
// GetAssetThumbnails proxies to the thumbnail service
func (s *Service) GetAssetThumbnails(ctx context.Context, assetIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
return s.thumbnailService.GetAssetThumbnails(ctx, assetIDs, size)
}
// GetUserAvatarThumbnails proxies to the thumbnail service
func (s *Service) GetUserAvatarThumbnails(ctx context.Context, userIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
return s.thumbnailService.GetUserAvatarThumbnails(ctx, userIDs, size)
}
// GetSingleAssetThumbnail proxies to the thumbnail service
func (s *Service) GetSingleAssetThumbnail(ctx context.Context, assetID uint64, size roblox.ThumbnailSize) (string, error) {
return s.thumbnailService.GetSingleAssetThumbnail(ctx, assetID, size)
}
// GetSingleUserAvatarThumbnail proxies to the thumbnail service
func (s *Service) GetSingleUserAvatarThumbnail(ctx context.Context, userID uint64, size roblox.ThumbnailSize) (string, error) {
return s.thumbnailService.GetSingleUserAvatarThumbnail(ctx, userID, size)
}
// GetUsernames proxies to the thumbnail service
func (s *Service) GetUsernames(ctx context.Context, userIDs []uint64) (map[uint64]string, error) {
return s.thumbnailService.GetUsernames(ctx, userIDs)
}
// GetSingleUsername proxies to the thumbnail service
func (s *Service) GetSingleUsername(ctx context.Context, userID uint64) (string, error) {
return s.thumbnailService.GetSingleUsername(ctx, userID)
}

View File

@@ -1,55 +0,0 @@
package service
import (
"context"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
)
type SubmissionReviewUpdate datastore.OptionalMap
func NewSubmissionReviewUpdate() SubmissionReviewUpdate {
update := datastore.Optional()
return SubmissionReviewUpdate(update)
}
func (update SubmissionReviewUpdate) SetRecommend(recommend bool) {
datastore.OptionalMap(update).Add("recommend", recommend)
}
func (update SubmissionReviewUpdate) SetDescription(description string) {
datastore.OptionalMap(update).Add("description", description)
}
func (update SubmissionReviewUpdate) SetOutdated(outdated bool) {
datastore.OptionalMap(update).Add("outdated", outdated)
}
func (svc *Service) CreateSubmissionReview(ctx context.Context, review model.SubmissionReview) (model.SubmissionReview, error) {
return svc.db.SubmissionReviews().Create(ctx, review)
}
func (svc *Service) GetSubmissionReview(ctx context.Context, id int64) (model.SubmissionReview, error) {
return svc.db.SubmissionReviews().Get(ctx, id)
}
func (svc *Service) GetSubmissionReviewBySubmissionAndReviewer(ctx context.Context, submissionID int64, reviewerID uint64) (model.SubmissionReview, error) {
return svc.db.SubmissionReviews().GetBySubmissionAndReviewer(ctx, submissionID, reviewerID)
}
func (svc *Service) UpdateSubmissionReview(ctx context.Context, id int64, update SubmissionReviewUpdate) error {
return svc.db.SubmissionReviews().Update(ctx, id, datastore.OptionalMap(update))
}
func (svc *Service) DeleteSubmissionReview(ctx context.Context, id int64) error {
return svc.db.SubmissionReviews().Delete(ctx, id)
}
func (svc *Service) ListSubmissionReviewsBySubmission(ctx context.Context, submissionID int64) ([]model.SubmissionReview, error) {
return svc.db.SubmissionReviews().ListBySubmission(ctx, submissionID)
}
func (svc *Service) MarkSubmissionReviewsOutdated(ctx context.Context, submissionID int64) error {
return svc.db.SubmissionReviews().MarkOutdatedBySubmission(ctx, submissionID)
}

View File

@@ -1,218 +0,0 @@
package service
import (
"context"
"encoding/json"
"fmt"
"time"
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
"github.com/redis/go-redis/v9"
)
type ThumbnailService struct {
robloxClient *roblox.Client
redisClient *redis.Client
cacheTTL time.Duration
}
func NewThumbnailService(robloxClient *roblox.Client, redisClient *redis.Client) *ThumbnailService {
return &ThumbnailService{
robloxClient: robloxClient,
redisClient: redisClient,
cacheTTL: 24 * time.Hour, // Cache thumbnails for 24 hours
}
}
// CachedThumbnail represents a cached thumbnail entry
type CachedThumbnail struct {
ImageURL string `json:"imageUrl"`
State string `json:"state"`
CachedAt time.Time `json:"cachedAt"`
}
// GetAssetThumbnails fetches thumbnails with Redis caching and batching
func (s *ThumbnailService) GetAssetThumbnails(ctx context.Context, assetIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
if len(assetIDs) == 0 {
return map[uint64]string{}, nil
}
result := make(map[uint64]string)
var missingIDs []uint64
// Try to get from cache first
for _, assetID := range assetIDs {
cacheKey := fmt.Sprintf("thumbnail:asset:%d:%s", assetID, size)
cached, err := s.redisClient.Get(ctx, cacheKey).Result()
if err == redis.Nil {
// Cache miss
missingIDs = append(missingIDs, assetID)
} else if err != nil {
// Redis error - treat as cache miss
missingIDs = append(missingIDs, assetID)
} else {
// Cache hit
var thumbnail CachedThumbnail
if err := json.Unmarshal([]byte(cached), &thumbnail); err == nil && thumbnail.State == "Completed" {
result[assetID] = thumbnail.ImageURL
} else {
missingIDs = append(missingIDs, assetID)
}
}
}
// If all were cached, return early
if len(missingIDs) == 0 {
return result, nil
}
// Batch fetch missing thumbnails from Roblox API
// Split into batches of 100 (Roblox API limit)
for i := 0; i < len(missingIDs); i += 100 {
end := i + 100
if end > len(missingIDs) {
end = len(missingIDs)
}
batch := missingIDs[i:end]
thumbnails, err := s.robloxClient.GetAssetThumbnails(batch, size, roblox.FormatPng)
if err != nil {
return nil, fmt.Errorf("failed to fetch thumbnails: %w", err)
}
// Process results and cache them
for _, thumb := range thumbnails {
cached := CachedThumbnail{
ImageURL: thumb.ImageURL,
State: thumb.State,
CachedAt: time.Now(),
}
if thumb.State == "Completed" && thumb.ImageURL != "" {
result[thumb.TargetID] = thumb.ImageURL
}
// Cache the result (even if incomplete, to avoid repeated API calls)
cacheKey := fmt.Sprintf("thumbnail:asset:%d:%s", thumb.TargetID, size)
cachedJSON, _ := json.Marshal(cached)
// Use shorter TTL for incomplete thumbnails
ttl := s.cacheTTL
if thumb.State != "Completed" {
ttl = 5 * time.Minute
}
s.redisClient.Set(ctx, cacheKey, cachedJSON, ttl)
}
}
return result, nil
}
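A hypothetical wiring sketch for the cached batch lookup above: the Redis address and the way the *roblox.Client is obtained are assumptions, not specified in this diff; only NewThumbnailService, GetAssetThumbnails, and the go-redis v9 client constructor shown in the imports are taken from the code itself.
package service
import (
	"context"
	"git.itzana.me/strafesnet/maps-service/pkg/roblox"
	"github.com/redis/go-redis/v9"
)
// exampleThumbnailLookup shows one way a caller might use the cached lookup.
func exampleThumbnailLookup(ctx context.Context, robloxClient *roblox.Client, assetIDs []uint64) (map[uint64]string, error) {
	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"}) // assumed address
	ts := NewThumbnailService(robloxClient, rdb)
	// The first call hits the Roblox batch API and caches results for 24h;
	// repeat calls within the TTL are served straight from Redis.
	return ts.GetAssetThumbnails(ctx, assetIDs, roblox.Size420x420)
}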
// GetUserAvatarThumbnails fetches user avatar thumbnails with Redis caching and batching
func (s *ThumbnailService) GetUserAvatarThumbnails(ctx context.Context, userIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
if len(userIDs) == 0 {
return map[uint64]string{}, nil
}
result := make(map[uint64]string)
var missingIDs []uint64
// Try to get from cache first
for _, userID := range userIDs {
cacheKey := fmt.Sprintf("thumbnail:user:%d:%s", userID, size)
cached, err := s.redisClient.Get(ctx, cacheKey).Result()
if err == redis.Nil {
// Cache miss
missingIDs = append(missingIDs, userID)
} else if err != nil {
// Redis error - treat as cache miss
missingIDs = append(missingIDs, userID)
} else {
// Cache hit
var thumbnail CachedThumbnail
if err := json.Unmarshal([]byte(cached), &thumbnail); err == nil && thumbnail.State == "Completed" {
result[userID] = thumbnail.ImageURL
} else {
missingIDs = append(missingIDs, userID)
}
}
}
// If all were cached, return early
if len(missingIDs) == 0 {
return result, nil
}
// Batch fetch missing thumbnails from Roblox API
// Split into batches of 100 (Roblox API limit)
for i := 0; i < len(missingIDs); i += 100 {
end := i + 100
if end > len(missingIDs) {
end = len(missingIDs)
}
batch := missingIDs[i:end]
thumbnails, err := s.robloxClient.GetUserAvatarThumbnails(batch, size, roblox.FormatPng)
if err != nil {
return nil, fmt.Errorf("failed to fetch user thumbnails: %w", err)
}
// Process results and cache them
for _, thumb := range thumbnails {
cached := CachedThumbnail{
ImageURL: thumb.ImageURL,
State: thumb.State,
CachedAt: time.Now(),
}
if thumb.State == "Completed" && thumb.ImageURL != "" {
result[thumb.TargetID] = thumb.ImageURL
}
// Cache the result
cacheKey := fmt.Sprintf("thumbnail:user:%d:%s", thumb.TargetID, size)
cachedJSON, _ := json.Marshal(cached)
// Use shorter TTL for incomplete thumbnails
ttl := s.cacheTTL
if thumb.State != "Completed" {
ttl = 5 * time.Minute
}
s.redisClient.Set(ctx, cacheKey, cachedJSON, ttl)
}
}
return result, nil
}
// GetSingleAssetThumbnail is a convenience method for fetching a single asset thumbnail
func (s *ThumbnailService) GetSingleAssetThumbnail(ctx context.Context, assetID uint64, size roblox.ThumbnailSize) (string, error) {
results, err := s.GetAssetThumbnails(ctx, []uint64{assetID}, size)
if err != nil {
return "", err
}
if url, ok := results[assetID]; ok {
return url, nil
}
return "", fmt.Errorf("thumbnail not available for asset %d", assetID)
}
// GetSingleUserAvatarThumbnail is a convenience method for fetching a single user avatar thumbnail
func (s *ThumbnailService) GetSingleUserAvatarThumbnail(ctx context.Context, userID uint64, size roblox.ThumbnailSize) (string, error) {
results, err := s.GetUserAvatarThumbnails(ctx, []uint64{userID}, size)
if err != nil {
return "", err
}
if url, ok := results[userID]; ok {
return url, nil
}
return "", fmt.Errorf("thumbnail not available for user %d", userID)
}

View File

@@ -1,108 +0,0 @@
package service
import (
"context"
"encoding/json"
"fmt"
"time"
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
"github.com/redis/go-redis/v9"
)
// CachedUser represents a cached user entry
type CachedUser struct {
Name string `json:"name"`
DisplayName string `json:"displayName"`
CachedAt time.Time `json:"cachedAt"`
}
// GetUsernames fetches usernames with Redis caching and batching
func (s *ThumbnailService) GetUsernames(ctx context.Context, userIDs []uint64) (map[uint64]string, error) {
if len(userIDs) == 0 {
return map[uint64]string{}, nil
}
result := make(map[uint64]string)
var missingIDs []uint64
// Try to get from cache first
for _, userID := range userIDs {
cacheKey := fmt.Sprintf("user:name:%d", userID)
cached, err := s.redisClient.Get(ctx, cacheKey).Result()
if err == redis.Nil {
// Cache miss
missingIDs = append(missingIDs, userID)
} else if err != nil {
// Redis error - treat as cache miss
missingIDs = append(missingIDs, userID)
} else {
// Cache hit
var user CachedUser
if err := json.Unmarshal([]byte(cached), &user); err == nil && user.Name != "" {
result[userID] = user.Name
} else {
missingIDs = append(missingIDs, userID)
}
}
}
// If all were cached, return early
if len(missingIDs) == 0 {
return result, nil
}
// Batch fetch missing usernames from Roblox API
// Split into batches of 100 (Roblox API limit)
for i := 0; i < len(missingIDs); i += 100 {
end := i + 100
if end > len(missingIDs) {
end = len(missingIDs)
}
batch := missingIDs[i:end]
var users []roblox.UserData
var err error
users, err = s.robloxClient.GetUsernames(batch)
if err != nil {
return nil, fmt.Errorf("failed to fetch usernames: %w", err)
}
// Process results and cache them
for _, user := range users {
cached := CachedUser{
Name: user.Name,
DisplayName: user.DisplayName,
CachedAt: time.Now(),
}
if user.Name != "" {
result[user.ID] = user.Name
}
// Cache the result
cacheKey := fmt.Sprintf("user:name:%d", user.ID)
cachedJSON, _ := json.Marshal(cached)
// Cache usernames for a long time (7 days) since they rarely change
s.redisClient.Set(ctx, cacheKey, cachedJSON, 7*24*time.Hour)
}
}
return result, nil
}
// GetSingleUsername is a convenience method for fetching a single username
func (s *ThumbnailService) GetSingleUsername(ctx context.Context, userID uint64) (string, error) {
results, err := s.GetUsernames(ctx, []uint64{userID})
if err != nil {
return "", err
}
if name, ok := results[userID]; ok {
return name, nil
}
return "", fmt.Errorf("username not available for user %d", userID)
}

View File

@@ -26,8 +26,6 @@ func NewMapfixesController(
var(
// prevent two mapfixes with same asset id
ActiveMapfixStatuses = []model.MapfixStatus{
model.MapfixStatusReleasing,
model.MapfixStatusUploaded,
model.MapfixStatusUploading,
model.MapfixStatusValidated,
model.MapfixStatusValidating,
@@ -186,7 +184,7 @@ func (svc *Mapfixes) SetStatusValidated(ctx context.Context, params *validator.M
// (Internal endpoint) Role Validator changes status from Validating -> Accepted.
//
// POST /mapfixes/{MapfixID}/status/validator-failed
func (svc *Mapfixes) SetStatusNotValidated(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
func (svc *Mapfixes) SetStatusFailed(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
MapfixID := int64(params.ID)
// transaction
target_status := model.MapfixStatusAcceptedUnvalidated
@@ -255,117 +253,6 @@ func (svc *Mapfixes) SetStatusUploaded(ctx context.Context, params *validator.Ma
return &validator.NullResponse{}, nil
}
func (svc *Mapfixes) SetStatusNotUploaded(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
MapfixID := int64(params.ID)
// transaction
target_status := model.MapfixStatusValidated
update := service.NewMapfixUpdate()
update.SetStatusID(target_status)
allow_statuses := []model.MapfixStatus{model.MapfixStatusUploading}
err := svc.inner.UpdateMapfixIfStatus(ctx, MapfixID, allow_statuses, update)
if err != nil {
return nil, err
}
// push an action audit event
event_data := model.AuditEventDataAction{
TargetStatus: uint32(target_status),
}
err = svc.inner.CreateAuditEventAction(
ctx,
model.ValidatorUserID,
model.Resource{
ID: MapfixID,
Type: model.ResourceMapfix,
},
event_data,
)
if err != nil {
return nil, err
}
return &validator.NullResponse{}, nil
}
// ActionMapfixReleased implements actionMapfixReleased operation.
//
// (Internal endpoint) Role Validator changes status from Releasing -> Released.
//
// POST /mapfixes/{MapfixID}/status/validator-released
func (svc *Mapfixes) SetStatusReleased(ctx context.Context, params *validator.MapfixReleaseRequest) (*validator.NullResponse, error) {
MapfixID := int64(params.MapfixID)
// transaction
target_status := model.MapfixStatusReleased
update := service.NewMapfixUpdate()
update.SetStatusID(target_status)
allow_statuses := []model.MapfixStatus{model.MapfixStatusReleasing}
err := svc.inner.UpdateMapfixIfStatus(ctx, MapfixID, allow_statuses, update)
if err != nil {
return nil, err
}
event_data := model.AuditEventDataAction{
TargetStatus: uint32(target_status),
}
err = svc.inner.CreateAuditEventAction(
ctx,
model.ValidatorUserID,
model.Resource{
ID: MapfixID,
Type: model.ResourceMapfix,
},
event_data,
)
if err != nil {
return nil, err
}
// metadata maintenance
map_update := service.NewMapUpdate()
map_update.SetAssetVersion(params.AssetVersion)
map_update.SetModes(params.Modes)
err = svc.inner.UpdateMap(ctx, int64(params.TargetAssetID), map_update)
if err != nil {
return nil, err
}
return &validator.NullResponse{}, nil
}
func (svc *Mapfixes) SetStatusNotReleased(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
MapfixID := int64(params.ID)
// transaction
target_status := model.MapfixStatusUploaded
update := service.NewMapfixUpdate()
update.SetStatusID(target_status)
allow_statuses := []model.MapfixStatus{model.MapfixStatusReleasing}
err := svc.inner.UpdateMapfixIfStatus(ctx, MapfixID, allow_statuses, update)
if err != nil {
return nil, err
}
// push an action audit event
event_data := model.AuditEventDataAction{
TargetStatus: uint32(target_status),
}
err = svc.inner.CreateAuditEventAction(
ctx,
model.ValidatorUserID,
model.Resource{
ID: MapfixID,
Type: model.ResourceMapfix,
},
event_data,
)
if err != nil {
return nil, err
}
return &validator.NullResponse{}, nil
}
// CreateMapfixAuditError implements createMapfixAuditError operation.
//

View File

@@ -19,18 +19,6 @@ func NewOperationsController(
}
}
func (svc *Operations) Success(ctx context.Context, params *validator.OperationSuccessRequest) (*validator.NullResponse, error) {
success_params := service.NewOperationCompleteParams(
params.Path,
)
err := svc.inner.CompleteOperation(ctx, int32(params.OperationID), success_params)
if err != nil {
return nil, err
}
return &validator.NullResponse{}, nil
}
// ActionOperationFailed implements actionOperationFailed operation.
//
// Fail the specified OperationID with a StatusMessage.

View File

@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"time"
"git.itzana.me/strafesnet/go-grpc/validator"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
@@ -25,7 +24,7 @@ func NewSubmissionsController(
}
var(
// prevent two submissions with same asset id
// prevent two mapfixes with same asset id
ActiveSubmissionStatuses = []model.SubmissionStatus{
model.SubmissionStatusUploading,
model.SubmissionStatusValidated,
@@ -203,7 +202,7 @@ func (svc *Submissions) SetStatusValidated(ctx context.Context, params *validato
// (Internal endpoint) Role Validator changes status from Validating -> Accepted.
//
// POST /submissions/{SubmissionID}/status/validator-failed
func (svc *Submissions) SetStatusNotValidated(ctx context.Context, params *validator.SubmissionID) (*validator.NullResponse, error) {
func (svc *Submissions) SetStatusFailed(ctx context.Context, params *validator.SubmissionID) (*validator.NullResponse, error) {
SubmissionID := int64(params.ID)
// transaction
target_status := model.SubmissionStatusAcceptedUnvalidated
@@ -274,68 +273,6 @@ func (svc *Submissions) SetStatusUploaded(ctx context.Context, params *validator
return &validator.NullResponse{}, nil
}
func (svc *Submissions) SetStatusNotUploaded(ctx context.Context, params *validator.SubmissionID) (*validator.NullResponse, error) {
SubmissionID := int64(params.ID)
// transaction
target_status := model.SubmissionStatusValidated
update := service.NewSubmissionUpdate()
update.SetStatusID(target_status)
allowed_statuses :=[]model.SubmissionStatus{model.SubmissionStatusUploading}
err := svc.inner.UpdateSubmissionIfStatus(ctx, SubmissionID, allowed_statuses, update)
if err != nil {
return nil, err
}
// push an action audit event
event_data := model.AuditEventDataAction{
TargetStatus: uint32(target_status),
}
err = svc.inner.CreateAuditEventAction(
ctx,
model.ValidatorUserID,
model.Resource{
ID: SubmissionID,
Type: model.ResourceSubmission,
},
event_data,
)
if err != nil {
return nil, err
}
return &validator.NullResponse{}, nil
}
func (svc *Submissions) SetStatusReleased(ctx context.Context, params *validator.SubmissionReleaseRequest) (*validator.NullResponse, error){
// create map with go-grpc
_, err := svc.inner.CreateMap(ctx, model.Map{
ID: params.MapCreate.ID,
DisplayName: params.MapCreate.DisplayName,
Creator: params.MapCreate.Creator,
GameID: params.MapCreate.GameID,
Date: time.Unix(params.MapCreate.Date, 0),
Submitter: params.MapCreate.Submitter,
Thumbnail: 0,
AssetVersion: params.MapCreate.AssetVersion,
LoadCount: 0,
Modes: params.MapCreate.Modes,
})
if err != nil {
return nil, err
}
// update status to Released
update := service.NewSubmissionUpdate()
update.SetStatusID(model.SubmissionStatusReleased)
err = svc.inner.UpdateSubmissionIfStatus(ctx, int64(params.SubmissionID), []model.SubmissionStatus{model.SubmissionStatusUploaded}, update)
if err != nil {
return nil, err
}
return &validator.NullResponse{}, nil
}
// CreateSubmissionAuditError implements createSubmissionAuditError operation.
//
// Post an error to the audit log

View File

@@ -1,121 +0,0 @@
package web_api
import (
"context"
"git.itzana.me/strafesnet/maps-service/pkg/api"
"git.itzana.me/strafesnet/maps-service/pkg/model"
)
// ListAOREvents implements listAOREvents operation.
//
// Get list of AOR events.
//
// GET /aor-events
func (svc *Service) ListAOREvents(ctx context.Context, params api.ListAOREventsParams) ([]api.AOREvent, error) {
page := model.Page{
Number: params.Page,
Size: params.Limit,
}
events, err := svc.inner.ListAOREvents(ctx, page)
if err != nil {
return nil, err
}
var resp []api.AOREvent
for _, event := range events {
resp = append(resp, api.AOREvent{
ID: event.ID,
StartDate: event.StartDate.Unix(),
FreezeDate: event.FreezeDate.Unix(),
SelectionDate: event.SelectionDate.Unix(),
DecisionDate: event.DecisionDate.Unix(),
Status: int32(event.Status),
CreatedAt: event.CreatedAt.Unix(),
UpdatedAt: event.UpdatedAt.Unix(),
})
}
return resp, nil
}
// GetActiveAOREvent implements getActiveAOREvent operation.
//
// Get the currently active AOR event.
//
// GET /aor-events/active
func (svc *Service) GetActiveAOREvent(ctx context.Context) (*api.AOREvent, error) {
event, err := svc.inner.GetActiveAOREvent(ctx)
if err != nil {
return nil, err
}
return &api.AOREvent{
ID: event.ID,
StartDate: event.StartDate.Unix(),
FreezeDate: event.FreezeDate.Unix(),
SelectionDate: event.SelectionDate.Unix(),
DecisionDate: event.DecisionDate.Unix(),
Status: int32(event.Status),
CreatedAt: event.CreatedAt.Unix(),
UpdatedAt: event.UpdatedAt.Unix(),
}, nil
}
// GetAOREvent implements getAOREvent operation.
//
// Get a specific AOR event.
//
// GET /aor-events/{AOREventID}
func (svc *Service) GetAOREvent(ctx context.Context, params api.GetAOREventParams) (*api.AOREvent, error) {
event, err := svc.inner.GetAOREvent(ctx, params.AOREventID)
if err != nil {
return nil, err
}
return &api.AOREvent{
ID: event.ID,
StartDate: event.StartDate.Unix(),
FreezeDate: event.FreezeDate.Unix(),
SelectionDate: event.SelectionDate.Unix(),
DecisionDate: event.DecisionDate.Unix(),
Status: int32(event.Status),
CreatedAt: event.CreatedAt.Unix(),
UpdatedAt: event.UpdatedAt.Unix(),
}, nil
}
// GetAOREventSubmissions implements getAOREventSubmissions operation.
//
// Get all submissions for a specific AOR event.
//
// GET /aor-events/{AOREventID}/submissions
func (svc *Service) GetAOREventSubmissions(ctx context.Context, params api.GetAOREventSubmissionsParams) ([]api.Submission, error) {
submissions, err := svc.inner.GetAORSubmissionsByEvent(ctx, params.AOREventID)
if err != nil {
return nil, err
}
var resp []api.Submission
for _, submission := range submissions {
resp = append(resp, api.Submission{
ID: submission.ID,
DisplayName: submission.DisplayName,
Creator: submission.Creator,
GameID: int32(submission.GameID),
CreatedAt: submission.CreatedAt.Unix(),
UpdatedAt: submission.UpdatedAt.Unix(),
Submitter: int64(submission.Submitter),
AssetID: int64(submission.AssetID),
AssetVersion: int64(submission.AssetVersion),
ValidatedAssetID: api.NewOptInt64(int64(submission.ValidatedAssetID)),
ValidatedAssetVersion: api.NewOptInt64(int64(submission.ValidatedAssetVersion)),
Completed: submission.Completed,
UploadedAssetID: api.NewOptInt64(int64(submission.UploadedAssetID)),
StatusID: int32(submission.StatusID),
})
}
return resp, nil
}

View File

@@ -22,8 +22,6 @@ var(
}
// limit mapfixes in the pipeline to one per target map
ActiveAcceptedMapfixStatuses = []model.MapfixStatus{
model.MapfixStatusReleasing,
model.MapfixStatusUploaded,
model.MapfixStatusUploading,
model.MapfixStatusValidated,
model.MapfixStatusValidating,
@@ -195,9 +193,6 @@ func (svc *Service) ListMapfixes(ctx context.Context, params api.ListMapfixesPar
if asset_id, asset_id_ok := params.AssetID.Get(); asset_id_ok{
filter.SetAssetID(uint64(asset_id))
}
if asset_version, asset_version_ok := params.AssetVersion.Get(); asset_version_ok{
filter.SetAssetVersion(uint64(asset_version))
}
if target_asset_id, target_asset_id_ok := params.TargetAssetID.Get(); target_asset_id_ok{
filter.SetTargetAssetID(uint64(target_asset_id))
}
@@ -327,48 +322,6 @@ func (svc *Service) UpdateMapfixModel(ctx context.Context, params api.UpdateMapf
)
}
// UpdateMapfixDescription implements updateMapfixDescription operation.
//
// Update description (submitter only, status ChangesRequested or UnderConstruction).
//
// PATCH /mapfixes/{MapfixID}/description
func (svc *Service) UpdateMapfixDescription(ctx context.Context, req api.UpdateMapfixDescriptionReq, params api.UpdateMapfixDescriptionParams) error {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return ErrUserInfo
}
// read mapfix
mapfix, err := svc.inner.GetMapfix(ctx, params.MapfixID)
if err != nil {
return err
}
userId, err := userInfo.GetUserID()
if err != nil {
return err
}
// check if caller is the submitter
if userId != mapfix.Submitter {
return ErrPermissionDeniedNotSubmitter
}
// read the new description from request body
data, err := io.ReadAll(req)
if err != nil {
return err
}
newDescription := string(data)
// check if Status is ChangesRequested or UnderConstruction
update := service.NewMapfixUpdate()
update.SetDescription(newDescription)
allow_statuses := []model.MapfixStatus{model.MapfixStatusChangesRequested, model.MapfixStatusUnderConstruction}
return svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
}
// ActionMapfixReject invokes actionMapfixReject operation.
//
// Role Reviewer changes status from Submitted -> Rejected.
@@ -833,127 +786,6 @@ func (svc *Service) ActionMapfixValidated(ctx context.Context, params api.Action
)
}
// ActionMapfixTriggerRelease invokes actionMapfixTriggerRelease operation.
//
// Role MapfixUpload changes status from Uploaded -> Releasing.
//
// POST /mapfixes/{MapfixID}/status/trigger-release
func (svc *Service) ActionMapfixTriggerRelease(ctx context.Context, params api.ActionMapfixTriggerReleaseParams) error {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return ErrUserInfo
}
has_role, err := userInfo.HasRoleMapfixUpload()
if err != nil {
return err
}
// check if caller has required role
if !has_role {
return ErrPermissionDeniedNeedRoleMapfixUpload
}
userId, err := userInfo.GetUserID()
if err != nil {
return err
}
// transaction
target_status := model.MapfixStatusReleasing
update := service.NewMapfixUpdate()
update.SetStatusID(target_status)
allow_statuses := []model.MapfixStatus{model.MapfixStatusUploaded}
mapfix, err := svc.inner.UpdateAndGetMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
if err != nil {
return err
}
// this is a map fix
err = svc.inner.NatsReleaseMapfix(
mapfix.ID,
mapfix.ValidatedAssetID,
mapfix.ValidatedAssetVersion,
mapfix.TargetAssetID,
)
if err != nil {
return err
}
event_data := model.AuditEventDataAction{
TargetStatus: uint32(target_status),
}
return svc.inner.CreateAuditEventAction(
ctx,
userId,
model.Resource{
ID: params.MapfixID,
Type: model.ResourceMapfix,
},
event_data,
)
}
// ActionMapfixUploaded invokes actionMapfixUploaded operation.
//
// Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded.
//
// POST /mapfixes/{MapfixID}/status/reset-releasing
func (svc *Service) ActionMapfixUploaded(ctx context.Context, params api.ActionMapfixUploadedParams) error {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return ErrUserInfo
}
has_role, err := userInfo.HasRoleMapfixUpload()
if err != nil {
return err
}
// check if caller has required role
if !has_role {
return ErrPermissionDeniedNeedRoleMapfixUpload
}
userId, err := userInfo.GetUserID()
if err != nil {
return err
}
// check when mapfix was updated
mapfix, err := svc.inner.GetMapfix(ctx, params.MapfixID)
if err != nil {
return err
}
if time.Now().Before(mapfix.UpdatedAt.Add(time.Second*10)) {
// the last time the mapfix was updated must be longer than 10 seconds ago
return ErrDelayReset
}
// transaction
target_status := model.MapfixStatusUploaded
update := service.NewMapfixUpdate()
update.SetStatusID(target_status)
allow_statuses := []model.MapfixStatus{model.MapfixStatusReleasing}
err = svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
if err != nil {
return err
}
event_data := model.AuditEventDataAction{
TargetStatus: uint32(target_status),
}
return svc.inner.CreateAuditEventAction(
ctx,
userId,
model.Resource{
ID: params.MapfixID,
Type: model.ResourceMapfix,
},
event_data,
)
}
// ActionMapfixTriggerValidate invokes actionMapfixTriggerValidate operation.
//
// Role Reviewer triggers validation and changes status from Submitted -> Validating.

View File

@@ -9,6 +9,29 @@ import (
"git.itzana.me/strafesnet/maps-service/pkg/service"
)
// MigrateMaps implements migrateMaps operation.
//
// Perform maps migration.
//
// POST /migrate
func (svc *Service) MigrateMaps(ctx context.Context) (error) {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return ErrUserInfo
}
has_role, err := userInfo.HasRoleSubmissionRelease()
if err != nil {
return err
}
if !has_role {
return ErrPermissionDeniedNeedRoleSubmissionRelease
}
return svc.inner.TEMP_DoMapsMigration(ctx)
}
// ListMaps implements listMaps operation.
//
// Get list of maps.
@@ -46,13 +69,6 @@ func (svc *Service) ListMaps(ctx context.Context, params api.ListMapsParams) ([]
Creator: item.Creator,
GameID: int32(item.GameID),
Date: item.Date.Unix(),
CreatedAt: item.CreatedAt.Unix(),
UpdatedAt: item.UpdatedAt.Unix(),
Submitter: item.Submitter,
Thumbnail: item.Thumbnail,
AssetVersion: item.AssetVersion,
LoadCount: item.LoadCount,
Modes: item.Modes,
})
}
@@ -76,13 +92,6 @@ func (svc *Service) GetMap(ctx context.Context, params api.GetMapParams) (*api.M
Creator: mapResponse.Creator,
GameID: int32(mapResponse.GameID),
Date: mapResponse.Date.Unix(),
CreatedAt: mapResponse.CreatedAt.Unix(),
UpdatedAt: mapResponse.UpdatedAt.Unix(),
Submitter: mapResponse.Submitter,
Thumbnail: mapResponse.Thumbnail,
AssetVersion: mapResponse.AssetVersion,
LoadCount: mapResponse.LoadCount,
Modes: mapResponse.Modes,
}, nil
}

View File

@@ -36,28 +36,10 @@ func (svc *Service) CreateScript(ctx context.Context, req *api.ScriptCreate) (*a
return nil, err
}
hash := int64(model.HashSource(req.Source))
// Check if a script with this hash already exists
filter := service.NewScriptFilter()
filter.SetHash(hash)
existingScripts, err := svc.inner.ListScripts(ctx, filter, model.Page{Number: 1, Size: 1})
if err != nil {
return nil, err
}
// If script with this hash exists, return existing script ID
if len(existingScripts) > 0 {
return &api.ScriptID{
ScriptID: existingScripts[0].ID,
}, nil
}
// Create new script
script, err := svc.inner.CreateScript(ctx, model.Script{
ID: 0,
Name: req.Name,
Hash: hash,
Hash: int64(model.HashSource(req.Source)),
Source: req.Source,
ResourceType: model.ResourceType(req.ResourceType),
ResourceID: req.ResourceID.Or(0),

View File

@@ -58,7 +58,7 @@ func (usr UserInfoHandle) Validate() (bool, error) {
}
return validate.Valid, nil
}
func (usr UserInfoHandle) HasRoles(wantRoles model.Roles) (bool, error) {
func (usr UserInfoHandle) hasRoles(wantRoles model.Roles) (bool, error) {
haveroles, err := usr.GetRoles()
if err != nil {
return false, err
@@ -94,25 +94,25 @@ func (usr UserInfoHandle) GetRoles() (model.Roles, error) {
// RoleThumbnail
func (usr UserInfoHandle) HasRoleMapfixUpload() (bool, error) {
return usr.HasRoles(model.RolesMapfixUpload)
return usr.hasRoles(model.RolesMapfixUpload)
}
func (usr UserInfoHandle) HasRoleMapfixReview() (bool, error) {
return usr.HasRoles(model.RolesMapfixReview)
return usr.hasRoles(model.RolesMapfixReview)
}
func (usr UserInfoHandle) HasRoleMapDownload() (bool, error) {
return usr.HasRoles(model.RolesMapDownload)
return usr.hasRoles(model.RolesMapDownload)
}
func (usr UserInfoHandle) HasRoleSubmissionRelease() (bool, error) {
return usr.HasRoles(model.RolesSubmissionRelease)
return usr.hasRoles(model.RolesSubmissionRelease)
}
func (usr UserInfoHandle) HasRoleSubmissionUpload() (bool, error) {
return usr.HasRoles(model.RolesSubmissionUpload)
return usr.hasRoles(model.RolesSubmissionUpload)
}
func (usr UserInfoHandle) HasRoleSubmissionReview() (bool, error) {
return usr.HasRoles(model.RolesSubmissionReview)
return usr.hasRoles(model.RolesSubmissionReview)
}
func (usr UserInfoHandle) HasRoleScriptWrite() (bool, error) {
return usr.HasRoles(model.RolesScriptWrite)
return usr.hasRoles(model.RolesScriptWrite)
}
/// Not implemented
func (usr UserInfoHandle) HasRoleMaptest() (bool, error) {

View File

@@ -1,105 +0,0 @@
package web_api
import (
"context"
"git.itzana.me/strafesnet/maps-service/pkg/api"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
"git.itzana.me/strafesnet/maps-service/pkg/service"
)
// GET /stats
func (svc *Service) GetStats(ctx context.Context) (*api.Stats, error) {
// Get total submissions count
totalSubmissions, _, err := svc.inner.ListSubmissionsWithTotal(ctx, service.NewSubmissionFilter(), model.Page{
Number: 1,
Size: 0, // We only want the count, not the items
}, datastore.ListSortDisabled)
if err != nil {
return nil, err
}
// Get total mapfixes count
totalMapfixes, _, err := svc.inner.ListMapfixesWithTotal(ctx, service.NewMapfixFilter(), model.Page{
Number: 1,
Size: 0, // We only want the count, not the items
}, datastore.ListSortDisabled)
if err != nil {
return nil, err
}
// Get released submissions count
releasedSubmissionsFilter := service.NewSubmissionFilter()
releasedSubmissionsFilter.SetStatuses([]model.SubmissionStatus{model.SubmissionStatusReleased})
releasedSubmissions, _, err := svc.inner.ListSubmissionsWithTotal(ctx, releasedSubmissionsFilter, model.Page{
Number: 1,
Size: 0,
}, datastore.ListSortDisabled)
if err != nil {
return nil, err
}
// Get released mapfixes count
releasedMapfixesFilter := service.NewMapfixFilter()
releasedMapfixesFilter.SetStatuses([]model.MapfixStatus{model.MapfixStatusReleased})
releasedMapfixes, _, err := svc.inner.ListMapfixesWithTotal(ctx, releasedMapfixesFilter, model.Page{
Number: 1,
Size: 0,
}, datastore.ListSortDisabled)
if err != nil {
return nil, err
}
// Get submitted submissions count (under review)
submittedSubmissionsFilter := service.NewSubmissionFilter()
submittedSubmissionsFilter.SetStatuses([]model.SubmissionStatus{
model.SubmissionStatusUnderConstruction,
model.SubmissionStatusChangesRequested,
model.SubmissionStatusSubmitting,
model.SubmissionStatusSubmitted,
model.SubmissionStatusAcceptedUnvalidated,
model.SubmissionStatusValidating,
model.SubmissionStatusValidated,
model.SubmissionStatusUploading,
model.SubmissionStatusUploaded,
})
submittedSubmissions, _, err := svc.inner.ListSubmissionsWithTotal(ctx, submittedSubmissionsFilter, model.Page{
Number: 1,
Size: 0,
}, datastore.ListSortDisabled)
if err != nil {
return nil, err
}
// Get submitted mapfixes count (under review)
submittedMapfixesFilter := service.NewMapfixFilter()
submittedMapfixesFilter.SetStatuses([]model.MapfixStatus{
model.MapfixStatusUnderConstruction,
model.MapfixStatusChangesRequested,
model.MapfixStatusSubmitting,
model.MapfixStatusSubmitted,
model.MapfixStatusAcceptedUnvalidated,
model.MapfixStatusValidating,
model.MapfixStatusValidated,
model.MapfixStatusUploading,
model.MapfixStatusUploaded,
model.MapfixStatusReleasing,
})
submittedMapfixes, _, err := svc.inner.ListMapfixesWithTotal(ctx, submittedMapfixesFilter, model.Page{
Number: 1,
Size: 0,
}, datastore.ListSortDisabled)
if err != nil {
return nil, err
}
return &api.Stats{
TotalSubmissions: totalSubmissions,
TotalMapfixes: totalMapfixes,
ReleasedSubmissions: releasedSubmissions,
ReleasedMapfixes: releasedMapfixes,
SubmittedSubmissions: submittedSubmissions,
SubmittedMapfixes: submittedMapfixes,
}, nil
}

View File

@@ -1,207 +0,0 @@
package web_api
import (
"context"
"errors"
"git.itzana.me/strafesnet/maps-service/pkg/api"
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
"git.itzana.me/strafesnet/maps-service/pkg/model"
"git.itzana.me/strafesnet/maps-service/pkg/service"
)
var (
ErrReviewNotOwner = errors.New("You can only edit your own review")
ErrReviewNotSubmitted = errors.New("Reviews can only be created or edited when the submission is in Submitted status")
)
// ListSubmissionReviews implements listSubmissionReviews operation.
//
// Get all reviews for a submission.
//
// GET /submissions/{SubmissionID}/reviews
func (svc *Service) ListSubmissionReviews(ctx context.Context, params api.ListSubmissionReviewsParams) ([]api.SubmissionReview, error) {
reviews, err := svc.inner.ListSubmissionReviewsBySubmission(ctx, params.SubmissionID)
if err != nil {
return nil, err
}
var resp []api.SubmissionReview
for _, review := range reviews {
resp = append(resp, api.SubmissionReview{
ID: review.ID,
SubmissionID: review.SubmissionID,
ReviewerID: int64(review.ReviewerID),
Recommend: review.Recommend,
Description: review.Description,
Outdated: review.Outdated,
CreatedAt: review.CreatedAt.Unix(),
UpdatedAt: review.UpdatedAt.Unix(),
})
}
return resp, nil
}
// CreateSubmissionReview implements createSubmissionReview operation.
//
// Create a review for a submission.
//
// POST /submissions/{SubmissionID}/reviews
func (svc *Service) CreateSubmissionReview(ctx context.Context, req *api.SubmissionReviewCreate, params api.CreateSubmissionReviewParams) (*api.SubmissionReview, error) {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return nil, ErrUserInfo
}
// Check if caller has required role
has_role, err := userInfo.HasRoleSubmissionReview()
if err != nil {
return nil, err
}
if !has_role {
return nil, ErrPermissionDeniedNeedRoleSubmissionReview
}
userId, err := userInfo.GetUserID()
if err != nil {
return nil, err
}
// Check if submission exists and is in Submitted status
submission, err := svc.inner.GetSubmission(ctx, params.SubmissionID)
if err != nil {
return nil, err
}
if submission.StatusID != model.SubmissionStatusSubmitted {
return nil, ErrReviewNotSubmitted
}
// Check if user already has a review for this submission
existingReview, err := svc.inner.GetSubmissionReviewBySubmissionAndReviewer(ctx, params.SubmissionID, userId)
if err != nil && !errors.Is(err, datastore.ErrNotExist) {
return nil, err
}
// If review exists, update it instead
if err == nil {
update := service.NewSubmissionReviewUpdate()
update.SetRecommend(req.Recommend)
update.SetDescription(req.Description)
update.SetOutdated(false)
err = svc.inner.UpdateSubmissionReview(ctx, existingReview.ID, update)
if err != nil {
return nil, err
}
// Fetch updated review
updatedReview, err := svc.inner.GetSubmissionReview(ctx, existingReview.ID)
if err != nil {
return nil, err
}
return &api.SubmissionReview{
ID: updatedReview.ID,
SubmissionID: updatedReview.SubmissionID,
ReviewerID: int64(updatedReview.ReviewerID),
Recommend: updatedReview.Recommend,
Description: updatedReview.Description,
Outdated: updatedReview.Outdated,
CreatedAt: updatedReview.CreatedAt.Unix(),
UpdatedAt: updatedReview.UpdatedAt.Unix(),
}, nil
}
// Create new review
review := model.SubmissionReview{
SubmissionID: params.SubmissionID,
ReviewerID: userId,
Recommend: req.Recommend,
Description: req.Description,
Outdated: false,
}
createdReview, err := svc.inner.CreateSubmissionReview(ctx, review)
if err != nil {
return nil, err
}
return &api.SubmissionReview{
ID: createdReview.ID,
SubmissionID: createdReview.SubmissionID,
ReviewerID: int64(createdReview.ReviewerID),
Recommend: createdReview.Recommend,
Description: createdReview.Description,
Outdated: createdReview.Outdated,
CreatedAt: createdReview.CreatedAt.Unix(),
UpdatedAt: createdReview.UpdatedAt.Unix(),
}, nil
}
// UpdateSubmissionReview implements updateSubmissionReview operation.
//
// Update an existing review.
//
// PATCH /submissions/{SubmissionID}/reviews/{ReviewID}
func (svc *Service) UpdateSubmissionReview(ctx context.Context, req *api.SubmissionReviewCreate, params api.UpdateSubmissionReviewParams) (*api.SubmissionReview, error) {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return nil, ErrUserInfo
}
userId, err := userInfo.GetUserID()
if err != nil {
return nil, err
}
// Get the existing review
review, err := svc.inner.GetSubmissionReview(ctx, params.ReviewID)
if err != nil {
return nil, err
}
// Check if user is the owner of the review
if review.ReviewerID != userId {
return nil, ErrReviewNotOwner
}
// Check if submission is still in Submitted status
submission, err := svc.inner.GetSubmission(ctx, params.SubmissionID)
if err != nil {
return nil, err
}
if submission.StatusID != model.SubmissionStatusSubmitted {
return nil, ErrReviewNotSubmitted
}
// Update the review
update := service.NewSubmissionReviewUpdate()
update.SetRecommend(req.Recommend)
update.SetDescription(req.Description)
update.SetOutdated(false) // Clear outdated flag on edit
err = svc.inner.UpdateSubmissionReview(ctx, params.ReviewID, update)
if err != nil {
return nil, err
}
// Fetch updated review
updatedReview, err := svc.inner.GetSubmissionReview(ctx, params.ReviewID)
if err != nil {
return nil, err
}
return &api.SubmissionReview{
ID: updatedReview.ID,
SubmissionID: updatedReview.SubmissionID,
ReviewerID: int64(updatedReview.ReviewerID),
Recommend: updatedReview.Recommend,
Description: updatedReview.Description,
Outdated: updatedReview.Outdated,
CreatedAt: updatedReview.CreatedAt.Unix(),
UpdatedAt: updatedReview.UpdatedAt.Unix(),
}, nil
}

View File

@@ -20,6 +20,13 @@ var(
model.SubmissionStatusSubmitted,
model.SubmissionStatusUnderConstruction,
}
// limit submissions in the pipeline to one per target map
ActiveAcceptedSubmissionStatuses = []model.SubmissionStatus{
model.SubmissionStatusUploading,
model.SubmissionStatusValidated,
model.SubmissionStatusValidating,
model.SubmissionStatusAcceptedUnvalidated,
}
// Allow 5 submissions every 10 minutes
CreateSubmissionRateLimit int64 = 5
CreateSubmissionRecencyWindow = time.Second*600
@@ -229,9 +236,6 @@ func (svc *Service) ListSubmissions(ctx context.Context, params api.ListSubmissi
if asset_id, asset_id_ok := params.AssetID.Get(); asset_id_ok{
filter.SetAssetID(uint64(asset_id))
}
if asset_version, asset_version_ok := params.AssetVersion.Get(); asset_version_ok{
filter.SetAssetVersion(uint64(asset_version))
}
if uploaded_asset_id, uploaded_asset_id_ok := params.UploadedAssetID.Get(); uploaded_asset_id_ok{
filter.SetUploadedAssetID(uint64(uploaded_asset_id))
}
@@ -1034,24 +1038,19 @@ func (svc *Service) ActionSubmissionAccepted(ctx context.Context, params api.Act
// Release a set of uploaded maps.
//
// POST /release-submissions
func (svc *Service) ReleaseSubmissions(ctx context.Context, request []api.ReleaseInfo) (*api.OperationID, error) {
func (svc *Service) ReleaseSubmissions(ctx context.Context, request []api.ReleaseInfo) error {
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
if !ok {
return nil, ErrUserInfo
return ErrUserInfo
}
has_role, err := userInfo.HasRoleSubmissionRelease()
if err != nil {
return nil, err
return err
}
// check if caller has required role
if !has_role {
return nil, ErrPermissionDeniedNeedRoleSubmissionRelease
}
userId, err := userInfo.GetUserID()
if err != nil {
return nil, err
return ErrPermissionDeniedNeedRoleSubmissionRelease
}
idList := make([]int64, len(request))
@@ -1062,62 +1061,48 @@ func (svc *Service) ReleaseSubmissions(ctx context.Context, request []api.Releas
// fetch submissions
submissions, err := svc.inner.GetSubmissionList(ctx, idList)
if err != nil {
return nil, err
return err
}
// the submissions are not ordered the same as the idList!
id_to_submission := make(map[int64]*model.Submission, len(submissions))
// check each submission to make sure it is ready to release
for _,submission := range submissions{
if submission.StatusID != model.SubmissionStatusUploaded{
return nil, ErrReleaseInvalidStatus
return ErrReleaseInvalidStatus
}
if submission.UploadedAssetID == 0{
return nil, ErrReleaseNoUploadedAssetID
return ErrReleaseNoUploadedAssetID
}
id_to_submission[submission.ID] = &submission
}
// construct batch release nats message
release_submissions := make([]model.ReleaseSubmissionRequest, len(request))
for i, release_info := range request {
// from request
release_submissions[i].ReleaseDate = release_info.Date.Unix()
release_submissions[i].SubmissionID = release_info.SubmissionID
submission := id_to_submission[release_info.SubmissionID]
// from submission
release_submissions[i].ModelID = submission.ValidatedAssetID
release_submissions[i].ModelVersion = submission.ValidatedAssetVersion
// for map create
release_submissions[i].UploadedAssetID = submission.UploadedAssetID
release_submissions[i].DisplayName = submission.DisplayName
release_submissions[i].Creator = submission.Creator
release_submissions[i].GameID = submission.GameID
release_submissions[i].Submitter = submission.Submitter
for i,submission := range submissions{
date := request[i].Date.Unix()
// create each map with go-grpc
_, err := svc.inner.CreateMap(ctx, model.Map{
ID: int64(submission.UploadedAssetID),
DisplayName: submission.DisplayName,
Creator: submission.Creator,
GameID: submission.GameID,
Date: time.Unix(date, 0),
Submitter: submission.Submitter,
// Thumbnail: 0,
// AssetVersion: 0,
// LoadCount: 0,
// Modes: 0,
})
if err != nil {
return err
}
// update each status to Released
update := service.NewSubmissionUpdate()
update.SetStatusID(model.SubmissionStatusReleased)
err = svc.inner.UpdateSubmissionIfStatus(ctx, submission.ID, []model.SubmissionStatus{model.SubmissionStatusUploaded}, update)
if err != nil {
return err
}
}
// create a trackable long-running operation
operation, err := svc.inner.CreateOperation(ctx, model.Operation{
Owner: userId,
StatusID: model.OperationStatusCreated,
})
if err != nil {
return nil, err
}
// send the batch release request over nats
err = svc.inner.NatsBatchReleaseSubmissions(
release_submissions,
operation.ID,
)
if err != nil {
return nil, err
}
return &api.OperationID{
OperationID: operation.ID,
}, nil
return nil
}
// CreateSubmissionAuditComment implements createSubmissionAuditComment operation.

View File

@@ -1,135 +0,0 @@
package web_api
import (
"context"
"strconv"
"git.itzana.me/strafesnet/maps-service/pkg/api"
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
)
// BatchAssetThumbnails handles batch fetching of asset thumbnails
func (svc *Service) BatchAssetThumbnails(ctx context.Context, req *api.BatchAssetThumbnailsReq) (*api.BatchAssetThumbnailsOK, error) {
if len(req.AssetIds) == 0 {
return &api.BatchAssetThumbnailsOK{
Thumbnails: api.NewOptBatchAssetThumbnailsOKThumbnails(map[string]string{}),
}, nil
}
// Convert size string to enum
size := roblox.Size420x420
if req.Size.IsSet() {
sizeStr := req.Size.Value
switch api.BatchAssetThumbnailsReqSize(sizeStr) {
case api.BatchAssetThumbnailsReqSize150x150:
size = roblox.Size150x150
case api.BatchAssetThumbnailsReqSize768x432:
size = roblox.Size768x432
}
}
// Fetch thumbnails from service
thumbnails, err := svc.inner.GetAssetThumbnails(ctx, req.AssetIds, size)
if err != nil {
return nil, err
}
// Convert map[uint64]string to map[string]string for JSON
result := make(map[string]string, len(thumbnails))
for assetID, url := range thumbnails {
result[strconv.FormatUint(assetID, 10)] = url
}
return &api.BatchAssetThumbnailsOK{
Thumbnails: api.NewOptBatchAssetThumbnailsOKThumbnails(result),
}, nil
}
// GetAssetThumbnail handles single asset thumbnail fetch (with redirect)
func (svc *Service) GetAssetThumbnail(ctx context.Context, params api.GetAssetThumbnailParams) (*api.GetAssetThumbnailFound, error) {
// Convert size string to enum
size := roblox.Size420x420
if params.Size.IsSet() {
sizeStr := params.Size.Value
switch api.GetAssetThumbnailSize(sizeStr) {
case api.GetAssetThumbnailSize150x150:
size = roblox.Size150x150
case api.GetAssetThumbnailSize768x432:
size = roblox.Size768x432
}
}
// Fetch thumbnail
thumbnailURL, err := svc.inner.GetSingleAssetThumbnail(ctx, params.AssetID, size)
if err != nil {
return nil, err
}
// Return redirect response
return &api.GetAssetThumbnailFound{
Location: api.NewOptString(thumbnailURL),
}, nil
}
// BatchUserThumbnails handles batch fetching of user avatar thumbnails
func (svc *Service) BatchUserThumbnails(ctx context.Context, req *api.BatchUserThumbnailsReq) (*api.BatchUserThumbnailsOK, error) {
if len(req.UserIds) == 0 {
return &api.BatchUserThumbnailsOK{
Thumbnails: api.NewOptBatchUserThumbnailsOKThumbnails(map[string]string{}),
}, nil
}
// Convert size string to enum
size := roblox.Size150x150
if req.Size.IsSet() {
sizeStr := req.Size.Value
switch api.BatchUserThumbnailsReqSize(sizeStr) {
case api.BatchUserThumbnailsReqSize420x420:
size = roblox.Size420x420
case api.BatchUserThumbnailsReqSize768x432:
size = roblox.Size768x432
}
}
// Fetch thumbnails from service
thumbnails, err := svc.inner.GetUserAvatarThumbnails(ctx, req.UserIds, size)
if err != nil {
return nil, err
}
// Convert map[uint64]string to map[string]string for JSON
result := make(map[string]string, len(thumbnails))
for userID, url := range thumbnails {
result[strconv.FormatUint(userID, 10)] = url
}
return &api.BatchUserThumbnailsOK{
Thumbnails: api.NewOptBatchUserThumbnailsOKThumbnails(result),
}, nil
}
// GetUserThumbnail handles single user avatar thumbnail fetch (with redirect)
func (svc *Service) GetUserThumbnail(ctx context.Context, params api.GetUserThumbnailParams) (*api.GetUserThumbnailFound, error) {
// Convert size string to enum
size := roblox.Size150x150
if params.Size.IsSet() {
sizeStr := params.Size.Value
switch api.GetUserThumbnailSize(sizeStr) {
case api.GetUserThumbnailSize420x420:
size = roblox.Size420x420
case api.GetUserThumbnailSize768x432:
size = roblox.Size768x432
}
}
// Fetch thumbnail
thumbnailURL, err := svc.inner.GetSingleUserAvatarThumbnail(ctx, params.UserID, size)
if err != nil {
return nil, err
}
// Return redirect response
return &api.GetUserThumbnailFound{
Location: api.NewOptString(thumbnailURL),
}, nil
}

View File

@@ -1,33 +0,0 @@
package web_api
import (
"context"
"strconv"
"git.itzana.me/strafesnet/maps-service/pkg/api"
)
// BatchUsernames handles batch fetching of usernames
func (svc *Service) BatchUsernames(ctx context.Context, req *api.BatchUsernamesReq) (*api.BatchUsernamesOK, error) {
if len(req.UserIds) == 0 {
return &api.BatchUsernamesOK{
Usernames: api.NewOptBatchUsernamesOKUsernames(map[string]string{}),
}, nil
}
// Fetch usernames from service
usernames, err := svc.inner.GetUsernames(ctx, req.UserIds)
if err != nil {
return nil, err
}
// Convert map[uint64]string to map[string]string for JSON
result := make(map[string]string, len(usernames))
for userID, username := range usernames {
result[strconv.FormatUint(userID, 10)] = username
}
return &api.BatchUsernamesOK{
Usernames: api.NewOptBatchUsernamesOKUsernames(result),
}, nil
}

View File

@@ -4,18 +4,18 @@ version = "0.1.1"
edition = "2024"
[dependencies]
async-nats = "0.45.0"
async-nats = "0.42.0"
futures = "0.3.31"
rbx_asset = { version = "0.5.0", features = ["gzip", "rustls-tls"], default-features = false, registry = "strafesnet" }
rbx_binary = "2.0.0"
rbx_dom_weak = "4.0.0"
rbx_reflection_database = "2.0.1"
rbx_xml = "2.0.0"
regex = { version = "1.11.3", default-features = false }
rbx_asset = { version = "0.4.7", registry = "strafesnet" }
rbx_binary = "1.0.0"
rbx_dom_weak = "3.0.0"
rbx_reflection_database = "1.0.3"
rbx_xml = "1.0.0"
serde = { version = "1.0.215", features = ["derive"] }
serde_json = "1.0.133"
siphasher = "1.0.1"
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "signal"] }
heck = "0.5.0"
rust-grpc = { version = "1.6.1", registry = "strafesnet" }
tonic = "0.14.1"
lazy-regex = "3.4.1"
rust-grpc = { version = "1.2.1", registry = "strafesnet" }
tonic = "0.13.1"
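One side of this dependency change drops the lazy-regex crate in favour of plain regex plus a small local macro built on std::sync::LazyLock (the macro appears in the validator's check module later in this diff). A minimal standalone sketch of that pattern follows; parse_spawn is a made-up helper, but the Spawn(\d+) pattern and the macro body mirror code shown in this diff.

use regex::Regex;
use std::sync::LazyLock;

// Compile each pattern once on first use and hand back a &'static reference.
macro_rules! lazy_regex {
    ($r:literal) => {{
        static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new($r).unwrap());
        &RE
    }};
}

// Hypothetical helper in the spirit of the Spawn# checks further down.
fn parse_spawn(name: &str) -> Option<u64> {
    let spawn_pattern = lazy_regex!(r"^Spawn(\d+)$");
    let captures = spawn_pattern.captures(name)?;
    captures[1].parse().ok()
}

fn main() {
    assert_eq!(parse_spawn("Spawn12"), Some(12));
    assert_eq!(parse_spawn("Teleport12"), None);
}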

View File

@@ -1,3 +1,24 @@
FROM alpine:3.21 AS runtime
COPY /target/x86_64-unknown-linux-musl/release/maps-validation /
ENTRYPOINT ["/maps-validation"]
# Using the `rust-musl-builder` as base image, instead of
# the official Rust toolchain
FROM registry.itzana.me/docker-proxy/clux/muslrust:1.86.0-stable AS chef
USER root
RUN cargo install cargo-chef
WORKDIR /app
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
COPY api ./api
# Notice that we are specifying the --target flag!
RUN cargo chef cook --release --target x86_64-unknown-linux-musl --recipe-path recipe.json
COPY . .
RUN cargo build --release --target x86_64-unknown-linux-musl --bin maps-validation
FROM registry.itzana.me/docker-proxy/alpine:3.21 AS runtime
RUN addgroup -S myuser && adduser -S myuser -G myuser
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/maps-validation /usr/local/bin/
USER myuser
ENTRYPOINT ["/usr/local/bin/maps-validation"]

View File

@@ -1,6 +1,6 @@
[package]
name = "submissions-api"
version = "0.10.1"
version = "0.8.2"
edition = "2024"
publish = ["strafesnet"]
repository = "https://git.itzana.me/StrafesNET/maps-service"
@@ -12,11 +12,7 @@ authors = ["Rhys Lloyd <krakow20@gmail.com>"]
[dependencies]
chrono = { version = "0.4.41", features = ["serde"] }
reqwest = { version = "0", features = [
"json", "rustls-tls",
# default features
"charset", "http2", "system-proxy"
], default-features = false }
reqwest = { version = "0", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_repr = "0.1.19"

View File

@@ -152,48 +152,6 @@ impl Context{
Ok(())
}
pub async fn get_mapfixes(&self,config:GetMapfixesRequest<'_>)->Result<MapfixesResponse,Error>{
let url_raw=format!("{}/mapfixes",self.0.base_url);
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
{
let mut query_pairs=url.query_pairs_mut();
query_pairs.append_pair("Page",config.Page.to_string().as_str());
query_pairs.append_pair("Limit",config.Limit.to_string().as_str());
if let Some(sort)=config.Sort{
query_pairs.append_pair("Sort",(sort as u8).to_string().as_str());
}
if let Some(display_name)=config.DisplayName{
query_pairs.append_pair("DisplayName",display_name);
}
if let Some(creator)=config.Creator{
query_pairs.append_pair("Creator",creator);
}
if let Some(game_id)=config.GameID{
query_pairs.append_pair("GameID",(game_id as u8).to_string().as_str());
}
if let Some(submitter)=config.Submitter{
query_pairs.append_pair("Submitter",submitter.to_string().as_str());
}
if let Some(asset_id)=config.AssetID{
query_pairs.append_pair("AssetID",asset_id.to_string().as_str());
}
if let Some(asset_version)=config.AssetVersion{
query_pairs.append_pair("AssetVersion",asset_version.to_string().as_str());
}
if let Some(uploaded_asset_id)=config.TargetAssetID{
query_pairs.append_pair("TargetAssetID",uploaded_asset_id.to_string().as_str());
}
if let Some(status_id)=config.StatusID{
query_pairs.append_pair("StatusID",(status_id as u8).to_string().as_str());
}
}
response_ok(
self.0.get(url).await.map_err(Error::Reqwest)?
).await.map_err(Error::Response)?
.json().await.map_err(Error::ReqwestJson)
}
pub async fn get_submissions(&self,config:GetSubmissionsRequest<'_>)->Result<SubmissionsResponse,Error>{
let url_raw=format!("{}/submissions",self.0.base_url);
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
@@ -220,9 +178,6 @@ impl Context{
if let Some(asset_id)=config.AssetID{
query_pairs.append_pair("AssetID",asset_id.to_string().as_str());
}
if let Some(asset_version)=config.AssetVersion{
query_pairs.append_pair("AssetVersion",asset_version.to_string().as_str());
}
if let Some(uploaded_asset_id)=config.UploadedAssetID{
query_pairs.append_pair("UploadedAssetID",uploaded_asset_id.to_string().as_str());
}
@@ -263,37 +218,7 @@ impl Context{
).await.map_err(Error::Response)?
.json().await.map_err(Error::ReqwestJson)
}
pub async fn get_mapfix_audit_events(&self,config:GetMapfixAuditEventsRequest)->Result<Vec<AuditEventReponse>,Error>{
let url_raw=format!("{}/mapfixes/{}/audit-events",self.0.base_url,config.MapfixID);
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
{
let mut query_pairs=url.query_pairs_mut();
query_pairs.append_pair("Page",config.Page.to_string().as_str());
query_pairs.append_pair("Limit",config.Limit.to_string().as_str());
}
response_ok(
self.0.get(url).await.map_err(Error::Reqwest)?
).await.map_err(Error::Response)?
.json().await.map_err(Error::ReqwestJson)
}
pub async fn get_submission_audit_events(&self,config:GetSubmissionAuditEventsRequest)->Result<Vec<AuditEventReponse>,Error>{
let url_raw=format!("{}/submissions/{}/audit-events",self.0.base_url,config.SubmissionID);
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
{
let mut query_pairs=url.query_pairs_mut();
query_pairs.append_pair("Page",config.Page.to_string().as_str());
query_pairs.append_pair("Limit",config.Limit.to_string().as_str());
}
response_ok(
self.0.get(url).await.map_err(Error::Reqwest)?
).await.map_err(Error::Response)?
.json().await.map_err(Error::ReqwestJson)
}
pub async fn release_submissions(&self,config:ReleaseRequest<'_>)->Result<OperationIDResponse,Error>{
pub async fn release_submissions(&self,config:ReleaseRequest<'_>)->Result<(),Error>{
let url_raw=format!("{}/release-submissions",self.0.base_url);
let url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
@@ -301,7 +226,8 @@ impl Context{
response_ok(
self.0.post(url,body).await.map_err(Error::Reqwest)?
).await.map_err(Error::Response)?
.json().await.map_err(Error::ReqwestJson)
).await.map_err(Error::Response)?;
Ok(())
}
}
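All of the list endpoints above build their query strings the same way: parse the base URL, then append only the filters that are actually set. A minimal sketch of that pattern using the url crate directly (reqwest::Url is a re-export of url::Url); the base address and filter values here are made up.

use url::Url;

fn main() -> Result<(), url::ParseError> {
    // Stand-ins for a GetSubmissionsRequest: only Some(_) filters get appended.
    let page: u32 = 1;
    let limit: u32 = 100;
    let status_id: Option<u8> = Some(3); // e.g. Submitted
    let creator: Option<&str> = None;

    let mut url = Url::parse("https://maps.example/v1/submissions")?;
    {
        let mut query_pairs = url.query_pairs_mut();
        query_pairs.append_pair("Page", page.to_string().as_str());
        query_pairs.append_pair("Limit", limit.to_string().as_str());
        if let Some(status_id) = status_id {
            query_pairs.append_pair("StatusID", status_id.to_string().as_str());
        }
        if let Some(creator) = creator {
            query_pairs.append_pair("Creator", creator);
        }
    }
    // Prints: https://maps.example/v1/submissions?Page=1&Limit=100&StatusID=3
    println!("{url}");
    Ok(())
}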

View File

@@ -30,6 +30,7 @@ impl<Items> std::error::Error for SingleItemError<Items> where Items:std::fmt::D
pub type ScriptSingleItemError=SingleItemError<Vec<ScriptID>>;
pub type ScriptPolicySingleItemError=SingleItemError<Vec<ScriptPolicyID>>;
#[allow(dead_code)]
#[derive(Debug)]
pub struct UrlAndBody{
pub url:url::Url,
@@ -75,7 +76,7 @@ pub enum GameID{
FlyTrials=5,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct CreateMapfixRequest<'a>{
pub OperationID:OperationID,
@@ -88,13 +89,13 @@ pub struct CreateMapfixRequest<'a>{
pub TargetAssetID:u64,
pub Description:&'a str,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct MapfixIDResponse{
pub MapfixID:MapfixID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct CreateSubmissionRequest<'a>{
pub OperationID:OperationID,
@@ -107,7 +108,7 @@ pub struct CreateSubmissionRequest<'a>{
pub Status:u32,
pub Roles:u32,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct SubmissionIDResponse{
pub SubmissionID:SubmissionID,
@@ -126,11 +127,11 @@ pub enum ResourceType{
Submission=2,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
pub struct GetScriptRequest{
pub ScriptID:ScriptID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct GetScriptsRequest<'a>{
pub Page:u32,
@@ -150,7 +151,7 @@ pub struct GetScriptsRequest<'a>{
pub struct HashRequest<'a>{
pub hash:&'a str,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct ScriptResponse{
pub ID:ScriptID,
@@ -160,7 +161,7 @@ pub struct ScriptResponse{
pub ResourceType:ResourceType,
pub ResourceID:ResourceID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct CreateScriptRequest<'a>{
pub Name:&'a str,
@@ -169,7 +170,7 @@ pub struct CreateScriptRequest<'a>{
#[serde(skip_serializing_if="Option::is_none")]
pub ResourceID:Option<ResourceID>,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct ScriptIDResponse{
pub ScriptID:ScriptID,
@@ -185,11 +186,11 @@ pub enum Policy{
Replace=4,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
pub struct GetScriptPolicyRequest{
pub ScriptPolicyID:ScriptPolicyID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct GetScriptPoliciesRequest<'a>{
pub Page:u32,
@@ -201,7 +202,7 @@ pub struct GetScriptPoliciesRequest<'a>{
#[serde(skip_serializing_if="Option::is_none")]
pub Policy:Option<Policy>,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct ScriptPolicyResponse{
pub ID:ScriptPolicyID,
@@ -209,20 +210,20 @@ pub struct ScriptPolicyResponse{
pub ToScriptID:ScriptID,
pub Policy:Policy
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct CreateScriptPolicyRequest{
pub FromScriptID:ScriptID,
pub ToScriptID:ScriptID,
pub Policy:Policy,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct ScriptPolicyIDResponse{
pub ScriptPolicyID:ScriptPolicyID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct UpdateScriptPolicyRequest{
pub ID:ScriptPolicyID,
@@ -234,7 +235,7 @@ pub struct UpdateScriptPolicyRequest{
pub Policy:Option<Policy>,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct UpdateSubmissionModelRequest{
pub SubmissionID:SubmissionID,
@@ -251,73 +252,6 @@ pub enum Sort{
DateDescending=4,
}
#[derive(Clone,Debug,serde_repr::Serialize_repr,serde_repr::Deserialize_repr)]
#[repr(u8)]
pub enum MapfixStatus{
// Phase: Creation
UnderConstruction=0,
ChangesRequested=1,
// Phase: Review
Submitting=2,
Submitted=3,
// Phase: Testing
AcceptedUnvalidated=4, // pending script review, can re-trigger validation
Validating=5,
Validated=6,
Uploading=7,
Uploaded=8, // uploaded to the group, but pending release
Releasing=11,
// Phase: Final MapfixStatus
Rejected=9,
Released=10,
}
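Status enums like the one above travel over the wire as bare integers: the explicit =N discriminants, #[repr(u8)] and the serde_repr derives work together so serialization produces the number rather than the variant name. A tiny self-contained round-trip, with a cut-down stand-in enum rather than the real status list:

use serde_repr::{Deserialize_repr, Serialize_repr};

#[derive(Debug, PartialEq, Serialize_repr, Deserialize_repr)]
#[repr(u8)]
enum Status {
    Submitted = 3,
    Uploaded = 8,
    Released = 10,
}

fn main() -> serde_json::Result<()> {
    // Serializes to the discriminant, not the variant name.
    assert_eq!(serde_json::to_string(&Status::Uploaded)?, "8");
    // An integer in the JSON comes back as the matching variant.
    assert_eq!(serde_json::from_str::<Status>("10")?, Status::Released);
    Ok(())
}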
#[expect(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct GetMapfixesRequest<'a>{
pub Page:u32,
pub Limit:u32,
pub Sort:Option<Sort>,
pub DisplayName:Option<&'a str>,
pub Creator:Option<&'a str>,
pub GameID:Option<GameID>,
pub Submitter:Option<u64>,
pub AssetID:Option<u64>,
pub AssetVersion:Option<u64>,
pub TargetAssetID:Option<u64>,
pub StatusID:Option<MapfixStatus>,
}
#[expect(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize,serde::Deserialize)]
pub struct MapfixResponse{
pub ID:MapfixID,
pub DisplayName:String,
pub Creator:String,
pub GameID:u32,
pub CreatedAt:i64,
pub UpdatedAt:i64,
pub Submitter:u64,
pub AssetID:u64,
pub AssetVersion:u64,
pub ValidatedAssetID:Option<u64>,
pub ValidatedAssetVersion:Option<u64>,
pub Completed:bool,
pub TargetAssetID:u64,
pub StatusID:MapfixStatus,
pub Description:String,
}
#[expect(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct MapfixesResponse{
pub Total:u64,
pub Mapfixes:Vec<MapfixResponse>,
}
#[derive(Clone,Debug,serde_repr::Deserialize_repr)]
#[repr(u8)]
pub enum SubmissionStatus{
@@ -341,7 +275,7 @@ pub enum SubmissionStatus{
Released=10,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct GetSubmissionsRequest<'a>{
pub Page:u32,
@@ -352,12 +286,11 @@ pub struct GetSubmissionsRequest<'a>{
pub GameID:Option<GameID>,
pub Submitter:Option<u64>,
pub AssetID:Option<u64>,
pub AssetVersion:Option<u64>,
pub UploadedAssetID:Option<u64>,
pub StatusID:Option<SubmissionStatus>,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct SubmissionResponse{
pub ID:SubmissionID,
@@ -369,20 +302,18 @@ pub struct SubmissionResponse{
pub Submitter:u64,
pub AssetID:u64,
pub AssetVersion:u64,
pub ValidatedAssetID:Option<u64>,
pub ValidatedAssetVersion:Option<u64>,
pub UploadedAssetID:u64,
pub StatusID:SubmissionStatus,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct SubmissionsResponse{
pub Total:u64,
pub Submissions:Vec<SubmissionResponse>,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct GetMapsRequest<'a>{
pub Page:u32,
@@ -393,7 +324,7 @@ pub struct GetMapsRequest<'a>{
pub GameID:Option<GameID>,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct MapResponse{
pub ID:i64,
@@ -403,119 +334,7 @@ pub struct MapResponse{
pub Date:i64,
}
#[expect(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct GetMapfixAuditEventsRequest{
pub Page:u32,
pub Limit:u32,
pub MapfixID:i64,
}
#[expect(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct GetSubmissionAuditEventsRequest{
pub Page:u32,
pub Limit:u32,
pub SubmissionID:i64,
}
#[derive(Clone,Debug,serde_repr::Deserialize_repr)]
#[repr(u32)]
pub enum AuditEventType{
Action=0,
Comment=1,
ChangeModel=2,
ChangeValidatedModel=3,
ChangeDisplayName=4,
ChangeCreator=5,
Error=6,
CheckList=7,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventAction{
pub target_status:MapfixStatus,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventComment{
pub comment:String,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventChangeModel{
pub old_model_id:u64,
pub old_model_version:u64,
pub new_model_id:u64,
pub new_model_version:u64,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventChangeValidatedModel{
pub validated_model_id:u64,
pub validated_model_version:u64,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventChangeName{
pub old_name:String,
pub new_name:String,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventError{
pub error:String,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventCheck{
pub name:String,
pub summary:String,
pub passed:bool,
}
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventCheckList{
pub check_list:Vec<AuditEventCheck>,
}
#[derive(Clone,Debug)]
pub enum AuditEventData{
Action(AuditEventAction),
Comment(AuditEventComment),
ChangeModel(AuditEventChangeModel),
ChangeValidatedModel(AuditEventChangeValidatedModel),
ChangeDisplayName(AuditEventChangeName),
ChangeCreator(AuditEventChangeName),
Error(AuditEventError),
CheckList(AuditEventCheckList),
}
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
pub struct AuditEventID(pub(crate)i64);
#[expect(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct AuditEventReponse{
pub ID:AuditEventID,
pub Date:i64,
pub User:u64,
pub Username:String,
pub ResourceType:ResourceType,
pub ResourceID:ResourceID,
pub EventType:AuditEventType,
EventData:serde_json::Value,
}
impl AuditEventReponse{
pub fn data(self)->serde_json::Result<AuditEventData>{
Ok(match self.EventType{
AuditEventType::Action=>AuditEventData::Action(serde_json::from_value(self.EventData)?),
AuditEventType::Comment=>AuditEventData::Comment(serde_json::from_value(self.EventData)?),
AuditEventType::ChangeModel=>AuditEventData::ChangeModel(serde_json::from_value(self.EventData)?),
AuditEventType::ChangeValidatedModel=>AuditEventData::ChangeValidatedModel(serde_json::from_value(self.EventData)?),
AuditEventType::ChangeDisplayName=>AuditEventData::ChangeDisplayName(serde_json::from_value(self.EventData)?),
AuditEventType::ChangeCreator=>AuditEventData::ChangeCreator(serde_json::from_value(self.EventData)?),
AuditEventType::Error=>AuditEventData::Error(serde_json::from_value(self.EventData)?),
AuditEventType::CheckList=>AuditEventData::CheckList(serde_json::from_value(self.EventData)?),
})
}
}
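AuditEventReponse keeps its payload as a raw serde_json::Value next to an EventType discriminant, and data() picks the concrete struct to decode with serde_json::from_value. A stripped-down sketch of that dispatch with two made-up event kinds and lowercase field names (the real API uses the capitalized names shown above):

use serde::Deserialize;
use serde_json::Value;

#[derive(Deserialize)]
struct CommentEvent { comment: String }

#[derive(Deserialize)]
struct ErrorEvent { error: String }

enum EventData {
    Comment(CommentEvent),
    Error(ErrorEvent),
}

#[derive(Deserialize)]
struct AuditEvent {
    // 1 = comment, 6 = error, mirroring the AuditEventType discriminants above.
    event_type: u32,
    event_data: Value,
}

impl AuditEvent {
    fn data(self) -> serde_json::Result<EventData> {
        Ok(match self.event_type {
            1 => EventData::Comment(serde_json::from_value(self.event_data)?),
            _ => EventData::Error(serde_json::from_value(self.event_data)?),
        })
    }
}

fn main() -> serde_json::Result<()> {
    let raw = r#"{ "event_type": 1, "event_data": { "comment": "nice map" } }"#;
    let event: AuditEvent = serde_json::from_str(raw)?;
    match event.data()? {
        EventData::Comment(c) => println!("comment: {}", c.comment),
        EventData::Error(e) => println!("error: {}", e.error),
    }
    Ok(())
}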
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct Check{
pub Name:&'static str,
@@ -523,7 +342,7 @@ pub struct Check{
pub Passed:bool,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionSubmissionSubmittedRequest{
pub SubmissionID:SubmissionID,
@@ -533,33 +352,33 @@ pub struct ActionSubmissionSubmittedRequest{
pub GameID:GameID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionSubmissionRequestChangesRequest{
pub SubmissionID:SubmissionID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionSubmissionUploadedRequest{
pub SubmissionID:SubmissionID,
pub UploadedAssetID:u64,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionSubmissionAcceptedRequest{
pub SubmissionID:SubmissionID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct CreateSubmissionAuditErrorRequest{
pub SubmissionID:SubmissionID,
pub ErrorMessage:String,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct CreateSubmissionAuditCheckListRequest<'a>{
pub SubmissionID:SubmissionID,
@@ -568,16 +387,8 @@ pub struct CreateSubmissionAuditCheckListRequest<'a>{
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
pub struct SubmissionID(pub(crate)i64);
impl SubmissionID{
pub const fn new(value:i64)->Self{
Self(value)
}
pub const fn value(&self)->i64{
self.0
}
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct UpdateMapfixModelRequest{
pub MapfixID:MapfixID,
@@ -585,7 +396,7 @@ pub struct UpdateMapfixModelRequest{
pub ModelVersion:u64,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionMapfixSubmittedRequest{
pub MapfixID:MapfixID,
@@ -595,32 +406,32 @@ pub struct ActionMapfixSubmittedRequest{
pub GameID:GameID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionMapfixRequestChangesRequest{
pub MapfixID:MapfixID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionMapfixUploadedRequest{
pub MapfixID:MapfixID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionMapfixAcceptedRequest{
pub MapfixID:MapfixID,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct CreateMapfixAuditErrorRequest{
pub MapfixID:MapfixID,
pub ErrorMessage:String,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct CreateMapfixAuditCheckListRequest<'a>{
pub MapfixID:MapfixID,
@@ -629,16 +440,8 @@ pub struct CreateMapfixAuditCheckListRequest<'a>{
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
pub struct MapfixID(pub(crate)i64);
impl MapfixID{
pub const fn new(value:i64)->Self{
Self(value)
}
pub const fn value(&self)->i64{
self.0
}
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug)]
pub struct ActionOperationFailedRequest{
pub OperationID:OperationID,
@@ -665,7 +468,7 @@ impl Resource{
}
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
#[derive(Clone,Debug,serde::Serialize)]
pub struct ReleaseInfo{
pub SubmissionID:SubmissionID,
@@ -675,8 +478,3 @@ pub struct ReleaseInfo{
pub struct ReleaseRequest<'a>{
pub schedule:&'a [ReleaseInfo],
}
#[expect(nonstandard_style)]
#[derive(Clone,Debug,serde::Deserialize)]
pub struct OperationIDResponse{
pub OperationID:OperationID,
}
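The repeated #[expect(nonstandard_style)] / #[allow(nonstandard_style)] swaps throughout this file come down to a lint-attribute difference: #[allow] silences the lint unconditionally, while #[expect] (stable since Rust 1.81) also warns via unfulfilled_lint_expectations when the lint never fires, so stale suppressions surface. A tiny illustration with made-up structs:

// If every field here were renamed to snake_case, #[expect] would warn that
// the expectation was never fulfilled; #[allow] would stay silent either way.
#[expect(nonstandard_style)]
struct SubmissionRow {
    DisplayName: String, // non-snake-case field fulfills the expectation
}

#[allow(nonstandard_style)]
struct MapfixRow {
    TargetAssetID: u64,
}

fn main() {
    let submission = SubmissionRow { DisplayName: "bhop_example".into() };
    let mapfix = MapfixRow { TargetAssetID: 0 };
    println!("{} {}", submission.DisplayName, mapfix.TargetAssetID);
}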

View File

@@ -6,7 +6,7 @@ use heck::{ToSnakeCase,ToTitleCase};
use rbx_dom_weak::Instance;
use rust_grpc::validator::Check;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
ModelInfoDownload(rbx_asset::cloud::GetError),
@@ -24,16 +24,7 @@ impl std::fmt::Display for Error{
}
impl std::error::Error for Error{}
macro_rules! lazy_regex{
($r:literal)=>{{
use regex::Regex;
use std::sync::LazyLock;
static RE:LazyLock<Regex>=LazyLock::new(||Regex::new($r).unwrap());
&RE
}};
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
pub struct CheckRequest{
ModelID:u64,
SkipChecks:bool,
@@ -56,20 +47,12 @@ impl From<crate::nats_types::CheckSubmissionRequest> for CheckRequest{
}
}
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,Ord,PartialOrd)]
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
struct ModeID(u64);
impl ModeID{
const MAIN:Self=Self(0);
const BONUS:Self=Self(1);
}
impl std::fmt::Display for ModeID{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
match self{
&ModeID::MAIN=>write!(f,"Main"),
&ModeID(mode_id)=>write!(f,"Bonus{mode_id}"),
}
}
}
enum Zone{
Start,
Finish,
@@ -79,7 +62,7 @@ struct ModeElement{
zone:Zone,
mode_id:ModeID,
}
#[expect(dead_code)]
#[allow(dead_code)]
pub enum IDParseError{
NoCaptures,
ParseInt(core::num::ParseIntError),
@@ -96,7 +79,7 @@ impl std::str::FromStr for ModeElement{
"BonusFinish"=>Ok(Self{zone:Zone::Finish,mode_id:ModeID::BONUS}),
"BonusAnticheat"=>Ok(Self{zone:Zone::Anticheat,mode_id:ModeID::BONUS}),
other=>{
let everything_pattern=lazy_regex!(r"^Bonus(\d+)Start$|^BonusStart(\d+)$|^Bonus(\d+)Finish$|^BonusFinish(\d+)$|^Bonus(\d+)Anticheat$|^BonusAnticheat(\d+)$");
let everything_pattern=lazy_regex::lazy_regex!(r"^Bonus(\d+)Start$|^BonusStart(\d+)$|^Bonus(\d+)Finish$|^BonusFinish(\d+)$|^Bonus(\d+)Anticheat$|^BonusAnticheat(\d+)$");
if let Some(captures)=everything_pattern.captures(other){
if let Some(mode_id)=captures.get(1).or(captures.get(2)){
return Ok(Self{
@@ -156,16 +139,16 @@ impl std::str::FromStr for StageElement{
type Err=IDParseError;
fn from_str(s:&str)->Result<Self,Self::Err>{
// Trigger ForceTrigger Teleport ForceTeleport SpawnAt ForceSpawnAt
let teleport_pattern=lazy_regex!(r"^(?:Force)?(Teleport|SpawnAt|Trigger)(\d+)$");
if let Some(captures)=teleport_pattern.captures(s){
let bonus_start_pattern=lazy_regex::lazy_regex!(r"^(?:Force)?(Teleport|SpawnAt|Trigger)(\d+)$");
if let Some(captures)=bonus_start_pattern.captures(s){
return Ok(StageElement{
behaviour:StageElementBehaviour::Teleport,
stage_id:StageID(captures[1].parse().map_err(IDParseError::ParseInt)?),
});
}
// Spawn
let spawn_pattern=lazy_regex!(r"^Spawn(\d+)$");
if let Some(captures)=spawn_pattern.captures(s){
let bonus_finish_pattern=lazy_regex::lazy_regex!(r"^Spawn(\d+)$");
if let Some(captures)=bonus_finish_pattern.captures(s){
return Ok(StageElement{
behaviour:StageElementBehaviour::Spawn,
stage_id:StageID(captures[1].parse().map_err(IDParseError::ParseInt)?),
@@ -197,15 +180,15 @@ struct WormholeElement{
impl std::str::FromStr for WormholeElement{
type Err=IDParseError;
fn from_str(s:&str)->Result<Self,Self::Err>{
let wormhole_in_pattern=lazy_regex!(r"^WormholeIn(\d+)$");
if let Some(captures)=wormhole_in_pattern.captures(s){
let bonus_start_pattern=lazy_regex::lazy_regex!(r"^WormholeIn(\d+)$");
if let Some(captures)=bonus_start_pattern.captures(s){
return Ok(Self{
behaviour:WormholeBehaviour::In,
wormhole_id:WormholeID(captures[1].parse().map_err(IDParseError::ParseInt)?),
});
}
let wormhole_out_pattern=lazy_regex!(r"^WormholeOut(\d+)$");
if let Some(captures)=wormhole_out_pattern.captures(s){
let bonus_finish_pattern=lazy_regex::lazy_regex!(r"^WormholeOut(\d+)$");
if let Some(captures)=bonus_finish_pattern.captures(s){
return Ok(Self{
behaviour:WormholeBehaviour::Out,
wormhole_id:WormholeID(captures[1].parse().map_err(IDParseError::ParseInt)?),
@@ -223,15 +206,6 @@ impl std::fmt::Display for WormholeElement{
}
}
fn count_sequential(modes:&HashMap<ModeID,Vec<&Instance>>)->usize{
for mode_id in 0..modes.len(){
if !modes.contains_key(&ModeID(mode_id as u64)){
return mode_id;
}
}
return modes.len();
}
/// Count various map elements
#[derive(Default)]
struct Counts<'a>{
@@ -251,24 +225,6 @@ pub struct ModelInfo<'a>{
counts:Counts<'a>,
unanchored_parts:Vec<&'a Instance>,
}
impl ModelInfo<'_>{
pub fn count_modes(&self)->Option<usize>{
let start_zones_count=self.counts.mode_start_counts.len();
let finish_zones_count=self.counts.mode_finish_counts.len();
let sequential_start_zones=count_sequential(&self.counts.mode_start_counts);
let sequential_finish_zones=count_sequential(&self.counts.mode_finish_counts);
// all counts must match
if start_zones_count==finish_zones_count
&& sequential_start_zones==sequential_finish_zones
&& start_zones_count==sequential_start_zones
&& finish_zones_count==sequential_finish_zones
{
Some(start_zones_count)
}else{
None
}
}
}
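count_modes above leans on count_sequential: walk 0..len and stop at the first missing ModeID, so only a gap-free prefix of modes counts. A condensed standalone sketch of that logic, with plain u64 keys standing in for ModeID and a made-up set of modes:

use std::collections::HashMap;

// Stop at the first gap: if Bonus5 exists but Bonus2..=Bonus4 do not,
// only the contiguous prefix counts as sequential.
fn count_sequential<V>(modes: &HashMap<u64, V>) -> usize {
    for mode_id in 0..modes.len() {
        if !modes.contains_key(&(mode_id as u64)) {
            return mode_id;
        }
    }
    modes.len()
}

fn main() {
    // Mode 0 is Main and bonuses start at 1, so Main + Bonus1 + Bonus5
    // leaves a gap at 2.
    let mut mode_start_counts: HashMap<u64, Vec<&str>> = HashMap::new();
    mode_start_counts.insert(0, vec!["MapStart"]);
    mode_start_counts.insert(1, vec!["BonusStart1"]);
    mode_start_counts.insert(5, vec!["BonusStart5"]);

    let sequential = count_sequential(&mode_start_counts);
    assert_eq!(sequential, 2);

    // Everything at or above the first gap gets flagged, mirroring the
    // SequentialModes summary ("modes should use a lower ModeID (no gaps)").
    let mut non_sequential: Vec<u64> = mode_start_counts
        .keys()
        .copied()
        .filter(|&mode_id| sequential as u64 <= mode_id)
        .collect();
    non_sequential.sort();
    assert_eq!(non_sequential, vec![5]);
}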
pub fn get_model_info<'a>(dom:&'a rbx_dom_weak::WeakDom,model_instance:&'a rbx_dom_weak::Instance)->ModelInfo<'a>{
// extract model info
@@ -281,7 +237,7 @@ pub fn get_model_info<'a>(dom:&'a rbx_dom_weak::WeakDom,model_instance:&'a rbx_d
let mut unanchored_parts=Vec::new();
let anchored_ustr=rbx_dom_weak::ustr("Anchored");
let db=rbx_reflection_database::get().unwrap();
let db=rbx_reflection_database::get();
let base_part=&db.classes["BasePart"];
let base_parts=dom.descendants_of(model_instance.referent()).filter(|&instance|
db.classes.get(instance.class.as_str()).is_some_and(|class|
@@ -442,7 +398,7 @@ pub struct MapInfoOwned{
pub creator:String,
pub game_id:GameID,
}
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum IntoMapInfoOwnedError{
DisplayName(StringValueError),
@@ -490,8 +446,6 @@ struct MapCheck<'a>{
mode_finish_counts:SetDifferenceCheck<SetDifferenceCheckContextAtLeastOne<ModeID,Vec<&'a Instance>>>,
// Check for dangling MapAnticheat zones (no associated MapStart)
mode_anticheat_counts:SetDifferenceCheck<SetDifferenceCheckContextAllowNone<ModeID,Vec<&'a Instance>>>,
// Check that modes are sequential
modes_sequential:Result<(),Vec<ModeID>>,
// Spawn1 must exist
spawn1:Result<Exists,Absent>,
// Check for dangling Teleport# (no associated Spawn#)
@@ -560,25 +514,6 @@ impl<'a> ModelInfo<'a>{
let mode_anticheat_counts=SetDifferenceCheckContextAllowNone::new(self.counts.mode_anticheat_counts)
.check(&self.counts.mode_start_counts);
// There must not be non-sequential modes. If Bonus100 exists, Bonuses 1-99 had better also exist.
let modes_sequential={
let sequential=count_sequential(&self.counts.mode_start_counts);
if sequential==self.counts.mode_start_counts.len(){
Ok(())
}else{
let mut non_sequential=Vec::with_capacity(self.counts.mode_start_counts.len()-sequential);
for (&mode_id,_) in &self.counts.mode_start_counts{
let ModeID(mode_id_u64)=mode_id;
if !(mode_id_u64<sequential as u64){
non_sequential.push(mode_id);
}
}
// sort so it's prettier when it prints out
non_sequential.sort();
Err(non_sequential)
}
};
// There must be exactly one start zone for every mode in the map.
let mode_start_counts=DuplicateCheckContext(self.counts.mode_start_counts).check(|c|1<c.len());
@@ -615,7 +550,6 @@ impl<'a> ModelInfo<'a>{
mode_start_counts,
mode_finish_counts,
mode_anticheat_counts,
modes_sequential,
spawn1,
teleport_counts,
spawn_counts,
@@ -639,7 +573,6 @@ impl MapCheck<'_>{
mode_start_counts:DuplicateCheck(Ok(())),
mode_finish_counts:SetDifferenceCheck(Ok(())),
mode_anticheat_counts:SetDifferenceCheck(Ok(())),
modes_sequential:Ok(()),
spawn1:Ok(Exists),
teleport_counts:SetDifferenceCheck(Ok(())),
spawn_counts:DuplicateCheck(Ok(())),
@@ -813,15 +746,6 @@ impl MapCheck<'_>{
}
}
};
let sequential_modes=match &self.modes_sequential{
Ok(())=>passed!("SequentialModes"),
Err(context)=>{
let non_sequential=context.len();
let plural_non_sequential=if non_sequential==1{"mode"}else{"modes"};
let comma_separated=Separated::new(", ",||context);
summary_format!("SequentialModes","{non_sequential} {plural_non_sequential} should use a lower ModeID (no gaps): {comma_separated}")
}
};
let spawn1=match &self.spawn1{
Ok(Exists)=>passed!("Spawn1"),
Err(Absent)=>summary_format!("Spawn1","Model has no Spawn1"),
@@ -900,7 +824,6 @@ impl MapCheck<'_>{
extra_finish,
missing_finish,
dangling_anticheat,
sequential_modes,
spawn1,
dangling_teleport,
duplicate_spawns,

View File

@@ -1,7 +1,7 @@
use crate::check::CheckListAndVersion;
use crate::nats_types::CheckMapfixRequest;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Check(crate::check::Error),

View File

@@ -1,7 +1,7 @@
use crate::check::CheckListAndVersion;
use crate::nats_types::CheckSubmissionRequest;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Check(crate::check::Error),

View File

@@ -1,7 +1,7 @@
use crate::download::download_asset_version;
use crate::rbx_util::{get_root_instance,get_mapinfo,read_dom,MapInfo,ReadDomError,GetRootInstanceError,GameID};
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
CreatorTypeMustBeUser,
@@ -17,11 +17,11 @@ impl std::fmt::Display for Error{
}
impl std::error::Error for Error{}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
pub struct CreateRequest{
pub ModelID:u64,
}
#[expect(nonstandard_style)]
#[allow(nonstandard_style)]
pub struct CreateResult{
pub AssetOwner:u64,
pub DisplayName:Option<String>,

View File

@@ -1,7 +1,7 @@
use crate::nats_types::CreateMapfixRequest;
use crate::create::CreateRequest;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Create(crate::create::Error),

View File

@@ -2,7 +2,7 @@ use crate::nats_types::CreateSubmissionRequest;
use crate::create::CreateRequest;
use crate::rbx_util::GameID;
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
Create(crate::create::Error),

View File

@@ -1,4 +1,4 @@
#[expect(dead_code)]
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error{
ModelLocationDownload(rbx_asset::cloud::GetError),

View File

@@ -18,9 +18,6 @@ impl Service{
endpoint!(set_status_submitted,SubmittedRequest,NullResponse);
endpoint!(set_status_request_changes,MapfixId,NullResponse);
endpoint!(set_status_validated,MapfixId,NullResponse);
endpoint!(set_status_not_validated,MapfixId,NullResponse);
endpoint!(set_status_failed,MapfixId,NullResponse);
endpoint!(set_status_uploaded,MapfixId,NullResponse);
endpoint!(set_status_not_uploaded,MapfixId,NullResponse);
endpoint!(set_status_released,MapfixReleaseRequest,NullResponse);
endpoint!(set_status_not_released,MapfixId,NullResponse);
}

View File

@@ -11,6 +11,5 @@ impl Service{
)->Self{
Self{client}
}
endpoint!(success,OperationSuccessRequest,NullResponse);
endpoint!(fail,OperationFailRequest,NullResponse);
}

View File

@@ -18,8 +18,6 @@ impl Service{
endpoint!(set_status_submitted,SubmittedRequest,NullResponse);
endpoint!(set_status_request_changes,SubmissionId,NullResponse);
endpoint!(set_status_validated,SubmissionId,NullResponse);
endpoint!(set_status_not_validated,SubmissionId,NullResponse);
endpoint!(set_status_failed,SubmissionId,NullResponse);
endpoint!(set_status_uploaded,StatusUploadedRequest,NullResponse);
endpoint!(set_status_not_uploaded,SubmissionId,NullResponse);
endpoint!(set_status_released,SubmissionReleaseRequest,NullResponse);
}

View File

@@ -13,15 +13,13 @@ mod check_submission;
mod create;
mod create_mapfix;
mod create_submission;
mod release;
mod release_mapfix;
mod release_submissions_batch;
mod upload_mapfix;
mod upload_submission;
mod validator;
mod validate_mapfix;
mod validate_submission;
#[allow(dead_code)]
#[derive(Debug)]
pub enum StartupError{
API(tonic::transport::Error),
@@ -49,44 +47,24 @@ async fn main()->Result<(),StartupError>{
},
Err(e)=>panic!("{e}: ROBLOX_GROUP_ID env required"),
};
let load_asset_version_place_id=std::env::var("LOAD_ASSET_VERSION_PLACE_ID").expect("LOAD_ASSET_VERSION_PLACE_ID env required").parse().expect("LOAD_ASSET_VERSION_PLACE_ID int parse failed");
let load_asset_version_universe_id=std::env::var("LOAD_ASSET_VERSION_UNIVERSE_ID").expect("LOAD_ASSET_VERSION_UNIVERSE_ID env required").parse().expect("LOAD_ASSET_VERSION_UNIVERSE_ID int parse failed");
// create / upload models through STRAFESNET_CI2 account
let cookie=std::env::var("RBXCOOKIE").expect("RBXCOOKIE env required");
let cookie_context=rbx_asset::cookie::Context::new(rbx_asset::cookie::Cookie::new(cookie));
// download models through cloud api (STRAFESNET_CI2 account)
// download models through cloud api
let api_key=std::env::var("RBX_API_KEY").expect("RBX_API_KEY env required");
let cloud_context=rbx_asset::cloud::Context::new(rbx_asset::cloud::ApiKey::new(api_key));
// luau execution cloud api (StrafesNET group)
let api_key=std::env::var("RBX_API_KEY_LUAU_EXECUTION").expect("RBX_API_KEY_LUAU_EXECUTION env required");
let cloud_context_luau_execution=rbx_asset::cloud::Context::new(rbx_asset::cloud::ApiKey::new(api_key));
// maps-service api
let api_host_internal=std::env::var("API_HOST_INTERNAL").expect("API_HOST_INTERNAL env required");
let endpoint=tonic::transport::Endpoint::new(api_host_internal).map_err(StartupError::API)?;
let channel=endpoint.connect_lazy();
let mapfixes=crate::grpc::mapfixes::Service::new(crate::grpc::mapfixes::ValidatorMapfixesServiceClient::new(channel.clone()));
let operations=crate::grpc::operations::Service::new(crate::grpc::operations::ValidatorOperationsServiceClient::new(channel.clone()));
let scripts=crate::grpc::scripts::Service::new(crate::grpc::scripts::ValidatorScriptsServiceClient::new(channel.clone()));
let script_policy=crate::grpc::script_policy::Service::new(crate::grpc::script_policy::ValidatorScriptPolicyServiceClient::new(channel.clone()));
let submissions=crate::grpc::submissions::Service::new(crate::grpc::submissions::ValidatorSubmissionsServiceClient::new(channel.clone()));
let load_asset_version_runtime=rbx_asset::cloud::LuauSessionLatestRequest{
place_id:load_asset_version_place_id,
universe_id:load_asset_version_universe_id,
};
let message_handler=message_handler::MessageHandler{
cloud_context,
cookie_context,
cloud_context_luau_execution,
group_id,
load_asset_version_runtime,
mapfixes,
operations,
scripts,
script_policy,
submissions,
};
let mapfixes=crate::grpc::mapfixes::ValidatorMapfixesServiceClient::new(channel.clone());
let operations=crate::grpc::operations::ValidatorOperationsServiceClient::new(channel.clone());
let scripts=crate::grpc::scripts::ValidatorScriptsServiceClient::new(channel.clone());
let script_policy=crate::grpc::script_policy::ValidatorScriptPolicyServiceClient::new(channel.clone());
let submissions=crate::grpc::submissions::ValidatorSubmissionsServiceClient::new(channel);
let message_handler=message_handler::MessageHandler::new(cloud_context,cookie_context,group_id,mapfixes,operations,scripts,script_policy,submissions);
// nats
let nats_host=std::env::var("NATS_HOST").expect("NATS_HOST env required");
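main() above wires every gRPC service onto a single lazily-connected tonic channel. A minimal sketch of just that wiring; the address is a made-up stand-in for the API_HOST_INTERNAL value, and the underscore bindings mark where the generated mapfixes/operations/scripts/script_policy/submissions clients would take clones of the channel.

use tonic::transport::Endpoint;

#[tokio::main]
async fn main() -> Result<(), tonic::transport::Error> {
    // In the validator this value comes from the API_HOST_INTERNAL env var.
    let api_host_internal = "http://maps-service.internal:8080".to_string();

    // connect_lazy() returns immediately; the connection is only established
    // when the first RPC is sent.
    let endpoint = Endpoint::new(api_host_internal)?;
    let channel = endpoint.connect_lazy();

    // Channel is cheap to clone, so every generated client can share the same
    // underlying connection.
    let _mapfixes_channel = channel.clone();
    let _submissions_channel = channel;
    Ok(())
}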

Some files were not shown because too many files have changed in this diff.