Merge branch 'next'

commit 7daf8f6437
38 changed files with 1451 additions and 456 deletions
@@ -3,6 +3,6 @@
 members = [
     "planetwars-rules",
     "planetwars-matchrunner",
-    "planetwars-cli",
     "planetwars-server",
+    "planetwars-client",
 ]
planetwars-client/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
+[package]
+name = "planetwars-client"
+version = "0.0.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+tokio = { version = "1.15", features = ["full"] }
+tokio-stream = "0.1.9"
+prost = "0.10"
+tonic = "0.7.2"
+serde = { version = "1.0", features = ["derive"] }
+toml = "0.5"
+planetwars-matchrunner = { path = "../planetwars-matchrunner" }
+
+[build-dependencies]
+tonic-build = "0.7.2"
planetwars-client/build.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+extern crate tonic_build;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    tonic_build::configure()
+        .build_server(false)
+        .build_client(true)
+        .compile(&["../proto/bot_api.proto"], &["../proto"])?;
+    Ok(())
+}
planetwars-client/simplebot.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
+name = "simplebot"
+command = ["python", "../simplebot/simplebot.py"]
planetwars-client/src/main.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
+pub mod pb {
+    tonic::include_proto!("grpc.planetwars.bot_api");
+}
+
+use pb::bot_api_service_client::BotApiServiceClient;
+use planetwars_matchrunner::bot_runner::Bot;
+use serde::Deserialize;
+use std::{path::PathBuf, time::Duration};
+use tokio::sync::mpsc;
+use tokio_stream::wrappers::UnboundedReceiverStream;
+use tonic::{metadata::MetadataValue, transport::Channel, Request, Status};
+
+#[derive(Deserialize)]
+struct BotConfig {
+    #[allow(dead_code)]
+    name: String,
+    command: Vec<String>,
+}
+
+#[tokio::main]
+async fn main() {
+    let content = std::fs::read_to_string("simplebot.toml").unwrap();
+    let bot_config: BotConfig = toml::from_str(&content).unwrap();
+
+    let channel = Channel::from_static("http://localhost:50051")
+        .connect()
+        .await
+        .unwrap();
+
+    let created_match = create_match(channel.clone()).await.unwrap();
+    run_player(bot_config, created_match.player_key, channel).await;
+    tokio::time::sleep(Duration::from_secs(1)).await;
+}
+
+async fn create_match(channel: Channel) -> Result<pb::CreatedMatch, Status> {
+    let mut client = BotApiServiceClient::new(channel);
+    let res = client
+        .create_match(Request::new(pb::MatchRequest {
+            opponent_name: "simplebot".to_string(),
+        }))
+        .await;
+    res.map(|response| response.into_inner())
+}
+
+async fn run_player(bot_config: BotConfig, player_key: String, channel: Channel) {
+    let mut client = BotApiServiceClient::with_interceptor(channel, |mut req: Request<()>| {
+        let player_key: MetadataValue<_> = player_key.parse().unwrap();
+        req.metadata_mut().insert("player_key", player_key);
+        Ok(req)
+    });
+
+    let mut bot_process = Bot {
+        working_dir: PathBuf::from("."),
+        argv: bot_config.command,
+    }
+    .spawn_process();
+
+    let (tx, rx) = mpsc::unbounded_channel();
+    let mut stream = client
+        .connect_bot(UnboundedReceiverStream::new(rx))
+        .await
+        .unwrap()
+        .into_inner();
+    while let Some(message) = stream.message().await.unwrap() {
+        let moves = bot_process.communicate(&message.content).await.unwrap();
+        tx.send(pb::PlayerRequestResponse {
+            request_id: message.request_id,
+            content: moves.as_bytes().to_vec(),
+        })
+        .unwrap();
+    }
+}
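Note (editorial, not part of the commit): the client above depends on the tonic-generated `pb` module; the proto file itself does not appear in the hunks shown here. The Rust sketch below only restates the message shapes that can be inferred from how the client and server use them. The field names and types mirror the code; the derives, ordering and everything else are assumptions.

    // Sketch of what tonic presumably generates for proto/bot_api.proto,
    // inferred from usage in this commit only.
    pub mod pb_sketch {
        #[derive(Clone, Debug, Default)]
        pub struct MatchRequest {
            pub opponent_name: String, // bot to play against, e.g. "simplebot"
        }

        #[derive(Clone, Debug, Default)]
        pub struct CreatedMatch {
            pub match_id: i32,      // id of the match row created on the server
            pub player_key: String, // secret sent back as the `player_key` metadata entry
        }

        // Server -> client: a turn request the bot must answer.
        #[derive(Clone, Debug, Default)]
        pub struct PlayerRequest {
            pub request_id: i32,
            pub content: Vec<u8>,
        }

        // Client -> server: the bot's answer to a request.
        #[derive(Clone, Debug, Default)]
        pub struct PlayerRequestResponse {
            pub request_id: i32,
            pub content: Vec<u8>,
        }
    }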
@@ -1,5 +1,4 @@
 use std::io;
-use std::path::PathBuf;
 use std::pin::Pin;
 use std::sync::{Arc, Mutex};
 
@@ -19,8 +18,9 @@ use crate::BotSpec;
 #[derive(Clone, Debug)]
 pub struct DockerBotSpec {
     pub image: String,
-    pub code_path: PathBuf,
-    pub argv: Vec<String>,
+    pub binds: Option<Vec<String>>,
+    pub argv: Option<Vec<String>>,
+    pub working_dir: Option<String>,
 }
 
 #[async_trait]
@@ -42,14 +42,12 @@ async fn spawn_docker_process(
     params: &DockerBotSpec,
 ) -> Result<ContainerProcess, bollard::errors::Error> {
     let docker = Docker::connect_with_socket_defaults()?;
-    let bot_code_dir = std::fs::canonicalize(&params.code_path).unwrap();
-    let code_dir_str = bot_code_dir.as_os_str().to_str().unwrap();
 
     let memory_limit = 512 * 1024 * 1024; // 512MB
     let config = container::Config {
         image: Some(params.image.clone()),
         host_config: Some(bollard::models::HostConfig {
-            binds: Some(vec![format!("{}:{}", code_dir_str, "/workdir")]),
+            binds: params.binds.clone(),
             network_mode: Some("none".to_string()),
             memory: Some(memory_limit),
             memory_swap: Some(memory_limit),
@@ -59,8 +57,8 @@ async fn spawn_docker_process(
             // cpu_quota: Some(10_000),
             ..Default::default()
         }),
-        working_dir: Some("/workdir".to_string()),
-        cmd: Some(params.argv.clone()),
+        working_dir: params.working_dir.clone(),
+        cmd: params.argv.clone(),
         attach_stdin: Some(true),
        attach_stdout: Some(true),
         attach_stderr: Some(true),
@@ -6,9 +6,11 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+futures = "0.3"
 tokio = { version = "1.15", features = ["full"] }
+tokio-stream = "0.1.9"
 hyper = "0.14"
-axum = { version = "0.4", features = ["json", "headers", "multipart"] }
+axum = { version = "0.5", features = ["json", "headers", "multipart"] }
 diesel = { version = "1.4.4", features = ["postgres", "chrono"] }
 diesel-derive-enum = { version = "1.1", features = ["postgres"] }
 bb8 = "0.7"
@@ -26,9 +28,16 @@ toml = "0.5"
 planetwars-matchrunner = { path = "../planetwars-matchrunner" }
 config = { version = "0.12", features = ["toml"] }
 thiserror = "1.0.31"
+sha2 = "0.10"
+tokio-util = { version = "0.7.3", features = ["io"] }
+prost = "0.10"
+tonic = "0.7.2"
 
 # TODO: remove me
 shlex = "1.1"
 
+[build-dependencies]
+tonic-build = "0.7.2"
+
 [dev-dependencies]
 parking_lot = "0.11"
planetwars-server/build.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+extern crate tonic_build;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    tonic_build::configure()
+        .build_server(true)
+        .build_client(false)
+        .compile(&["../proto/bot_api.proto"], &["../proto"])?;
+    Ok(())
+}
@@ -1 +1,13 @@
 database_url = "postgresql://planetwars:planetwars@localhost/planetwars"
+
+python_runner_image = "python:3.10-slim-buster"
+container_registry_url = "localhost:9001"
+
+bots_directory = "./data/bots"
+match_logs_directory = "./data/matches"
+maps_directory = "./data/maps"
+
+registry_directory = "./data/registry"
+registry_admin_password = "verysecretadminpassword"
+
+ranker_enabled = false
@@ -0,0 +1 @@
+ALTER TABLE match_players ALTER COLUMN code_bundle_id SET NOT NULL;

@@ -0,0 +1 @@
+ALTER TABLE match_players ALTER COLUMN code_bundle_id DROP NOT NULL;

@@ -0,0 +1,6 @@
+ALTER TABLE match_players RENAME COLUMN bot_version_id TO code_bundle_id;
+
+ALTER TABLE bot_versions DROP COLUMN container_digest;
+ALTER TABLE bot_versions RENAME COLUMN code_bundle_path TO path;
+ALTER TABLE bot_versions ALTER COLUMN path SET NOT NULL;
+ALTER TABLE bot_versions RENAME TO code_bundles;

@@ -0,0 +1,6 @@
+ALTER TABLE code_bundles RENAME TO bot_versions;
+ALTER TABLE bot_versions RENAME COLUMN path to code_bundle_path;
+ALTER TABLE bot_versions ALTER COLUMN code_bundle_path DROP NOT NULL;
+ALTER TABLE bot_versions ADD COLUMN container_digest TEXT;
+
+ALTER TABLE match_players RENAME COLUMN code_bundle_id TO bot_version_id;
@@ -1,7 +1,7 @@
 use diesel::prelude::*;
 use serde::{Deserialize, Serialize};
 
-use crate::schema::{bots, code_bundles};
+use crate::schema::{bot_versions, bots};
 use chrono;
 
 #[derive(Insertable)]
@@ -44,38 +44,40 @@ pub fn find_all_bots(conn: &PgConnection) -> QueryResult<Vec<Bot>> {
 }
 
 #[derive(Insertable)]
-#[table_name = "code_bundles"]
-pub struct NewCodeBundle<'a> {
+#[table_name = "bot_versions"]
+pub struct NewBotVersion<'a> {
     pub bot_id: Option<i32>,
-    pub path: &'a str,
+    pub code_bundle_path: Option<&'a str>,
+    pub container_digest: Option<&'a str>,
 }
 
-#[derive(Queryable, Serialize, Deserialize, Debug)]
-pub struct CodeBundle {
+#[derive(Queryable, Serialize, Deserialize, Clone, Debug)]
+pub struct BotVersion {
     pub id: i32,
     pub bot_id: Option<i32>,
-    pub path: String,
+    pub code_bundle_path: Option<String>,
     pub created_at: chrono::NaiveDateTime,
+    pub container_digest: Option<String>,
 }
 
-pub fn create_code_bundle(
-    new_code_bundle: &NewCodeBundle,
+pub fn create_bot_version(
+    new_bot_version: &NewBotVersion,
     conn: &PgConnection,
-) -> QueryResult<CodeBundle> {
-    diesel::insert_into(code_bundles::table)
-        .values(new_code_bundle)
+) -> QueryResult<BotVersion> {
+    diesel::insert_into(bot_versions::table)
+        .values(new_bot_version)
         .get_result(conn)
 }
 
-pub fn find_bot_code_bundles(bot_id: i32, conn: &PgConnection) -> QueryResult<Vec<CodeBundle>> {
-    code_bundles::table
-        .filter(code_bundles::bot_id.eq(bot_id))
+pub fn find_bot_versions(bot_id: i32, conn: &PgConnection) -> QueryResult<Vec<BotVersion>> {
+    bot_versions::table
+        .filter(bot_versions::bot_id.eq(bot_id))
         .get_results(conn)
 }
 
-pub fn active_code_bundle(bot_id: i32, conn: &PgConnection) -> QueryResult<CodeBundle> {
-    code_bundles::table
-        .filter(code_bundles::bot_id.eq(bot_id))
-        .order(code_bundles::created_at.desc())
+pub fn active_bot_version(bot_id: i32, conn: &PgConnection) -> QueryResult<BotVersion> {
+    bot_versions::table
+        .filter(bot_versions::bot_id.eq(bot_id))
+        .order(bot_versions::created_at.desc())
         .first(conn)
 }
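Note (editorial, not part of the commit): with this change a bot version is either an uploaded code bundle or a pushed container image, which is why both columns became optional. A minimal sketch of the two call shapes, written as it might appear inside the planetwars-server crate; the helper function and the literal values are hypothetical.

    use crate::db::bots::{create_bot_version, NewBotVersion};
    use diesel::{PgConnection, QueryResult};

    // Hypothetical call sites illustrating the two kinds of bot versions.
    fn register_versions(conn: &PgConnection, bot_id: i32) -> QueryResult<()> {
        // A classic code-bundle upload: only the bundle path is known.
        let uploaded = NewBotVersion {
            bot_id: Some(bot_id),
            code_bundle_path: Some("abc123bundledir"),
            container_digest: None,
        };
        create_bot_version(&uploaded, conn)?;

        // A registry push: only the container digest is known.
        let pushed = NewBotVersion {
            bot_id: Some(bot_id),
            code_bundle_path: None,
            container_digest: Some("sha256:…"),
        };
        create_bot_version(&pushed, conn)?;
        Ok(())
    }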
@@ -6,9 +6,9 @@ use diesel::{
 };
 use diesel::{Connection, GroupedBy, PgConnection, QueryResult};
 
-use crate::schema::{bots, code_bundles, match_players, matches};
+use crate::schema::{bot_versions, bots, match_players, matches};
 
-use super::bots::{Bot, CodeBundle};
+use super::bots::{Bot, BotVersion};
 
 #[derive(Insertable)]
 #[table_name = "matches"]
@@ -25,7 +25,7 @@ pub struct NewMatchPlayer {
     /// player id within the match
     pub player_id: i32,
     /// id of the bot behind this player
-    pub code_bundle_id: i32,
+    pub bot_version_id: Option<i32>,
 }
 
 #[derive(Queryable, Identifiable)]
@@ -44,11 +44,11 @@ pub struct MatchBase {
 pub struct MatchPlayer {
     pub match_id: i32,
     pub player_id: i32,
-    pub code_bundle_id: i32,
+    pub code_bundle_id: Option<i32>,
 }
 
 pub struct MatchPlayerData {
-    pub code_bundle_id: i32,
+    pub code_bundle_id: Option<i32>,
 }
 
 pub fn create_match(
@@ -67,7 +67,7 @@ pub fn create_match(
         .map(|(num, player_data)| NewMatchPlayer {
             match_id: match_base.id,
             player_id: num as i32,
-            code_bundle_id: player_data.code_bundle_id,
+            bot_version_id: player_data.code_bundle_id,
         })
         .collect::<Vec<_>>();
 
@@ -92,8 +92,11 @@ pub fn list_matches(conn: &PgConnection) -> QueryResult<Vec<FullMatchData>> {
     let matches = matches::table.get_results::<MatchBase>(conn)?;
 
     let match_players = MatchPlayer::belonging_to(&matches)
-        .inner_join(code_bundles::table)
-        .left_join(bots::table.on(code_bundles::bot_id.eq(bots::id.nullable())))
+        .left_join(
+            bot_versions::table
+                .on(match_players::bot_version_id.eq(bot_versions::id.nullable())),
+        )
+        .left_join(bots::table.on(bot_versions::bot_id.eq(bots::id.nullable())))
         .load::<FullMatchPlayerData>(conn)?
         .grouped_by(&matches);
 
@@ -120,7 +123,7 @@ pub struct FullMatchData {
 // #[primary_key(base.match_id, base::player_id)]
 pub struct FullMatchPlayerData {
     pub base: MatchPlayer,
-    pub code_bundle: CodeBundle,
+    pub bot_version: Option<BotVersion>,
     pub bot: Option<Bot>,
 }
 
@@ -142,8 +145,11 @@ pub fn find_match(id: i32, conn: &PgConnection) -> QueryResult<FullMatchData> {
     let match_base = matches::table.find(id).get_result::<MatchBase>(conn)?;
 
     let match_players = MatchPlayer::belonging_to(&match_base)
-        .inner_join(code_bundles::table)
-        .left_join(bots::table.on(code_bundles::bot_id.eq(bots::id.nullable())))
+        .left_join(
+            bot_versions::table
+                .on(match_players::bot_version_id.eq(bot_versions::id.nullable())),
+        )
+        .left_join(bots::table.on(bot_versions::bot_id.eq(bots::id.nullable())))
         .load::<FullMatchPlayerData>(conn)?;
 
     let res = FullMatchData {
@@ -160,14 +166,17 @@ pub fn find_match_base(id: i32, conn: &PgConnection) -> QueryResult<MatchBase> {
 }
 
 pub enum MatchResult {
-    Finished { winner: Option<i32> }
+    Finished { winner: Option<i32> },
 }
 
 pub fn save_match_result(id: i32, result: MatchResult, conn: &PgConnection) -> QueryResult<()> {
     let MatchResult::Finished { winner } = result;
 
     diesel::update(matches::table.find(id))
-        .set((matches::winner.eq(winner), matches::state.eq(MatchState::Finished)))
+        .set((
+            matches::winner.eq(winner),
+            matches::state.eq(MatchState::Finished),
+        ))
         .execute(conn)?;
     Ok(())
 }
@@ -8,33 +8,63 @@ pub mod routes;
 pub mod schema;
 pub mod util;
 
-use std::net::SocketAddr;
 use std::ops::Deref;
+use std::path::PathBuf;
+use std::sync::Arc;
+use std::{fs, net::SocketAddr};
 
 use bb8::{Pool, PooledConnection};
 use bb8_diesel::{self, DieselConnectionManager};
 use config::ConfigError;
 use diesel::{Connection, PgConnection};
 use modules::ranking::run_ranker;
-use serde::Deserialize;
+use modules::registry::registry_service;
+use serde::{Deserialize, Serialize};
 
 use axum::{
     async_trait,
     extract::{Extension, FromRequest, RequestParts},
     http::StatusCode,
     routing::{get, post},
-    AddExtensionLayer, Router,
+    Router,
 };
 
-// TODO: make these configurable
-const BOTS_DIR: &str = "./data/bots";
-const MATCHES_DIR: &str = "./data/matches";
-const MAPS_DIR: &str = "./data/maps";
-const SIMPLEBOT_PATH: &str = "../simplebot/simplebot.py";
-
 type ConnectionPool = bb8::Pool<DieselConnectionManager<PgConnection>>;
 
-pub async fn seed_simplebot(pool: &ConnectionPool) {
+// this should probably be modularized a bit as the config grows
+#[derive(Serialize, Deserialize)]
+pub struct GlobalConfig {
+    /// url for the postgres database
+    pub database_url: String,
+
+    /// which image to use for running python bots
+    pub python_runner_image: String,
+
+    /// url for the internal container registry
+    /// this will be used when running bots
+    pub container_registry_url: String,
+
+    /// directory where bot code will be stored
+    pub bots_directory: String,
+    /// directory where match logs will be stored
+    pub match_logs_directory: String,
+    /// directory where map files will be stored
+    pub maps_directory: String,
+
+    /// base directory for registry data
+    pub registry_directory: String,
+    /// secret admin password for internal docker login
+    /// used to pull bots when running matches
+    pub registry_admin_password: String,
+
+    /// Whether to run the ranker
+    pub ranker_enabled: bool,
+}
+
+// TODO: do we still need this? Is there a better way?
+const SIMPLEBOT_PATH: &str = "../simplebot/simplebot.py";
+
+pub async fn seed_simplebot(config: &GlobalConfig, pool: &ConnectionPool) {
     let conn = pool.get().await.expect("could not get database connection");
     // This transaction is expected to fail when simplebot already exists.
     let _res = conn.transaction::<(), diesel::result::Error, _>(|| {
@@ -50,7 +80,7 @@ pub async fn seed_simplebot(pool: &ConnectionPool) {
         let simplebot_code =
             std::fs::read_to_string(SIMPLEBOT_PATH).expect("could not read simplebot code");
 
-        modules::bots::save_code_bundle(&simplebot_code, Some(simplebot.id), &conn)?;
+        modules::bots::save_code_string(&simplebot_code, Some(simplebot.id), &conn, config)?;
 
         println!("initialized simplebot");
 
@@ -60,13 +90,26 @@ pub async fn seed_simplebot(pool: &ConnectionPool) {
 
 pub type DbPool = Pool<DieselConnectionManager<PgConnection>>;
 
-pub async fn prepare_db(database_url: &str) -> DbPool {
-    let manager = DieselConnectionManager::<PgConnection>::new(database_url);
+pub async fn prepare_db(config: &GlobalConfig) -> DbPool {
+    let manager = DieselConnectionManager::<PgConnection>::new(&config.database_url);
     let pool = bb8::Pool::builder().build(manager).await.unwrap();
-    seed_simplebot(&pool).await;
+    seed_simplebot(config, &pool).await;
     pool
 }
 
+// create all directories required for further operation
+fn init_directories(config: &GlobalConfig) -> std::io::Result<()> {
+    fs::create_dir_all(&config.bots_directory)?;
+    fs::create_dir_all(&config.maps_directory)?;
+    fs::create_dir_all(&config.match_logs_directory)?;
+
+    let registry_path = PathBuf::from(&config.registry_directory);
+    fs::create_dir_all(registry_path.join("sha256"))?;
+    fs::create_dir_all(registry_path.join("manifests"))?;
+    fs::create_dir_all(registry_path.join("uploads"))?;
+    Ok(())
+}
+
 pub fn api() -> Router {
     Router::new()
         .route("/register", post(routes::users::register))
@@ -82,10 +125,7 @@ pub fn api() -> Router {
             "/bots/:bot_id/upload",
             post(routes::bots::upload_code_multipart),
         )
-        .route(
-            "/matches",
-            get(routes::matches::list_matches).post(routes::matches::play_match),
-        )
+        .route("/matches", get(routes::matches::list_matches))
         .route("/matches/:match_id", get(routes::matches::get_match_data))
         .route(
             "/matches/:match_id/log",
@@ -96,7 +136,7 @@ pub fn api() -> Router {
         .route("/save_bot", post(routes::bots::save_bot))
 }
 
-pub fn get_config() -> Result<Configuration, ConfigError> {
+pub fn get_config() -> Result<GlobalConfig, ConfigError> {
     config::Config::builder()
         .add_source(config::File::with_name("configuration.toml"))
         .add_source(config::Environment::with_prefix("PLANETWARS"))
@@ -104,15 +144,35 @@ pub fn get_config() -> Result<Configuration, ConfigError> {
         .try_deserialize()
 }
 
-pub async fn run_app() {
-    let configuration = get_config().unwrap();
-    let db_pool = prepare_db(&configuration.database_url).await;
+async fn run_registry(config: Arc<GlobalConfig>, db_pool: DbPool) {
+    // TODO: put in config
+    let addr = SocketAddr::from(([127, 0, 0, 1], 9001));
 
-    tokio::spawn(run_ranker(db_pool.clone()));
+    axum::Server::bind(&addr)
+        .serve(
+            registry_service()
+                .layer(Extension(db_pool))
+                .layer(Extension(config))
+                .into_make_service(),
+        )
+        .await
+        .unwrap();
+}
+
+pub async fn run_app() {
+    let global_config = Arc::new(get_config().unwrap());
+    let db_pool = prepare_db(&global_config).await;
+    init_directories(&global_config).unwrap();
+
+    if global_config.ranker_enabled {
+        tokio::spawn(run_ranker(global_config.clone(), db_pool.clone()));
+    }
+    tokio::spawn(run_registry(global_config.clone(), db_pool.clone()));
 
     let api_service = Router::new()
         .nest("/api", api())
-        .layer(AddExtensionLayer::new(db_pool))
+        .layer(Extension(db_pool))
+        .layer(Extension(global_config))
         .into_make_service();
 
     // TODO: put in config
@@ -121,11 +181,6 @@ pub async fn run_app() {
     axum::Server::bind(&addr).serve(api_service).await.unwrap();
 }
 
-#[derive(Deserialize)]
-pub struct Configuration {
-    pub database_url: String,
-}
-
 // we can also write a custom extractor that grabs a connection from the pool
 // which setup is appropriate depends on your application
 pub struct DatabaseConnection(PooledConnection<'static, DieselConnectionManager<PgConnection>>);
planetwars-server/src/modules/bot_api.rs (new file, 283 lines)
@@ -0,0 +1,283 @@
+pub mod pb {
+    tonic::include_proto!("grpc.planetwars.bot_api");
+}
+
+use std::collections::HashMap;
+use std::net::SocketAddr;
+use std::sync::{Arc, Mutex};
+use std::time::Duration;
+
+use runner::match_context::{EventBus, PlayerHandle, RequestError, RequestMessage};
+use runner::match_log::MatchLogger;
+use tokio::sync::{mpsc, oneshot};
+use tokio_stream::wrappers::UnboundedReceiverStream;
+use tonic;
+use tonic::transport::Server;
+use tonic::{Request, Response, Status, Streaming};
+
+use planetwars_matchrunner as runner;
+
+use crate::db;
+use crate::util::gen_alphanumeric;
+use crate::ConnectionPool;
+use crate::GlobalConfig;
+
+use super::matches::{MatchPlayer, RunMatch};
+
+pub struct BotApiServer {
+    conn_pool: ConnectionPool,
+    runner_config: Arc<GlobalConfig>,
+    router: PlayerRouter,
+}
+
+/// Routes players to their handler
+#[derive(Clone)]
+struct PlayerRouter {
+    routing_table: Arc<Mutex<HashMap<String, SyncThingData>>>,
+}
+
+impl PlayerRouter {
+    pub fn new() -> Self {
+        PlayerRouter {
+            routing_table: Arc::new(Mutex::new(HashMap::new())),
+        }
+    }
+}
+
+impl Default for PlayerRouter {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+// TODO: implement a way to expire entries
+impl PlayerRouter {
+    fn put(&self, player_key: String, entry: SyncThingData) {
+        let mut routing_table = self.routing_table.lock().unwrap();
+        routing_table.insert(player_key, entry);
+    }
+
+    fn take(&self, player_key: &str) -> Option<SyncThingData> {
+        // TODO: this design does not allow for reconnects. Is this desired?
+        let mut routing_table = self.routing_table.lock().unwrap();
+        routing_table.remove(player_key)
+    }
+}
+
+#[tonic::async_trait]
+impl pb::bot_api_service_server::BotApiService for BotApiServer {
+    type ConnectBotStream = UnboundedReceiverStream<Result<pb::PlayerRequest, Status>>;
+
+    async fn connect_bot(
+        &self,
+        req: Request<Streaming<pb::PlayerRequestResponse>>,
+    ) -> Result<Response<Self::ConnectBotStream>, Status> {
+        // TODO: clean up errors
+        let player_key = req
+            .metadata()
+            .get("player_key")
+            .ok_or_else(|| Status::unauthenticated("no player_key provided"))?;
+
+        let player_key_str = player_key
+            .to_str()
+            .map_err(|_| Status::invalid_argument("unreadable string"))?;
+
+        let sync_data = self
+            .router
+            .take(player_key_str)
+            .ok_or_else(|| Status::not_found("player_key not found"))?;
+
+        let stream = req.into_inner();
+
+        sync_data.tx.send(stream).unwrap();
+        Ok(Response::new(UnboundedReceiverStream::new(
+            sync_data.server_messages,
+        )))
+    }
+
+    async fn create_match(
+        &self,
+        req: Request<pb::MatchRequest>,
+    ) -> Result<Response<pb::CreatedMatch>, Status> {
+        // TODO: unify with matchrunner module
+        let conn = self.conn_pool.get().await.unwrap();
+
+        let match_request = req.get_ref();
+
+        let opponent_bot = db::bots::find_bot_by_name(&match_request.opponent_name, &conn)
+            .map_err(|_| Status::not_found("opponent not found"))?;
+        let opponent_bot_version = db::bots::active_bot_version(opponent_bot.id, &conn)
+            .map_err(|_| Status::not_found("no opponent version found"))?;
+
+        let player_key = gen_alphanumeric(32);
+
+        let remote_bot_spec = Box::new(RemoteBotSpec {
+            player_key: player_key.clone(),
+            router: self.router.clone(),
+        });
+        let run_match = RunMatch::from_players(
+            self.runner_config.clone(),
+            vec![
+                MatchPlayer::BotSpec {
+                    spec: remote_bot_spec,
+                },
+                MatchPlayer::BotVersion {
+                    bot: Some(opponent_bot),
+                    version: opponent_bot_version,
+                },
+            ],
+        );
+        let (created_match, _) = run_match
+            .run(self.conn_pool.clone())
+            .await
+            .expect("failed to create match");
+
+        Ok(Response::new(pb::CreatedMatch {
+            match_id: created_match.base.id,
+            player_key,
+        }))
+    }
+}
+
+// TODO: please rename me
+struct SyncThingData {
+    tx: oneshot::Sender<Streaming<pb::PlayerRequestResponse>>,
+    server_messages: mpsc::UnboundedReceiver<Result<pb::PlayerRequest, Status>>,
+}
+
+struct RemoteBotSpec {
+    player_key: String,
+    router: PlayerRouter,
+}
+
+#[tonic::async_trait]
+impl runner::BotSpec for RemoteBotSpec {
+    async fn run_bot(
+        &self,
+        player_id: u32,
+        event_bus: Arc<Mutex<EventBus>>,
+        _match_logger: MatchLogger,
+    ) -> Box<dyn PlayerHandle> {
+        let (tx, rx) = oneshot::channel();
+        let (server_msg_snd, server_msg_recv) = mpsc::unbounded_channel();
+        self.router.put(
+            self.player_key.clone(),
+            SyncThingData {
+                tx,
+                server_messages: server_msg_recv,
+            },
+        );
+
+        let fut = tokio::time::timeout(Duration::from_secs(10), rx);
+        match fut.await {
+            Ok(Ok(client_messages)) => {
+                // let client_messages = rx.await.unwrap();
+                tokio::spawn(handle_bot_messages(
+                    player_id,
+                    event_bus.clone(),
+                    client_messages,
+                ));
+            }
+            _ => {
+                // ensure router cleanup
+                self.router.take(&self.player_key);
+            }
+        };
+
+        // If the player did not connect, the receiving half of `sender`
+        // will be dropped here, resulting in a time-out for every turn.
+        // This is fine for now, but
+        // TODO: provide a formal mechanism for player startup failure
+        Box::new(RemoteBotHandle {
+            sender: server_msg_snd,
+            player_id,
+            event_bus,
+        })
+    }
+}
+
+async fn handle_bot_messages(
+    player_id: u32,
+    event_bus: Arc<Mutex<EventBus>>,
+    mut messages: Streaming<pb::PlayerRequestResponse>,
+) {
+    while let Some(message) = messages.message().await.unwrap() {
+        let request_id = (player_id, message.request_id as u32);
+        event_bus
+            .lock()
+            .unwrap()
+            .resolve_request(request_id, Ok(message.content));
+    }
+}
+
+struct RemoteBotHandle {
+    sender: mpsc::UnboundedSender<Result<pb::PlayerRequest, Status>>,
+    player_id: u32,
+    event_bus: Arc<Mutex<EventBus>>,
+}
+
+impl PlayerHandle for RemoteBotHandle {
+    fn send_request(&mut self, r: RequestMessage) {
+        let res = self.sender.send(Ok(pb::PlayerRequest {
+            request_id: r.request_id as i32,
+            content: r.content,
+        }));
+        match res {
+            Ok(()) => {
+                // schedule a timeout. See comments at method implementation
+                tokio::spawn(schedule_timeout(
+                    (self.player_id, r.request_id),
+                    r.timeout,
+                    self.event_bus.clone(),
+                ));
+            }
+            Err(_send_error) => {
+                // cannot contact the remote bot anymore;
+                // directly mark all requests as timed out.
+                // TODO: create a dedicated error type for this.
+                // should it be logged?
+                println!("send error: {:?}", _send_error);
+                self.event_bus
+                    .lock()
+                    .unwrap()
+                    .resolve_request((self.player_id, r.request_id), Err(RequestError::Timeout));
+            }
+        }
+    }
+}
+
+// TODO: this will spawn a task for every request, which might not be ideal.
+// Some alternatives:
+// - create a single task that manages all time-outs.
+// - intersperse timeouts with incoming client messages
+// - push timeouts upwards, into the matchrunner logic (before we hit the playerhandle).
+//   This was initially not done to allow timer start to be delayed until the message actually arrived
+//   with the player. Is this still needed, or is there a different way to do this?
+//
+async fn schedule_timeout(
+    request_id: (u32, u32),
+    duration: Duration,
+    event_bus: Arc<Mutex<EventBus>>,
+) {
+    tokio::time::sleep(duration).await;
+    event_bus
+        .lock()
+        .unwrap()
+        .resolve_request(request_id, Err(RequestError::Timeout));
+}
+
+pub async fn run_bot_api(runner_config: Arc<GlobalConfig>, pool: ConnectionPool) {
+    let router = PlayerRouter::new();
+    let server = BotApiServer {
+        router,
+        conn_pool: pool,
+        runner_config,
+    };
+
+    let addr = SocketAddr::from(([127, 0, 0, 1], 50051));
+    Server::builder()
+        .add_service(pb::bot_api_service_server::BotApiServiceServer::new(server))
+        .serve(addr)
+        .await
+        .unwrap()
+}
@@ -2,22 +2,25 @@ use std::path::PathBuf;
 
 use diesel::{PgConnection, QueryResult};
 
-use crate::{db, util::gen_alphanumeric, BOTS_DIR};
+use crate::{db, util::gen_alphanumeric, GlobalConfig};
 
-pub fn save_code_bundle(
+/// Save a string containing bot code as a code bundle.
+pub fn save_code_string(
     bot_code: &str,
     bot_id: Option<i32>,
     conn: &PgConnection,
-) -> QueryResult<db::bots::CodeBundle> {
+    config: &GlobalConfig,
+) -> QueryResult<db::bots::BotVersion> {
     let bundle_name = gen_alphanumeric(16);
 
-    let code_bundle_dir = PathBuf::from(BOTS_DIR).join(&bundle_name);
+    let code_bundle_dir = PathBuf::from(&config.bots_directory).join(&bundle_name);
     std::fs::create_dir(&code_bundle_dir).unwrap();
     std::fs::write(code_bundle_dir.join("bot.py"), bot_code).unwrap();
 
-    let new_code_bundle = db::bots::NewCodeBundle {
+    let new_code_bundle = db::bots::NewBotVersion {
         bot_id,
-        path: &bundle_name,
+        code_bundle_path: Some(&bundle_name),
+        container_digest: None,
     };
-    db::bots::create_code_bundle(&new_code_bundle, conn)
+    db::bots::create_bot_version(&new_code_bundle, conn)
 }
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc};
 
 use diesel::{PgConnection, QueryResult};
 use planetwars_matchrunner::{self as runner, docker_runner::DockerBotSpec, BotSpec, MatchConfig};
@@ -11,77 +11,126 @@ use crate::{
         matches::{MatchData, MatchResult},
     },
     util::gen_alphanumeric,
-    ConnectionPool, BOTS_DIR, MAPS_DIR, MATCHES_DIR,
+    ConnectionPool, GlobalConfig,
 };
 
-const PYTHON_IMAGE: &str = "python:3.10-slim-buster";
-
-pub struct RunMatch<'a> {
+pub struct RunMatch {
     log_file_name: String,
-    player_code_bundles: Vec<&'a db::bots::CodeBundle>,
-    match_id: Option<i32>,
+    players: Vec<MatchPlayer>,
+    config: Arc<GlobalConfig>,
 }
 
-impl<'a> RunMatch<'a> {
-    pub fn from_players(player_code_bundles: Vec<&'a db::bots::CodeBundle>) -> Self {
+pub enum MatchPlayer {
+    BotVersion {
+        bot: Option<db::bots::Bot>,
+        version: db::bots::BotVersion,
+    },
+    BotSpec {
+        spec: Box<dyn BotSpec>,
+    },
+}
+
+impl RunMatch {
+    pub fn from_players(config: Arc<GlobalConfig>, players: Vec<MatchPlayer>) -> Self {
         let log_file_name = format!("{}.log", gen_alphanumeric(16));
         RunMatch {
+            config,
             log_file_name,
-            player_code_bundles,
-            match_id: None,
+            players,
         }
     }
 
-    pub fn runner_config(&self) -> runner::MatchConfig {
+    fn into_runner_config(self) -> runner::MatchConfig {
         runner::MatchConfig {
-            map_path: PathBuf::from(MAPS_DIR).join("hex.json"),
+            map_path: PathBuf::from(&self.config.maps_directory).join("hex.json"),
             map_name: "hex".to_string(),
-            log_path: PathBuf::from(MATCHES_DIR).join(&self.log_file_name),
+            log_path: PathBuf::from(&self.config.match_logs_directory).join(&self.log_file_name),
             players: self
-                .player_code_bundles
-                .iter()
-                .map(|b| runner::MatchPlayer {
-                    bot_spec: code_bundle_to_botspec(b),
+                .players
+                .into_iter()
+                .map(|player| runner::MatchPlayer {
+                    bot_spec: match player {
+                        MatchPlayer::BotVersion { bot, version } => {
+                            bot_version_to_botspec(&self.config, bot.as_ref(), &version)
+                        }
+                        MatchPlayer::BotSpec { spec } => spec,
+                    },
                })
                 .collect(),
         }
     }
 
-    pub fn store_in_database(&mut self, db_conn: &PgConnection) -> QueryResult<MatchData> {
-        // don't store the same match twice
-        assert!(self.match_id.is_none());
+    pub async fn run(
+        self,
+        conn_pool: ConnectionPool,
+    ) -> QueryResult<(MatchData, JoinHandle<MatchOutcome>)> {
+        let match_data = {
+            // TODO: it would be nice to get an already-open connection here when possible.
+            // Maybe we need an additional abstraction, bundling a connection and connection pool?
+            let db_conn = conn_pool.get().await.expect("could not get a connection");
+            self.store_in_database(&db_conn)?
+        };
+
+        let runner_config = self.into_runner_config();
+        let handle = tokio::spawn(run_match_task(conn_pool, runner_config, match_data.base.id));
+
+        Ok((match_data, handle))
+    }
 
+    fn store_in_database(&self, db_conn: &PgConnection) -> QueryResult<MatchData> {
         let new_match_data = db::matches::NewMatch {
             state: db::matches::MatchState::Playing,
             log_path: &self.log_file_name,
         };
         let new_match_players = self
-            .player_code_bundles
+            .players
             .iter()
-            .map(|b| db::matches::MatchPlayerData {
-                code_bundle_id: b.id,
+            .map(|p| db::matches::MatchPlayerData {
+                code_bundle_id: match p {
+                    MatchPlayer::BotVersion { version, .. } => Some(version.id),
+                    MatchPlayer::BotSpec { .. } => None,
+                },
             })
             .collect::<Vec<_>>();
 
-        let match_data = db::matches::create_match(&new_match_data, &new_match_players, &db_conn)?;
-        self.match_id = Some(match_data.base.id);
-        Ok(match_data)
-    }
-
-    pub fn spawn(self, pool: ConnectionPool) -> JoinHandle<MatchOutcome> {
-        let match_id = self.match_id.expect("match must be saved before running");
-        let runner_config = self.runner_config();
-        tokio::spawn(run_match_task(pool, runner_config, match_id))
+        db::matches::create_match(&new_match_data, &new_match_players, db_conn)
     }
 }
 
-pub fn code_bundle_to_botspec(code_bundle: &db::bots::CodeBundle) -> Box<dyn BotSpec> {
-    let bundle_path = PathBuf::from(BOTS_DIR).join(&code_bundle.path);
+pub fn bot_version_to_botspec(
+    runner_config: &GlobalConfig,
+    bot: Option<&db::bots::Bot>,
+    bot_version: &db::bots::BotVersion,
+) -> Box<dyn BotSpec> {
+    if let Some(code_bundle_path) = &bot_version.code_bundle_path {
+        python_docker_bot_spec(runner_config, code_bundle_path)
+    } else if let (Some(container_digest), Some(bot)) = (&bot_version.container_digest, bot) {
+        Box::new(DockerBotSpec {
+            image: format!(
+                "{}/{}@{}",
+                runner_config.container_registry_url, bot.name, container_digest
+            ),
+            binds: None,
+            argv: None,
+            working_dir: None,
+        })
+    } else {
+        // TODO: ideally this would not be possible
+        panic!("bad bot version")
+    }
+}
+
+fn python_docker_bot_spec(config: &GlobalConfig, code_bundle_path: &str) -> Box<dyn BotSpec> {
+    let code_bundle_rel_path = PathBuf::from(&config.bots_directory).join(code_bundle_path);
+    let code_bundle_abs_path = std::fs::canonicalize(&code_bundle_rel_path).unwrap();
+    let code_bundle_path_str = code_bundle_abs_path.as_os_str().to_str().unwrap();
+
+    // TODO: it would be good to simplify this configuration
     Box::new(DockerBotSpec {
-        code_path: bundle_path,
-        image: PYTHON_IMAGE.to_string(),
-        argv: vec!["python".to_string(), "bot.py".to_string()],
+        image: config.python_runner_image.clone(),
+        binds: Some(vec![format!("{}:{}", code_bundle_path_str, "/workdir")]),
+        argv: Some(vec!["python".to_string(), "bot.py".to_string()]),
+        working_dir: Some("/workdir".to_string()),
     })
 }
 
@@ -104,5 +153,5 @@ async fn run_match_task(
 
     db::matches::save_match_result(match_id, result, &conn).expect("could not save match result");
 
-    return outcome;
+    outcome
 }
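Note (editorial, not part of the commit): a minimal usage sketch of the refactored RunMatch API, mirroring the call sites in ranking.rs and bot_api.rs. The helper function and its arguments are hypothetical, and the paths assume code living inside the planetwars-server crate.

    use std::sync::Arc;

    use crate::modules::matches::{MatchPlayer, RunMatch};
    use crate::{ConnectionPool, GlobalConfig};

    // Hypothetical helper showing the new one-step API.
    async fn play_example_match(
        config: Arc<GlobalConfig>,
        players: Vec<MatchPlayer>,
        pool: ConnectionPool,
    ) {
        // `run` stores the match and spawns the runner task in one step,
        // replacing the old store_in_database + spawn two-phase API.
        let (match_data, handle) = RunMatch::from_players(config, players)
            .run(pool)
            .await
            .expect("failed to run match");
        println!("started match {}", match_data.base.id);

        // Await the join handle to wait for the match outcome.
        let _outcome = handle.await.expect("match task panicked");
    }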
@@ -1,5 +1,7 @@
 // This module implements general domain logic, not directly
 // tied to the database or API layers.
+pub mod bot_api;
 pub mod bots;
 pub mod matches;
 pub mod ranking;
+pub mod registry;
@@ -1,17 +1,18 @@
-use crate::{db::bots::Bot, DbPool};
+use crate::{db::bots::Bot, DbPool, GlobalConfig};
 
 use crate::db;
-use crate::modules::matches::RunMatch;
+use crate::modules::matches::{MatchPlayer, RunMatch};
 use diesel::{PgConnection, QueryResult};
 use rand::seq::SliceRandom;
 use std::collections::HashMap;
 use std::mem;
+use std::sync::Arc;
 use std::time::{Duration, Instant};
 use tokio;
 
 const RANKER_INTERVAL: u64 = 60;
 
-pub async fn run_ranker(db_pool: DbPool) {
+pub async fn run_ranker(config: Arc<GlobalConfig>, db_pool: DbPool) {
     // TODO: make this configurable
     // play at most one match every n seconds
     let mut interval = tokio::time::interval(Duration::from_secs(RANKER_INTERVAL));
@@ -30,30 +31,30 @@ pub async fn run_ranker(db_pool: DbPool) {
             let mut rng = &mut rand::thread_rng();
             bots.choose_multiple(&mut rng, 2).cloned().collect()
         };
-        play_ranking_match(selected_bots, db_pool.clone()).await;
+        play_ranking_match(config.clone(), selected_bots, db_pool.clone()).await;
         recalculate_ratings(&db_conn).expect("could not recalculate ratings");
     }
 }
 
-async fn play_ranking_match(selected_bots: Vec<Bot>, db_pool: DbPool) {
+async fn play_ranking_match(config: Arc<GlobalConfig>, selected_bots: Vec<Bot>, db_pool: DbPool) {
     let db_conn = db_pool.get().await.expect("could not get db pool");
-    let mut code_bundles = Vec::new();
+    let mut players = Vec::new();
     for bot in &selected_bots {
-        let code_bundle = db::bots::active_code_bundle(bot.id, &db_conn)
-            .expect("could not get active code bundle");
-        code_bundles.push(code_bundle);
+        let version = db::bots::active_bot_version(bot.id, &db_conn)
+            .expect("could not get active bot version");
+        let player = MatchPlayer::BotVersion {
+            bot: Some(bot.clone()),
+            version,
+        };
+        players.push(player);
     }
 
-    let code_bundle_refs = code_bundles.iter().collect::<Vec<_>>();
-
-    let mut run_match = RunMatch::from_players(code_bundle_refs);
-    run_match
-        .store_in_database(&db_conn)
-        .expect("could not store match in db");
-    run_match
-        .spawn(db_pool.clone())
+    let (_, handle) = RunMatch::from_players(config, players)
+        .run(db_pool.clone())
         .await
-        .expect("running match failed");
+        .expect("failed to run match");
+    // wait for match to complete, so that only one ranking match can be running
+    let _outcome = handle.await;
 }
 
 fn recalculate_ratings(db_conn: &PgConnection) -> QueryResult<()> {
440
planetwars-server/src/modules/registry.rs
Normal file
440
planetwars-server/src/modules/registry.rs
Normal file
|
@ -0,0 +1,440 @@
|
||||||
|
// TODO: this module is functional, but it needs a good refactor for proper error handling.
|
||||||
|
|
||||||
|
use axum::body::{Body, StreamBody};
|
||||||
|
use axum::extract::{BodyStream, FromRequest, Path, Query, RequestParts, TypedHeader};
|
||||||
|
use axum::headers::authorization::Basic;
|
||||||
|
use axum::headers::Authorization;
|
||||||
|
use axum::response::{IntoResponse, Response};
|
||||||
|
use axum::routing::{get, head, post, put};
|
||||||
|
use axum::{async_trait, Extension, Router};
|
||||||
|
use futures::StreamExt;
|
||||||
|
use hyper::StatusCode;
|
||||||
|
use serde::Serialize;
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tokio::io::AsyncWriteExt;
|
||||||
|
use tokio_util::io::ReaderStream;
|
||||||
|
|
||||||
|
use crate::db::bots::NewBotVersion;
|
||||||
|
use crate::util::gen_alphanumeric;
|
||||||
|
use crate::{db, DatabaseConnection, GlobalConfig};
|
||||||
|
|
||||||
|
use crate::db::users::{authenticate_user, Credentials, User};
|
||||||
|
|
||||||
|
pub fn registry_service() -> Router {
|
||||||
|
Router::new()
|
||||||
|
// The docker API requires this trailing slash
|
||||||
|
.nest("/v2/", registry_api_v2())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn registry_api_v2() -> Router {
|
||||||
|
Router::new()
|
||||||
|
.route("/", get(get_root))
|
||||||
|
.route(
|
||||||
|
"/:name/manifests/:reference",
|
||||||
|
get(get_manifest).put(put_manifest),
|
||||||
|
)
|
||||||
|
.route(
|
||||||
|
"/:name/blobs/:digest",
|
||||||
|
head(check_blob_exists).get(get_blob),
|
||||||
|
)
|
||||||
|
.route("/:name/blobs/uploads/", post(create_upload))
|
||||||
|
.route(
|
||||||
|
"/:name/blobs/uploads/:uuid",
|
||||||
|
put(put_upload).patch(patch_upload),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|

const ADMIN_USERNAME: &str = "admin";

type AuthorizationHeader = TypedHeader<Authorization<Basic>>;

enum RegistryAuth {
    User(User),
    Admin,
}

enum RegistryAuthError {
    NoAuthHeader,
    InvalidCredentials,
}

impl IntoResponse for RegistryAuthError {
    fn into_response(self) -> Response {
        // TODO: create enum for registry errors
        let err = RegistryErrors {
            errors: vec![RegistryError {
                code: "UNAUTHORIZED".to_string(),
                message: "please log in".to_string(),
                detail: serde_json::Value::Null,
            }],
        };

        (
            StatusCode::UNAUTHORIZED,
            [
                ("Docker-Distribution-API-Version", "registry/2.0"),
                ("WWW-Authenticate", "Basic"),
            ],
            serde_json::to_string(&err).unwrap(),
        )
            .into_response()
    }
}

#[async_trait]
impl<B> FromRequest<B> for RegistryAuth
where
    B: Send,
{
    type Rejection = RegistryAuthError;

    async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
        let TypedHeader(Authorization(basic)) = AuthorizationHeader::from_request(req)
            .await
            .map_err(|_| RegistryAuthError::NoAuthHeader)?;

        // TODO: Into<Credentials> would be nice
        let credentials = Credentials {
            username: basic.username(),
            password: basic.password(),
        };

        let Extension(config) = Extension::<Arc<GlobalConfig>>::from_request(req)
            .await
            .unwrap();

        if credentials.username == ADMIN_USERNAME {
            if credentials.password == config.registry_admin_password {
                Ok(RegistryAuth::Admin)
            } else {
                Err(RegistryAuthError::InvalidCredentials)
            }
        } else {
            let db_conn = DatabaseConnection::from_request(req).await.unwrap();
            let user = authenticate_user(&credentials, &db_conn)
                .ok_or(RegistryAuthError::InvalidCredentials)?;

            Ok(RegistryAuth::User(user))
        }
    }
}

// Since async file io just calls spawn_blocking internally, it does not really make sense
// to make this an async function
fn file_sha256_digest(path: &std::path::Path) -> std::io::Result<String> {
    let mut file = std::fs::File::open(path)?;
    let mut hasher = Sha256::new();
    let _n = std::io::copy(&mut file, &mut hasher)?;
    Ok(format!("{:x}", hasher.finalize()))
}
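
// If a caller did need to keep the executor free while hashing a large blob, a
// minimal sketch (assuming the surrounding tokio runtime) would be to wrap the
// call explicitly, which is what async file io would end up doing internally:
//
//     let path = upload_path.clone();
//     let digest = tokio::task::spawn_blocking(move || file_sha256_digest(&path))
//         .await
//         .expect("hashing task panicked")
//         .expect("could not hash upload");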

/// Get the index of the last byte in a file
async fn last_byte_pos(file: &tokio::fs::File) -> std::io::Result<u64> {
    let n_bytes = file.metadata().await?.len();
    let pos = if n_bytes == 0 { 0 } else { n_bytes - 1 };
    Ok(pos)
}
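
// For example: after 10 bytes have been appended to an upload, this returns 9 and
// patch_upload responds with "Range: 0-9"; for a still-empty file it returns 0,
// matching the initial "Range: bytes=0-0" sent by create_upload.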

async fn get_root(_auth: RegistryAuth) -> impl IntoResponse {
    // root should return 200 OK to confirm api compliance
    Response::builder()
        .status(StatusCode::OK)
        .header("Docker-Distribution-API-Version", "registry/2.0")
        .body(Body::empty())
        .unwrap()
}

#[derive(Serialize)]
pub struct RegistryErrors {
    errors: Vec<RegistryError>,
}

#[derive(Serialize)]
pub struct RegistryError {
    code: String,
    message: String,
    detail: serde_json::Value,
}

async fn check_blob_exists(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path((repository_name, raw_digest)): Path<(String, String)>,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    check_access(&repository_name, &auth, &db_conn)?;

    let digest = raw_digest.strip_prefix("sha256:").unwrap();
    let blob_path = PathBuf::from(&config.registry_directory)
        .join("sha256")
        .join(&digest);
    if blob_path.exists() {
        let metadata = std::fs::metadata(&blob_path).unwrap();
        Ok((StatusCode::OK, [("Content-Length", metadata.len())]))
    } else {
        Err(StatusCode::NOT_FOUND)
    }
}

async fn get_blob(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path((repository_name, raw_digest)): Path<(String, String)>,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    check_access(&repository_name, &auth, &db_conn)?;

    let digest = raw_digest.strip_prefix("sha256:").unwrap();
    let blob_path = PathBuf::from(&config.registry_directory)
        .join("sha256")
        .join(&digest);
    if !blob_path.exists() {
        return Err(StatusCode::NOT_FOUND);
    }
    let file = tokio::fs::File::open(&blob_path).await.unwrap();
    let reader_stream = ReaderStream::new(file);
    let stream_body = StreamBody::new(reader_stream);
    Ok(stream_body)
}

async fn create_upload(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path(repository_name): Path<String>,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    check_access(&repository_name, &auth, &db_conn)?;

    let uuid = gen_alphanumeric(16);
    tokio::fs::File::create(
        PathBuf::from(&config.registry_directory)
            .join("uploads")
            .join(&uuid),
    )
    .await
    .unwrap();

    Ok(Response::builder()
        .status(StatusCode::ACCEPTED)
        .header(
            "Location",
            format!("/v2/{}/blobs/uploads/{}", repository_name, uuid),
        )
        .header("Docker-Upload-UUID", uuid)
        .header("Range", "bytes=0-0")
        .body(Body::empty())
        .unwrap())
}

async fn patch_upload(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path((repository_name, uuid)): Path<(String, String)>,
    mut stream: BodyStream,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    check_access(&repository_name, &auth, &db_conn)?;

    // TODO: support content range header in request
    let upload_path = PathBuf::from(&config.registry_directory)
        .join("uploads")
        .join(&uuid);
    let mut file = tokio::fs::OpenOptions::new()
        .read(false)
        .write(true)
        .append(true)
        .create(false)
        .open(upload_path)
        .await
        .unwrap();
    while let Some(Ok(chunk)) = stream.next().await {
        file.write_all(&chunk).await.unwrap();
    }

    let last_byte = last_byte_pos(&file).await.unwrap();

    Ok(Response::builder()
        .status(StatusCode::ACCEPTED)
        .header(
            "Location",
            format!("/v2/{}/blobs/uploads/{}", repository_name, uuid),
        )
        .header("Docker-Upload-UUID", uuid)
        // range indicating current progress of the upload
        .header("Range", format!("0-{}", last_byte))
        .body(Body::empty())
        .unwrap())
}

use serde::Deserialize;
#[derive(Deserialize)]
struct UploadParams {
    digest: String,
}

async fn put_upload(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path((repository_name, uuid)): Path<(String, String)>,
    Query(params): Query<UploadParams>,
    mut stream: BodyStream,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    check_access(&repository_name, &auth, &db_conn)?;

    let upload_path = PathBuf::from(&config.registry_directory)
        .join("uploads")
        .join(&uuid);
    let mut file = tokio::fs::OpenOptions::new()
        .read(false)
        .write(true)
        .append(true)
        .create(false)
        .open(&upload_path)
        .await
        .unwrap();

    let range_begin = last_byte_pos(&file).await.unwrap();
    while let Some(Ok(chunk)) = stream.next().await {
        file.write_all(&chunk).await.unwrap();
    }
    file.flush().await.unwrap();
    let range_end = last_byte_pos(&file).await.unwrap();

    let expected_digest = params.digest.strip_prefix("sha256:").unwrap();
    let digest = file_sha256_digest(&upload_path).unwrap();
    if digest != expected_digest {
        // TODO: return a docker error body
        return Err(StatusCode::BAD_REQUEST);
    }

    let target_path = PathBuf::from(&config.registry_directory)
        .join("sha256")
        .join(&digest);
    tokio::fs::rename(&upload_path, &target_path).await.unwrap();

    Ok(Response::builder()
        .status(StatusCode::CREATED)
        .header(
            "Location",
            format!("/v2/{}/blobs/{}", repository_name, digest),
        )
        .header("Docker-Upload-UUID", uuid)
        // content range for bytes that were in the body of this request
        .header("Content-Range", format!("{}-{}", range_begin, range_end))
        .header("Docker-Content-Digest", params.digest)
        .body(Body::empty())
        .unwrap())
}

async fn get_manifest(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path((repository_name, reference)): Path<(String, String)>,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    check_access(&repository_name, &auth, &db_conn)?;

    let manifest_path = PathBuf::from(&config.registry_directory)
        .join("manifests")
        .join(&repository_name)
        .join(&reference)
        .with_extension("json");
    let data = tokio::fs::read(&manifest_path).await.unwrap();

    let manifest: serde_json::Map<String, serde_json::Value> =
        serde_json::from_slice(&data).unwrap();
    let media_type = manifest.get("mediaType").unwrap().as_str().unwrap();
    Ok(Response::builder()
        .status(StatusCode::OK)
        .header("Content-Type", media_type)
        .body(axum::body::Full::from(data))
        .unwrap())
}

async fn put_manifest(
    db_conn: DatabaseConnection,
    auth: RegistryAuth,
    Path((repository_name, reference)): Path<(String, String)>,
    mut stream: BodyStream,
    Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<impl IntoResponse, StatusCode> {
    let bot = check_access(&repository_name, &auth, &db_conn)?;

    let repository_dir = PathBuf::from(&config.registry_directory)
        .join("manifests")
        .join(&repository_name);

    tokio::fs::create_dir_all(&repository_dir).await.unwrap();

    let mut hasher = Sha256::new();
    let manifest_path = repository_dir.join(&reference).with_extension("json");
    {
        let mut file = tokio::fs::OpenOptions::new()
            .write(true)
            .create(true)
            .truncate(true)
            .open(&manifest_path)
            .await
            .unwrap();
        while let Some(Ok(chunk)) = stream.next().await {
            hasher.update(&chunk);
            file.write_all(&chunk).await.unwrap();
        }
    }
    let digest = hasher.finalize();
    // TODO: store content-addressable manifests separately
    let content_digest = format!("sha256:{:x}", digest);
    let digest_path = repository_dir.join(&content_digest).with_extension("json");
    tokio::fs::copy(manifest_path, digest_path).await.unwrap();

    // Register the new image as a bot version
    // TODO: how should tags be handled?
    let new_version = NewBotVersion {
        bot_id: Some(bot.id),
        code_bundle_path: None,
        container_digest: Some(&content_digest),
    };
    db::bots::create_bot_version(&new_version, &db_conn).expect("could not save bot version");

    Ok(Response::builder()
        .status(StatusCode::CREATED)
        .header(
            "Location",
            format!("/v2/{}/manifests/{}", repository_name, reference),
        )
        .header("Docker-Content-Digest", content_digest)
        .body(Body::empty())
        .unwrap())
}

/// Ensure that the accessed repository exists
/// and the user is allowed to access it.
/// Returns the associated bot.
fn check_access(
    repository_name: &str,
    auth: &RegistryAuth,
    db_conn: &DatabaseConnection,
) -> Result<db::bots::Bot, StatusCode> {
    use diesel::OptionalExtension;

    // TODO: it would be nice to provide the found repository
    // to the route handlers
    let bot = db::bots::find_bot_by_name(repository_name, db_conn)
        .optional()
        .expect("could not run query")
        .ok_or(StatusCode::NOT_FOUND)?;

    match &auth {
        RegistryAuth::Admin => Ok(bot),
        RegistryAuth::User(user) => {
            if bot.owner_id == Some(user.id) {
                Ok(bot)
            } else {
                Err(StatusCode::FORBIDDEN)
            }
        }
    }
}

@ -1,7 +1,7 @@
 use axum::extract::{Multipart, Path};
 use axum::http::StatusCode;
 use axum::response::{IntoResponse, Response};
-use axum::{body, Json};
+use axum::{body, Extension, Json};
 use diesel::OptionalExtension;
 use rand::distributions::Alphanumeric;
 use rand::Rng;
@ -9,13 +9,14 @@ use serde::{Deserialize, Serialize};
 use serde_json::{self, json, value::Value as JsonValue};
 use std::io::Cursor;
 use std::path::PathBuf;
+use std::sync::Arc;
 use thiserror;
 
-use crate::db::bots::{self, CodeBundle};
+use crate::db::bots::{self, BotVersion};
-use crate::db::ratings::{RankedBot, self};
+use crate::db::ratings::{self, RankedBot};
 use crate::db::users::User;
-use crate::modules::bots::save_code_bundle;
+use crate::modules::bots::save_code_string;
-use crate::{DatabaseConnection, BOTS_DIR};
+use crate::{DatabaseConnection, GlobalConfig};
 use bots::Bot;
 
 #[derive(Serialize, Deserialize, Debug)]
@ -96,6 +97,7 @@ pub async fn save_bot(
     Json(params): Json<SaveBotParams>,
     user: User,
     conn: DatabaseConnection,
+    Extension(config): Extension<Arc<GlobalConfig>>,
 ) -> Result<Json<Bot>, SaveBotError> {
     let res = bots::find_bot_by_name(&params.bot_name, &conn)
         .optional()
@ -119,8 +121,8 @@ pub async fn save_bot(
             bots::create_bot(&new_bot, &conn).expect("could not create bot")
         }
     };
-    let _code_bundle =
-        save_code_bundle(&params.code, Some(bot.id), &conn).expect("failed to save code bundle");
+    let _code_bundle = save_code_string(&params.code, Some(bot.id), &conn, &config)
+        .expect("failed to save code bundle");
     Ok(Json(bot))
 }
 
@ -148,8 +150,8 @@ pub async fn get_bot(
     Path(bot_id): Path<i32>,
 ) -> Result<Json<JsonValue>, StatusCode> {
     let bot = bots::find_bot(bot_id, &conn).map_err(|_| StatusCode::NOT_FOUND)?;
-    let bundles = bots::find_bot_code_bundles(bot.id, &conn)
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+    let bundles =
+        bots::find_bot_versions(bot.id, &conn).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
     Ok(Json(json!({
         "bot": bot,
         "bundles": bundles,
@ -183,8 +185,9 @@ pub async fn upload_code_multipart(
     user: User,
     Path(bot_id): Path<i32>,
     mut multipart: Multipart,
-) -> Result<Json<CodeBundle>, StatusCode> {
-    let bots_dir = PathBuf::from(BOTS_DIR);
+    Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<Json<BotVersion>, StatusCode> {
+    let bots_dir = PathBuf::from(&config.bots_directory);
 
     let bot = bots::find_bot(bot_id, &conn).map_err(|_| StatusCode::NOT_FOUND)?;
 
@ -213,12 +216,13 @@ pub async fn upload_code_multipart(
         .extract(bots_dir.join(&folder_name))
         .map_err(|_| StatusCode::BAD_REQUEST)?;
 
-    let bundle = bots::NewCodeBundle {
+    let bot_version = bots::NewBotVersion {
         bot_id: Some(bot.id),
-        path: &folder_name,
+        code_bundle_path: Some(&folder_name),
+        container_digest: None,
     };
     let code_bundle =
-        bots::create_code_bundle(&bundle, &conn).expect("Failed to create code bundle");
+        bots::create_bot_version(&bot_version, &conn).expect("Failed to create code bundle");
 
     Ok(Json(code_bundle))
 }

@ -1,8 +1,11 @@
+use std::sync::Arc;
+
 use crate::db;
 use crate::db::matches::{FullMatchData, FullMatchPlayerData};
-use crate::modules::bots::save_code_bundle;
+use crate::modules::bots::save_code_string;
-use crate::modules::matches::RunMatch;
+use crate::modules::matches::{MatchPlayer, RunMatch};
 use crate::ConnectionPool;
+use crate::GlobalConfig;
 use axum::extract::Extension;
 use axum::Json;
 use hyper::StatusCode;
@ -30,6 +33,7 @@ pub struct SubmitBotResponse {
 pub async fn submit_bot(
     Json(params): Json<SubmitBotParams>,
     Extension(pool): Extension<ConnectionPool>,
+    Extension(config): Extension<Arc<GlobalConfig>>,
 ) -> Result<Json<SubmitBotResponse>, StatusCode> {
     let conn = pool.get().await.expect("could not get database connection");
 
@ -37,20 +41,32 @@ pub async fn submit_bot(
         .opponent_name
         .unwrap_or_else(|| DEFAULT_OPPONENT_NAME.to_string());
 
-    let opponent =
+    let opponent_bot =
         db::bots::find_bot_by_name(&opponent_name, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
-    let opponent_code_bundle =
-        db::bots::active_code_bundle(opponent.id, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
+    let opponent_bot_version = db::bots::active_bot_version(opponent_bot.id, &conn)
+        .map_err(|_| StatusCode::BAD_REQUEST)?;
 
-    let player_code_bundle = save_code_bundle(&params.code, None, &conn)
+    let player_bot_version = save_code_string(&params.code, None, &conn, &config)
         // TODO: can we recover from this?
         .expect("could not save bot code");
 
-    let mut run_match = RunMatch::from_players(vec![&player_code_bundle, &opponent_code_bundle]);
-    let match_data = run_match
-        .store_in_database(&conn)
-        .expect("failed to save match");
-    run_match.spawn(pool.clone());
+    let run_match = RunMatch::from_players(
+        config,
+        vec![
+            MatchPlayer::BotVersion {
+                bot: None,
+                version: player_bot_version.clone(),
+            },
+            MatchPlayer::BotVersion {
+                bot: Some(opponent_bot.clone()),
+                version: opponent_bot_version.clone(),
+            },
+        ],
+    );
+    let (match_data, _) = run_match
+        .run(pool.clone())
+        .await
+        .expect("failed to run match");
 
     // TODO: avoid clones
     let full_match_data = FullMatchData {
@ -58,13 +74,13 @@ pub async fn submit_bot(
         match_players: vec![
             FullMatchPlayerData {
                 base: match_data.match_players[0].clone(),
-                code_bundle: player_code_bundle,
+                bot_version: Some(player_bot_version),
                 bot: None,
             },
             FullMatchPlayerData {
                 base: match_data.match_players[1].clone(),
-                code_bundle: opponent_code_bundle,
+                bot_version: Some(opponent_bot_version),
-                bot: Some(opponent),
+                bot: Some(opponent_bot),
             },
         ],
     };
 

@ -1,102 +1,13 @@
-use std::path::PathBuf;
-
-use axum::{
-    extract::{Extension, Path},
-    Json,
-};
+use axum::{extract::Path, Extension, Json};
 use hyper::StatusCode;
-use planetwars_matchrunner::{docker_runner::DockerBotSpec, run_match, MatchConfig, MatchPlayer};
-use rand::{distributions::Alphanumeric, Rng};
 use serde::{Deserialize, Serialize};
+use std::{path::PathBuf, sync::Arc};
 
 use crate::{
-    db::{
-        bots,
-        matches::{self, MatchState},
-        users::User,
-    },
-    ConnectionPool, DatabaseConnection, BOTS_DIR, MAPS_DIR, MATCHES_DIR,
+    db::matches::{self, MatchState},
+    DatabaseConnection, GlobalConfig,
 };
 
-#[derive(Serialize, Deserialize, Debug)]
-pub struct MatchParams {
-    // Just bot ids for now
-    players: Vec<i32>,
-}
-
-pub async fn play_match(
-    _user: User,
-    Extension(pool): Extension<ConnectionPool>,
-    Json(params): Json<MatchParams>,
-) -> Result<(), StatusCode> {
-    let conn = pool.get().await.expect("could not get database connection");
-    let map_path = PathBuf::from(MAPS_DIR).join("hex.json");
-
-    let slug: String = rand::thread_rng()
-        .sample_iter(&Alphanumeric)
-        .take(16)
-        .map(char::from)
-        .collect();
-    let log_file_name = format!("{}.log", slug);
-
-    let mut players = Vec::new();
-    let mut bot_ids = Vec::new();
-    for bot_name in params.players {
-        let bot = bots::find_bot(bot_name, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
-        let code_bundle =
-            bots::active_code_bundle(bot.id, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
-
-        let bundle_path = PathBuf::from(BOTS_DIR).join(&code_bundle.path);
-        let bot_config: BotConfig = std::fs::read_to_string(bundle_path.join("botconfig.toml"))
-            .and_then(|config_str| toml::from_str(&config_str).map_err(|e| e.into()))
-            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
-
-        players.push(MatchPlayer {
-            bot_spec: Box::new(DockerBotSpec {
-                code_path: PathBuf::from(BOTS_DIR).join(code_bundle.path),
-                image: "python:3.10-slim-buster".to_string(),
-                argv: shlex::split(&bot_config.run_command)
-                    .ok_or(StatusCode::INTERNAL_SERVER_ERROR)?,
-            }),
-        });
-
-        bot_ids.push(matches::MatchPlayerData {
-            code_bundle_id: code_bundle.id,
-        });
-    }
-
-    let match_config = MatchConfig {
-        map_name: "hex".to_string(),
-        map_path,
-        log_path: PathBuf::from(MATCHES_DIR).join(&log_file_name),
-        players,
-    };
-
-    tokio::spawn(run_match_task(
-        match_config,
-        log_file_name,
-        bot_ids,
-        pool.clone(),
-    ));
-    Ok(())
-}
-
-async fn run_match_task(
-    config: MatchConfig,
-    log_file_name: String,
-    match_players: Vec<matches::MatchPlayerData>,
-    pool: ConnectionPool,
-) {
-    let match_data = matches::NewMatch {
-        state: MatchState::Finished,
-        log_path: &log_file_name,
-    };
-
-    run_match(config).await;
-    let conn = pool.get().await.expect("could not get database connection");
-    matches::create_match(&match_data, &match_players, &conn).expect("could not create match");
-}
-
 #[derive(Serialize, Deserialize)]
 pub struct ApiMatch {
     id: i32,
@ -107,7 +18,7 @@ pub struct ApiMatch {
 
 #[derive(Serialize, Deserialize)]
 pub struct ApiMatchPlayer {
-    code_bundle_id: i32,
+    bot_version_id: Option<i32>,
     bot_id: Option<i32>,
     bot_name: Option<String>,
 }
@ -127,7 +38,7 @@ pub fn match_data_to_api(data: matches::FullMatchData) -> ApiMatch {
         .match_players
         .iter()
         .map(|_p| ApiMatchPlayer {
-            code_bundle_id: _p.code_bundle.id,
+            bot_version_id: _p.bot_version.as_ref().map(|cb| cb.id),
             bot_id: _p.bot.as_ref().map(|b| b.id),
             bot_name: _p.bot.as_ref().map(|b| b.name.clone()),
         })
@ -135,15 +46,6 @@ pub fn match_data_to_api(data: matches::FullMatchData) -> ApiMatch {
     }
 }
 
-// TODO: this is duplicated from planetwars-cli
-// clean this up and move to matchrunner crate
-#[derive(Serialize, Deserialize)]
-pub struct BotConfig {
-    pub name: String,
-    pub run_command: String,
-    pub build_command: Option<String>,
-}
-
 pub async fn get_match_data(
     Path(match_id): Path<i32>,
     conn: DatabaseConnection,
@ -157,10 +59,11 @@ pub async fn get_match_data(
 pub async fn get_match_log(
     Path(match_id): Path<i32>,
     conn: DatabaseConnection,
+    Extension(config): Extension<Arc<GlobalConfig>>,
 ) -> Result<Vec<u8>, StatusCode> {
     let match_base =
         matches::find_match_base(match_id, &conn).map_err(|_| StatusCode::NOT_FOUND)?;
-    let log_path = PathBuf::from(MATCHES_DIR).join(&match_base.log_path);
+    let log_path = PathBuf::from(&config.match_logs_directory).join(&match_base.log_path);
     let log_contents = std::fs::read(log_path).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
     Ok(log_contents)
 }

@ -5,7 +5,7 @@ use axum::extract::{FromRequest, RequestParts, TypedHeader};
 use axum::headers::authorization::Bearer;
 use axum::headers::Authorization;
 use axum::http::StatusCode;
-use axum::response::{Headers, IntoResponse, Response};
+use axum::response::{IntoResponse, Response};
 use axum::{async_trait, Json};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
@ -163,9 +163,9 @@ pub async fn login(conn: DatabaseConnection, params: Json<LoginParams>) -> Respo
         Some(user) => {
             let session = sessions::create_session(&user, &conn);
             let user_data: UserData = user.into();
-            let headers = Headers(vec![("Token", &session.token)]);
+            let headers = [("Token", &session.token)];
 
-            (headers, Json(user_data)).into_response()
+            (StatusCode::OK, headers, Json(user_data)).into_response()
         }
     }
 }

@ -1,6 +1,19 @@
 // This file is autogenerated by diesel
 #![allow(unused_imports)]
 
+table! {
+    use diesel::sql_types::*;
+    use crate::db_types::*;
+
+    bot_versions (id) {
+        id -> Int4,
+        bot_id -> Nullable<Int4>,
+        code_bundle_path -> Nullable<Text>,
+        created_at -> Timestamp,
+        container_digest -> Nullable<Text>,
+    }
+}
+
 table! {
     use diesel::sql_types::*;
     use crate::db_types::*;
@ -12,18 +25,6 @@ table! {
     }
 }
 
-table! {
-    use diesel::sql_types::*;
-    use crate::db_types::*;
-
-    code_bundles (id) {
-        id -> Int4,
-        bot_id -> Nullable<Int4>,
-        path -> Text,
-        created_at -> Timestamp,
-    }
-}
-
 table! {
     use diesel::sql_types::*;
     use crate::db_types::*;
@ -31,7 +32,7 @@ table! {
     match_players (match_id, player_id) {
         match_id -> Int4,
         player_id -> Int4,
-        code_bundle_id -> Int4,
+        bot_version_id -> Nullable<Int4>,
     }
 }
@ -81,16 +82,16 @@
     }
 }
 
+joinable!(bot_versions -> bots (bot_id));
 joinable!(bots -> users (owner_id));
-joinable!(code_bundles -> bots (bot_id));
+joinable!(match_players -> bot_versions (bot_version_id));
-joinable!(match_players -> code_bundles (code_bundle_id));
 joinable!(match_players -> matches (match_id));
 joinable!(ratings -> bots (bot_id));
 joinable!(sessions -> users (user_id));
 
 allow_tables_to_appear_in_same_query!(
+    bot_versions,
     bots,
-    code_bundles,
     match_players,
     matches,
     ratings,

proto/bot_api.proto (new file, 36 lines)
@ -0,0 +1,36 @@
syntax = "proto3";

package grpc.planetwars.bot_api;

message Hello {
  string hello_message = 1;
}

message HelloResponse {
  string response = 1;
}

message PlayerRequest {
  int32 request_id = 1;
  bytes content = 2;
}

message PlayerRequestResponse {
  int32 request_id = 1;
  bytes content = 2;
}

message MatchRequest {
  string opponent_name = 1;
}

message CreatedMatch {
  int32 match_id = 1;
  string player_key = 2;
}

service BotApiService {
  rpc CreateMatch(MatchRequest) returns (CreatedMatch);
  // server sends requests to the player, player responds
  rpc ConnectBot(stream PlayerRequestResponse) returns (stream PlayerRequest);
}

web/pw-visualizer/assets/res/earth.png (new binary file, 13 KiB, not shown)
web/pw-visualizer/assets/res/ship.png (new binary file, 4.8 KiB, not shown)

@ -2,20 +2,20 @@
 <!-- Created with Inkscape (http://www.inkscape.org/) -->
 
 <svg
-   xmlns:dc="http://purl.org/dc/elements/1.1/"
-   xmlns:cc="http://creativecommons.org/ns#"
-   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-   xmlns:svg="http://www.w3.org/2000/svg"
-   xmlns="http://www.w3.org/2000/svg"
-   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-   width="100mm"
-   height="100mm"
-   viewBox="0 0 100 99.999999"
+   width="10.231839cm"
+   height="19.597593cm"
+   viewBox="0 0 102.31839 195.97593"
    version="1.1"
    id="svg8"
    sodipodi:docname="ship.svg"
-   inkscape:version="0.92.4 (f8dce91, 2019-08-02)">
+   inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:dc="http://purl.org/dc/elements/1.1/">
   <defs
     id="defs2" />
   <sodipodi:namedview
@ -26,8 +26,8 @@
     inkscape:pageopacity="0.0"
     inkscape:pageshadow="2"
     inkscape:zoom="1.0993438"
-    inkscape:cx="676.08563"
-    inkscape:cy="474.10966"
+    inkscape:cx="424.34405"
+    inkscape:cy="384.32017"
    inkscape:document-units="mm"
    inkscape:current-layer="layer1"
    showgrid="true"
@ -35,15 +35,19 @@
    fit-margin-left="0"
    fit-margin-right="0"
    fit-margin-bottom="0"
-    inkscape:window-width="2560"
-    inkscape:window-height="1417"
+    inkscape:window-width="1920"
+    inkscape:window-height="1048"
    inkscape:window-x="0"
-    inkscape:window-y="0"
-    inkscape:window-maximized="0"
-    gridtolerance="10">
+    inkscape:window-y="32"
+    inkscape:window-maximized="1"
+    gridtolerance="10"
+    inkscape:pagecheckerboard="0"
+    units="cm">
    <inkscape:grid
      type="xygrid"
-      id="grid894" />
+      id="grid894"
+      originx="-160.50747"
+      originy="118.75037" />
  </sodipodi:namedview>
  <metadata
    id="metadata5">
@ -53,7 +57,6 @@
      <dc:format>image/svg+xml</dc:format>
      <dc:type
        rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
-      <dc:title></dc:title>
    </cc:Work>
  </rdf:RDF>
 </metadata>
@ -61,17 +64,17 @@
    inkscape:label="Layer 1"
    inkscape:groupmode="layer"
    id="layer1"
-    transform="translate(229.05372,-117.27915)">
+    transform="translate(68.546255,1.4712222)">
    <ellipse
      ry="79.47506"
      rx="48.672089"
      cy="39.779182"
      cx="439.0813"
      id="ellipse888"
-      style="opacity:1;fill:#00ffff;fill-opacity:1;stroke:none;stroke-width:6.61458302;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" />
+      style="opacity:1;fill:#00ffff;fill-opacity:1;stroke:none;stroke-width:6.61458;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" />
    <path
-      style="opacity:1;fill:#000000;fill-opacity:1;stroke:none;stroke-width:24.99999809;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+      style="opacity:1;fill:#000000;fill-opacity:1;stroke:none;stroke-width:25;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
-      d="M 800 -448.82031 C 800 -448.82031 640 -342.04689 640 -92.046875 C 640 -16.101728 661.67774 51.924976 695.88672 97.775391 C 640.96482 152.90966 593.39426 234.74166 610 267.95312 C 620.00003 287.95314 720.00001 297.95311 730 287.95312 C 751.8315 266.12161 757.39662 198.03742 758.92773 149.62305 C 772.03579 155.04778 785.80002 157.95312 800 157.95312 C 814.19998 157.95312 827.96422 155.04778 841.07227 149.62305 C 842.60338 198.03742 848.1685 266.12161 870 287.95312 C 879.99999 297.95311 979.99997 287.95314 990 267.95312 C 1006.6057 234.74166 959.03518 152.90966 904.11328 97.775391 C 938.32226 51.924976 959.99999 -16.101728 960 -92.046875 C 960.00002 -342.04689 800 -448.82031 800 -448.82031 z M 800 -352.04688 C 800 -352.04688 908.96486 -279.33252 908.96484 -109.07617 C 908.96484 -15.046189 860.17918 61.179688 800 61.179688 C 739.82082 61.179688 691.03515 -15.046189 691.03516 -109.07617 C 691.03516 -279.33252 800 -352.04687 800 -352.04688 z "
+      d="m 800,-448.82031 c 0,0 -160,106.77342 -160,356.773435 0,75.945147 21.67774,143.971851 55.88672,189.822266 C 640.96482,152.90966 593.39426,234.74166 610,267.95312 c 10.00003,20.00002 110.00001,29.99999 120,20 21.8315,-21.83151 27.39662,-89.9157 28.92773,-138.33007 13.10806,5.42473 26.87229,8.33007 41.07227,8.33007 14.19998,0 27.96422,-2.90534 41.07227,-8.33007 1.53111,48.41437 7.09623,116.49856 28.92773,138.33007 9.99999,9.99999 109.99997,2e-5 120,-20 C 1006.6057,234.74166 959.03518,152.90966 904.11328,97.775391 938.32226,51.924976 959.99999,-16.101728 960,-92.046875 960.00002,-342.04689 800,-448.82031 800,-448.82031 Z m 0,96.77343 c 0,0 108.96486,72.71436 108.96484,242.97071 0,94.029981 -48.78566,170.255858 -108.96484,170.255858 -60.17918,0 -108.96485,-76.225877 -108.96484,-170.255858 C 691.03516,-279.33252 800,-352.04687 800,-352.04688 Z"
      transform="matrix(0.26458333,0,0,0.26458333,-229.05372,117.27915)"
      id="path4600" />
  </g>

(ship.svg rendered size unchanged: 3.3 KiB before and after)

@ -10,5 +10,4 @@ uniform sampler2D u_texture;
 
 void main() {
     gl_FragColor = texture2D(u_texture, v_texCoord);
-    // gl_FragColor = vec4(0.7, 0.7, 0.0, 1.0);
 }

web/pw-visualizer/assets/shaders/frag/masked_image.glsl (new file, 21 lines)
@ -0,0 +1,21 @@
#ifdef GL_ES
precision mediump float;
#endif

// Passed in from the vertex shader.
varying vec2 v_texCoord;

uniform float u_step_interval;
uniform float u_time;
uniform vec3 u_color;
uniform vec3 u_color_next;


// The texture.
uniform sampler2D u_texture;

void main() {
    float alpha = texture2D(u_texture, v_texCoord).a;
    vec3 color = mix(u_color, u_color_next, u_time);
    gl_FragColor = vec4(color, alpha);
}

@ -4,9 +4,14 @@ export {default as earthSvg} from "../assets/res/earth.svg";
 export {default as marsSvg} from "../assets/res/mars.svg";
 export {default as venusSvg} from "../assets/res/venus.svg";
 
+export {default as earthPng} from "../assets/res/earth.png";
+export {default as shipPng} from "../assets/res/ship.png";
+
 export {default as fontPng} from "../assets/res/font.png";
 
 export {default as imageFragmentShader} from "../assets/shaders/frag/image.glsl?url";
+export {default as maskedImageFragmentShader} from "../assets/shaders/frag/masked_image.glsl?url";
+
 export {default as simpleFragmentShader} from "../assets/shaders/frag/simple.glsl?url";
 export {default as vorFragmentShader} from "../assets/shaders/frag/vor.glsl?url";
 
@ -1,6 +1,4 @@
|
||||||
import { Game } from "planetwars-rs";
|
import { Game } from "planetwars-rs";
|
||||||
// import { memory } from "planetwars-rs/planetwars_rs_bg";
|
|
||||||
// const memory = planetwars_bg.memory;
|
|
||||||
import type { Dictionary } from './webgl/util';
|
import type { Dictionary } from './webgl/util';
|
||||||
import type { BBox } from "./voronoi/voronoi-core";
|
import type { BBox } from "./voronoi/voronoi-core";
|
||||||
|
|
||||||
|
@ -8,8 +6,6 @@ import {
|
||||||
Resizer,
|
Resizer,
|
||||||
resizeCanvasToDisplaySize,
|
resizeCanvasToDisplaySize,
|
||||||
FPSCounter,
|
FPSCounter,
|
||||||
url_to_mesh,
|
|
||||||
Mesh,
|
|
||||||
} from "./webgl/util";
|
} from "./webgl/util";
|
||||||
import {
|
import {
|
||||||
Shader,
|
Shader,
|
||||||
|
@ -22,12 +18,13 @@ import {
|
||||||
UniformMatrix3fv,
|
UniformMatrix3fv,
|
||||||
UniformBool,
|
UniformBool,
|
||||||
} from "./webgl/shader";
|
} from "./webgl/shader";
|
||||||
import { Renderer } from "./webgl/renderer";
|
import { DefaultRenderable, Renderer } from "./webgl/renderer";
|
||||||
import { VertexBuffer, IndexBuffer } from "./webgl/buffer";
|
import { VertexBuffer, IndexBuffer } from "./webgl/buffer";
|
||||||
import { VertexBufferLayout, VertexArray } from "./webgl/vertexBufferLayout";
|
import { VertexBufferLayout, VertexArray } from "./webgl/vertexBufferLayout";
|
||||||
import { defaultLabelFactory, LabelFactory, Align, Label } from "./webgl/text";
|
import { defaultLabelFactory, LabelFactory, Align, Label } from "./webgl/text";
|
||||||
import { VoronoiBuilder } from "./voronoi/voronoi";
|
import { VoronoiBuilder } from "./voronoi/voronoi";
|
||||||
import * as assets from "./assets";
|
import * as assets from "./assets";
|
||||||
|
import { Texture } from "./webgl/texture";
|
||||||
|
|
||||||
|
|
||||||
function to_bbox(box: number[]): BBox {
|
function to_bbox(box: number[]): BBox {
|
||||||
|
@ -39,14 +36,6 @@ function to_bbox(box: number[]): BBox {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// function f32v(ptr: number, size: number): Float32Array {
|
|
||||||
// return new Float32Array(memory.buffer, ptr, size);
|
|
||||||
// }
|
|
||||||
|
|
||||||
// function i32v(ptr: number, size: number): Int32Array {
|
|
||||||
// return new Int32Array(memory.buffer, ptr, size);
|
|
||||||
// }
|
|
||||||
|
|
||||||
export function set_game_name(name: string) {
|
export function set_game_name(name: string) {
|
||||||
ELEMENTS["name"].innerHTML = name;
|
ELEMENTS["name"].innerHTML = name;
|
||||||
}
|
}
|
||||||
|
@ -133,6 +122,7 @@ export class GameInstance {
|
||||||
shader: Shader;
|
shader: Shader;
|
||||||
vor_shader: Shader;
|
vor_shader: Shader;
|
||||||
image_shader: Shader;
|
image_shader: Shader;
|
||||||
|
masked_image_shader: Shader;
|
||||||
|
|
||||||
text_factory: LabelFactory;
|
text_factory: LabelFactory;
|
||||||
planet_labels: Label[];
|
planet_labels: Label[];
|
||||||
|
@ -140,6 +130,7 @@ export class GameInstance {
|
||||||
|
|
||||||
ship_ibo: IndexBuffer;
|
ship_ibo: IndexBuffer;
|
||||||
ship_vao: VertexArray;
|
ship_vao: VertexArray;
|
||||||
|
ship_texture: Texture;
|
||||||
// TODO: find a better way
|
// TODO: find a better way
|
||||||
max_num_ships: number;
|
max_num_ships: number;
|
||||||
|
|
||||||
|
@ -159,8 +150,9 @@ export class GameInstance {
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
game: Game,
|
game: Game,
|
||||||
meshes: Mesh[],
|
planets_textures: Texture[],
|
||||||
ship_mesh: Mesh,
|
ship_texture: Texture,
|
||||||
|
font_texture: Texture,
|
||||||
shaders: Dictionary<ShaderFactory>
|
shaders: Dictionary<ShaderFactory>
|
||||||
) {
|
) {
|
||||||
this.game = game;
|
this.game = game;
|
||||||
|
@ -174,11 +166,14 @@ export class GameInstance {
|
||||||
this.vor_shader = shaders["vor"].create_shader(GL, {
|
this.vor_shader = shaders["vor"].create_shader(GL, {
|
||||||
PLANETS: "" + planets.length,
|
PLANETS: "" + planets.length,
|
||||||
});
|
});
|
||||||
|
this.masked_image_shader = shaders["masked_image"].create_shader(GL);
|
||||||
|
|
||||||
this.text_factory = defaultLabelFactory(GL, this.image_shader);
|
this.text_factory = defaultLabelFactory(GL, font_texture, this.image_shader);
|
||||||
this.planet_labels = [];
|
this.planet_labels = [];
|
||||||
this.ship_labels = [];
|
this.ship_labels = [];
|
||||||
|
|
||||||
|
this.ship_texture = ship_texture
|
||||||
|
|
||||||
this.resizer = new Resizer(CANVAS, [...game.get_viewbox()], true);
|
this.resizer = new Resizer(CANVAS, [...game.get_viewbox()], true);
|
||||||
this.renderer = new Renderer();
|
this.renderer = new Renderer();
|
||||||
this.game.update_turn(0);
|
this.game.update_turn(0);
|
||||||
|
@ -188,15 +183,8 @@ export class GameInstance {
|
||||||
|
|
||||||
// List of [(x, y, r)] for all planets
|
// List of [(x, y, r)] for all planets
|
||||||
this._create_voronoi(planets);
|
this._create_voronoi(planets);
|
||||||
this._create_planets(planets, meshes);
|
this._create_planets(planets, planets_textures);
|
||||||
|
|
||||||
// create_shipes
|
|
||||||
this.ship_ibo = new IndexBuffer(GL, ship_mesh.cells);
|
|
||||||
const ship_positions = new VertexBuffer(GL, ship_mesh.positions);
|
|
||||||
const ship_layout = new VertexBufferLayout();
|
|
||||||
ship_layout.push(GL.FLOAT, 3, 4, "a_position");
|
|
||||||
this.ship_vao = new VertexArray();
|
|
||||||
this.ship_vao.addBuffer(ship_positions, ship_layout);
|
|
||||||
this.max_num_ships = 0;
|
this.max_num_ships = 0;
|
||||||
|
|
||||||
// Set slider correctly
|
// Set slider correctly
|
||||||
|
@ -233,46 +221,52 @@ export class GameInstance {
|
||||||
this.renderer.addRenderable(this.vor_builder.getRenderable(), LAYERS.vor);
|
this.renderer.addRenderable(this.vor_builder.getRenderable(), LAYERS.vor);
|
||||||
}
|
}
|
||||||
|
|
||||||
_create_planets(planets: Float32Array, meshes: Mesh[]) {
|
_create_planets(planets: Float32Array, planets_textures: Texture[]) {
|
||||||
for (let i = 0; i < this.planet_count; i++) {
|
for (let i = 0; i < this.planet_count; i++) {
|
||||||
{
|
{
|
||||||
const transform = new UniformMatrix3fv([
|
const transform = new UniformMatrix3fv([
|
||||||
1,
|
1, 0, 0,
|
||||||
0,
|
0, 1, 0,
|
||||||
0,
|
-planets[i * 3], -planets[i * 3 + 1], 1, // TODO: why are negations needed?
|
||||||
0,
|
|
||||||
1,
|
|
||||||
0,
|
|
||||||
-planets[i * 3],
|
|
||||||
-planets[i * 3 + 1],
|
|
||||||
1,
|
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const indexBuffer = new IndexBuffer(
|
const gl = GL;
|
||||||
GL,
|
const ib = new IndexBuffer(gl, [
|
||||||
meshes[i % meshes.length].cells
|
0, 1, 2,
|
||||||
);
|
1, 2, 3
|
||||||
const positionBuffer = new VertexBuffer(
|
]);
|
||||||
GL,
|
const vb_pos = new VertexBuffer(gl, [
|
||||||
meshes[i % meshes.length].positions
|
-1, 1,
|
||||||
);
|
1, 1,
|
||||||
|
-1, -1,
|
||||||
|
1, -1
|
||||||
|
]);
|
||||||
|
const vb_tex = new VertexBuffer(gl, [
|
||||||
|
0, 0,
|
||||||
|
1, 0,
|
||||||
|
0, 1,
|
||||||
|
1, 1]);
|
||||||
|
|
||||||
|
const layout_pos = new VertexBufferLayout();
|
||||||
|
// 2?
|
||||||
|
layout_pos.push(gl.FLOAT, 2, 4, "a_position");
|
||||||
|
|
||||||
|
const layout_tex = new VertexBufferLayout();
|
||||||
|
layout_tex.push(gl.FLOAT, 2, 4, "a_texCoord");
|
||||||
|
|
||||||
const layout = new VertexBufferLayout();
|
|
||||||
layout.push(GL.FLOAT, 3, 4, "a_position");
|
|
||||||
const vao = new VertexArray();
|
const vao = new VertexArray();
|
||||||
vao.addBuffer(positionBuffer, layout);
|
vao.addBuffer(vb_pos, layout_pos);
|
||||||
|
vao.addBuffer(vb_tex, layout_tex);
|
||||||
|
|
||||||
|
const uniforms = {
|
||||||
|
u_trans: transform,
|
||||||
|
u_trans_next: transform,
|
||||||
|
};
|
||||||
|
|
||||||
|
const renderable = new DefaultRenderable(ib, vao, this.masked_image_shader, [planets_textures[0]], uniforms);
|
||||||
|
|
||||||
|
this.renderer.addRenderable(renderable, LAYERS.planet);
|
||||||
|
|
||||||
this.renderer.addToDraw(
|
|
||||||
indexBuffer,
|
|
||||||
vao,
|
|
||||||
this.shader,
|
|
||||||
{
|
|
||||||
u_trans: transform,
|
|
||||||
u_trans_next: transform,
|
|
||||||
},
|
|
||||||
[],
|
|
||||||
LAYERS.planet
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -350,16 +344,39 @@ export class GameInstance {
|
||||||
const ship_colours = this.game.get_ship_colours();
|
const ship_colours = this.game.get_ship_colours();
|
||||||
|
|
||||||
for (let i = this.max_num_ships; i < ship_counts.length; i++) {
|
for (let i = this.max_num_ships; i < ship_counts.length; i++) {
|
||||||
this.renderer.addToDraw(
|
const gl = GL;
|
||||||
this.ship_ibo,
|
const ib = new IndexBuffer(gl, [
|
||||||
this.ship_vao,
|
0, 1, 2,
|
||||||
this.shader,
|
1, 2, 3
|
||||||
{},
|
]);
|
||||||
[],
|
const ratio = this.ship_texture.getWidth() / this.ship_texture.getHeight();
|
||||||
LAYERS.ship
|
const vb_pos = new VertexBuffer(gl, [
|
||||||
);
|
-ratio, 1,
|
||||||
|
ratio, 1,
|
||||||
|
-ratio, -1,
|
||||||
|
ratio, -1
|
||||||
|
]);
|
||||||
|
const vb_tex = new VertexBuffer(gl, [
|
||||||
|
0, 0,
|
||||||
|
1, 0,
|
||||||
|
0, 1,
|
||||||
|
1, 1,
|
||||||
|
]);
|
||||||
|
|
||||||
|
const layout_pos = new VertexBufferLayout();
|
||||||
|
layout_pos.push(gl.FLOAT, 2, 4, "a_position");
|
||||||
|
|
||||||
|
const layout_tex = new VertexBufferLayout();
|
||||||
|
layout_tex.push(gl.FLOAT, 2, 4, "a_texCoord");
|
||||||
|
|
||||||
|
const vao = new VertexArray();
|
||||||
|
vao.addBuffer(vb_pos, layout_pos);
|
||||||
|
vao.addBuffer(vb_tex, layout_tex);
|
||||||
|
|
||||||
|
const renderable = new DefaultRenderable(ib, vao, this.masked_image_shader, [this.ship_texture], {});
|
||||||
|
this.renderer.addRenderable(renderable, LAYERS.ship);
|
||||||
const label = this.text_factory.build(GL);
|
const label = this.text_factory.build(GL);
|
||||||
|
|
||||||
this.ship_labels.push(label);
|
this.ship_labels.push(label);
|
||||||
this.renderer.addRenderable(label.getRenderable(), LAYERS.ship_label);
|
this.renderer.addRenderable(label.getRenderable(), LAYERS.ship_label);
|
||||||
}
|
}
|
||||||
|
@ -430,25 +447,30 @@ export class GameInstance {
|
||||||
this.use_vor = false;
|
this.use_vor = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const shaders_to_update = [
|
||||||
|
this.shader,
|
||||||
|
this.image_shader,
|
||||||
|
this.masked_image_shader,
|
||||||
|
];
|
||||||
|
|
||||||
|
|
||||||
// If not playing, still reder with different viewbox, so people can still pan etc.
|
// If not playing, still reder with different viewbox, so people can still pan etc.
|
||||||
if (!this.playing) {
|
if (!this.playing) {
|
||||||
this.last_time = time;
|
this.last_time = time;
|
||||||
|
|
||||||
this.shader.uniform(
|
shaders_to_update.forEach((shader) => {
|
||||||
GL,
|
shader.uniform(
|
||||||
"u_viewbox",
|
GL,
|
||||||
new Uniform4f(this.resizer.get_viewbox())
|
"u_viewbox",
|
||||||
);
|
new Uniform4f(this.resizer.get_viewbox())
|
||||||
|
);
|
||||||
|
})
|
||||||
|
|
||||||
this.vor_shader.uniform(
|
this.vor_shader.uniform(
|
||||||
GL,
|
GL,
|
||||||
"u_viewbox",
|
"u_viewbox",
|
||||||
new Uniform4f(this.resizer.get_viewbox())
|
new Uniform4f(this.resizer.get_viewbox())
|
||||||
);
|
);
|
||||||
this.image_shader.uniform(
|
|
||||||
GL,
|
|
||||||
"u_viewbox",
|
|
||||||
new Uniform4f(this.resizer.get_viewbox())
|
|
||||||
);
|
|
||||||
|
|
||||||
this.renderer.render(GL);
|
this.renderer.render(GL);
|
||||||
return;
|
return;
|
||||||
|
@ -481,39 +503,24 @@ export class GameInstance {
|
||||||
this.vor_shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
|
this.vor_shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
|
||||||
this.vor_shader.uniform(GL, "u_vor", new UniformBool(this.use_vor));
|
this.vor_shader.uniform(GL, "u_vor", new UniformBool(this.use_vor));
|
||||||
|
|
||||||
this.shader.uniform(
|
shaders_to_update.forEach((shader) => {
|
||||||
GL,
|
shader.uniform(
|
||||||
"u_time",
|
GL,
|
||||||
new Uniform1f((time - this.last_time) / ms_per_frame)
|
"u_time",
|
||||||
);
|
new Uniform1f((time - this.last_time) / ms_per_frame)
|
||||||
this.shader.uniform(
|
);
|
||||||
GL,
|
shader.uniform(
|
||||||
"u_mouse",
|
GL,
|
||||||
new Uniform2f(this.resizer.get_mouse_pos())
|
"u_mouse",
|
||||||
);
|
new Uniform2f(this.resizer.get_mouse_pos())
|
||||||
this.shader.uniform(
|
);
|
||||||
GL,
|
shader.uniform(
|
||||||
"u_viewbox",
|
GL,
|
||||||
new Uniform4f(this.resizer.get_viewbox())
|
"u_viewbox",
|
||||||
);
|
new Uniform4f(this.resizer.get_viewbox())
|
||||||
this.shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
|
);
|
||||||
|
shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
|
||||||
this.image_shader.uniform(
|
});
|
||||||
GL,
|
|
||||||
"u_time",
|
|
||||||
new Uniform1f((time - this.last_time) / ms_per_frame)
|
|
||||||
);
|
|
||||||
this.image_shader.uniform(
|
|
||||||
GL,
|
|
||||||
"u_mouse",
|
|
||||||
new Uniform2f(this.resizer.get_mouse_pos())
|
|
||||||
);
|
|
||||||
this.image_shader.uniform(
|
|
||||||
GL,
|
|
||||||
"u_viewbox",
|
|
||||||
new Uniform4f(this.resizer.get_viewbox())
|
|
||||||
);
|
|
||||||
this.image_shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
|
|
||||||
|
|
||||||
// Render
|
// Render
|
||||||
this.renderer.render(GL);
|
this.renderer.render(GL);
|
||||||
@@ -578,18 +585,17 @@ export class GameInstance {
 }

 var game_instance: GameInstance;
-var meshes: Mesh[];
+var textures: Texture[];
 var shaders: Dictionary<ShaderFactory>;

 export async function set_instance(source: string): Promise<GameInstance> {
   // TODO: embed shader programs
-  if (!meshes || !shaders) {
-    const mesh_promises = [
-      assets.shipSvg,
-      assets.earthSvg,
-      assets.marsSvg,
-      assets.venusSvg,
-    ].map(url_to_mesh);
+  if (!textures || !shaders) {
+    const texture_promises = [
+      Texture.fromImage(GL, assets.fontPng, "font"),
+      Texture.fromImage(GL, assets.shipPng, "ship"),
+      Texture.fromImage(GL, assets.earthPng, "earth")
+    ];

     const shader_promies = [
       (async () =>
@@ -616,10 +622,19 @@ export async function set_instance(source: string): Promise<GameInstance> {
           assets.simpleVertexShader,
         ),
       ])(),
+      (async () =>
+        <[string, ShaderFactory]>[
+          "masked_image",
+          await ShaderFactory.create_factory(
+            assets.maskedImageFragmentShader,
+            assets.simpleVertexShader,
+          ),
+        ])(),

     ];
     let shaders_array: [string, ShaderFactory][];
-    [meshes, shaders_array] = await Promise.all([
-      Promise.all(mesh_promises),
+    [textures, shaders_array] = await Promise.all([
+      Promise.all(texture_promises),
       Promise.all(shader_promies),
     ]);

@@ -631,8 +646,9 @@ export async function set_instance(source: string): Promise<GameInstance> {

   game_instance = new GameInstance(
     Game.new(source),
-    meshes.slice(1),
-    meshes[0],
+    textures.slice(2),
+    textures[1],
+    textures[0],
     shaders
   );

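Since Texture.fromImage now returns a Promise<Texture> (see the Texture hunks further down), the texture loads can be started together and awaited as a group, the same way the shader factories were already loaded. A minimal sketch under that assumption; loadGameTextures is a hypothetical helper name, while Texture, assets and GL are the names used in the diff:

// Sketch only: start all image loads at once and wait for them together.
// The array order determines the indices used later in this diff
// (font = 0, ship = 1, earth = 2).
async function loadGameTextures(GL: WebGLRenderingContext): Promise<Texture[]> {
  const texture_promises = [
    Texture.fromImage(GL, assets.fontPng, "font"),
    Texture.fromImage(GL, assets.shipPng, "ship"),
    Texture.fromImage(GL, assets.earthPng, "earth"),
  ];
  // resolves once every texture has finished loading, rejects on the first failure
  return Promise.all(texture_promises);
}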
@@ -57,8 +57,8 @@ async function main() {
     return;
   }

+  // TODO: do we still need this?
   const mesh = await url_to_mesh("static/res/images/earth.svg");
-  console.log(Math.max(...mesh.positions), Math.min(...mesh.positions));
   const renderer = new Renderer();

   const factory = await ShaderFactory.create_factory(assets.simpleFragmentShader, assets.simpleVertexShader);
@@ -33,8 +33,8 @@ export class LabelFactory {
   font: FontInfo;
   shader: Shader;

-  constructor(gl: WebGLRenderingContext, loc: string, font: FontInfo, shader: Shader) {
-    this.texture = Texture.fromImage(gl, loc, 'font');
+  constructor(gl: WebGLRenderingContext, fontTexture: Texture, font: FontInfo, shader: Shader) {
+    this.texture = fontTexture;
     this.font = font;
     this.shader = shader;
   }
@@ -79,7 +79,6 @@ export class Label {
     const verts_pos = [];
     const verts_tex = [];

-    const letterHeight = this.font.letterHeight / this.font.textureHeight;
     let xPos = 0;

     switch (h_align) {
@@ -108,10 +107,17 @@ export class Label {
     for (let i = 0; i < text.length; i++) {
       const info = this.font.glyphInfos[text[i]];
       if (info) {

         const dx = info.width / this.font.letterHeight;
-        const letterWidth = info.width / this.font.textureWidth;
-        const x0 = info.x / this.font.textureWidth;
-        const y0 = info.y / this.font.textureHeight;
+        // apply half-pixel correction to prevent texture bleeding
+        // we should address the center of each texel, not the border
+        // https://gamedev.stackexchange.com/questions/46963/how-to-avoid-texture-bleeding-in-a-texture-atlas
+        const x0 = (info.x + 0.5) / this.font.textureWidth;
+        const y0 = (info.y + 0.5) / this.font.textureHeight;
+        const letterWidth = (info.width - 1) / this.font.textureWidth;
+        const letterHeight = (this.font.letterHeight - 1) / this.font.textureHeight;

         verts_pos.push(xPos, yStart);
         verts_pos.push(xPos + dx, yStart);
         verts_pos.push(xPos, yStart-1);
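The rewritten UV computation above applies a half-pixel correction: sampling starts at the centre of a glyph's first texel and the sampled width/height is shrunk by one texel, so linear filtering cannot bleed into the neighbouring glyph of the font atlas. A standalone sketch of the same arithmetic; glyphUvRect and the trimmed-down GlyphInfo/FontInfo interfaces are illustrative, but the field names follow the diff:

// Sketch only: compute the UV rectangle for one glyph in a texture atlas,
// inset by half a texel so linear filtering never crosses into a neighbour.
interface GlyphInfo { x: number; y: number; width: number; }
interface FontInfo { letterHeight: number; textureWidth: number; textureHeight: number; }

function glyphUvRect(info: GlyphInfo, font: FontInfo) {
  // address the centre of the first texel, not its border
  const x0 = (info.x + 0.5) / font.textureWidth;
  const y0 = (info.y + 0.5) / font.textureHeight;
  // one texel narrower/shorter, so the last sampled texel is also a centre
  const letterWidth = (info.width - 1) / font.textureWidth;
  const letterHeight = (font.letterHeight - 1) / font.textureHeight;
  return { x0, y0, x1: x0 + letterWidth, y1: y0 + letterHeight };
}

// e.g. an 8x8 glyph at (16, 0) in a 64x64 atlas samples u in [16.5/64, 23.5/64]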
@@ -138,7 +144,7 @@ export class Label {
   }
 }

-export function defaultLabelFactory(gl: WebGLRenderingContext, shader: Shader): LabelFactory {
+export function defaultLabelFactory(gl: WebGLRenderingContext, fontTexture: Texture, shader: Shader): LabelFactory {
   const fontInfo = {
     letterHeight: 8,
     spaceWidth: 8,
@@ -189,5 +195,5 @@ export function defaultLabelFactory(gl: WebGLRenderingContext, shader: Shader):
     },
   };

-  return new LabelFactory(gl, fontPng, fontInfo, shader);
+  return new LabelFactory(gl, fontTexture, fontInfo, shader);
 }
@@ -11,15 +11,18 @@ export class Texture {
     gl: WebGLRenderingContext,
     path: string,
     name: string,
-  ): Texture {
-    const out = new Texture(gl, name);
-
-    const image = new Image();
-    image.onload = out.setImage.bind(out, gl, image);
-    image.onerror = error;
-    image.src = path;
-
-    return out;
+  ): Promise<Texture> {
+    return new Promise((resolve, reject) => {
+      const out = new Texture(gl, name);
+
+      const image = new Image();
+      image.onload = () => {
+        out.setImage(gl, image);
+        resolve(out);
+      }
+      image.onerror = reject;
+      image.src = path;
+    })
   }

   static fromRenderer(
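fromImage becomes asynchronous by wrapping the Image onload/onerror callbacks in a Promise, which is what lets set_instance await all textures up front instead of rendering before the images have arrived. A minimal standalone sketch of the same pattern; loadImage is a hypothetical helper, not part of the diff:

// Sketch only: convert the callback-style Image API into a Promise so callers
// can `await` a single load or combine several with Promise.all.
function loadImage(src: string): Promise<HTMLImageElement> {
  return new Promise((resolve, reject) => {
    const image = new Image();
    image.onload = () => resolve(image); // fulfil once the pixels are available
    image.onerror = reject;              // propagate load errors to the caller
    image.src = src;                     // setting src starts the request
  });
}

// usage: const [font, ship] = await Promise.all([loadImage(fontUrl), loadImage(shipUrl)]);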
@@ -99,8 +102,3 @@ export class Texture {
     return this.height;
   }
 }
-
-function error(e: any) {
-  console.error("IMAGE LOAD ERROR");
-  console.error(e);
-}