initial commit

This commit is contained in:
soruh 2023-06-04 17:46:19 +02:00
commit 4b90f7c8d5
51 changed files with 9952 additions and 0 deletions

60
backend/.env.example Normal file
View File

@ -0,0 +1,60 @@
# An example .env file
# ✂️ Copy and 📋 Paste the following to a new .env file
# And then add your own values
############
# Database #
############
DATABASE_URL=
DATABASE_USERNAME=
DATABASE_PASSWORD=
DATABASE_HOST=
DATABASE_NAME=
DATABASE_PORT=
# SSL_MODE possible options: disable | allow | prefer | require | verify-ca | verify-full
# if left empty, default value is prefer
DATABASE_SSL_MODE=
###############
# Application #
###############
APPLICATION_PORT=
# if commented out or left empty, telemetry will default to stdout
APPLICATION_TELEMETRY_PATH=
###########
# General #
###########
# MIGRATION possible options: auto (when application starts up) | manual (has to be run manually)
# if commented out or left empty, environment will default to manual
MIGRATION=
######################################################
######################EXAMPLES########################
######################################################
####################
# Filled Example 1 #
####################
#DATABASE_URL=postgres://username:password@localhost:5432/dbname
#DATABASE_USERNAME=username
#DATABASE_PASSWORD=password
#DATABASE_HOST=localhost
#DATABASE_NAME=dbname
#DATABASE_PORT=5432
#DATABASE_SSL_MODE=prefer
#APPLICATION_PORT=8000
####################
# Filled Example 2 #
####################
#DATABASE_URL=postgres://admin:secret@remoteserver.com:5432/sales?sslmode=allow
#DATABASE_USERNAME=admin
#DATABASE_PASSWORD=secret
#DATABASE_HOST=remoteserver.com
#DATABASE_NAME=sales
#DATABASE_PORT=5432
#DATABASE_SSL_MODE=allow
#APPLICATION_PORT=8000
#MIGRATION=auto

View File

@ -0,0 +1,28 @@
# Please refer to README
############
# Database #
############
DATABASE_URL=postgres://postgres:secret@localhost:5432/template
DATABASE_USERNAME=postgres
DATABASE_PASSWORD=secret
DATABASE_HOST=localhost
DATABASE_NAME=template
DATABASE_PORT=5432
# SSL_MODE possible options: disable | allow | prefer | require | verify-ca | verify-full
# if left empty, default value is prefer
DATABASE_SSL_MODE=
###############
# Application #
###############
APPLICATION_PORT=8000
# if commented out or left empty, telemetry will default to stdout
APPLICATION_TELEMETRY_PATH=
###########
# General #
###########
# MIGRATION possible options: auto (when application starts up) | manual (has to be run manually)
# if commented out or left empty, environment will default to manual
MIGRATION=auto

3
backend/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
**/target
.env
log

3194
backend/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

56
backend/Cargo.toml Normal file
View File

@ -0,0 +1,56 @@
[package]
name = "backend"
version = "0.1.0"
authors = ["soruh <mail@soruh.de>"]
edition = "2021"
[workspace]
members = [".", "migration", "entity"]
[lib]
path = "src/lib.rs"
name = "lib"
[[bin]]
path = "src/main.rs"
name = "bin"
[dependencies]
actix-web = "4"
dotenv = "0.15.0"
secrecy = { version = "0.8.0", features = ["serde"] }
serde = { version = "1.0.144", features = ["derive"] }
tokio = { version = "1.20.1", features = ["macros", "rt-multi-thread"] }
tracing = { version = "0.1.36", features = ["log"] }
tracing-actix-web = "0.6.0"
tracing-bunyan-formatter = "0.3.3"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.15", features = ["registry", "env-filter"] }
tracing-appender = "0.2.2"
derive_more = "0.99.17"
chrono = "0.4.19"
validator = { version = "0.16", features = ["derive"] }
entity = { path = "entity" }
migration = { path = "migration" }
[dependencies.sea-orm]
version = "0.9.2"
default-features = true
features = ["mock", "default", "with-json", "macros", "sqlx-postgres", "runtime-actix-rustls", "chrono"]
[dev-dependencies]
async-std = { version = "^1", features = ["attributes", "tokio1"] }
reqwest = { version = "0.11.11", features = ["json"] }
once_cell = "1.13.1"
uuid = { version = "1.1.2", features = ["v4"] }
serde_json = "1.0.72"
[dev-dependencies.sqlx]
version = "0.6.1"
default-features = false
features = [
"runtime-actix-rustls",
"postgres",
]

View File

@ -0,0 +1,8 @@
[package]
name = "entity"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1.0.144", features = ["derive"] }
sea-orm = "0.9.2"

View File

@ -0,0 +1 @@
pub mod todo;

View File

@ -0,0 +1,3 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.9.2
pub use super::todo::Entity as Todo;

View File

@ -0,0 +1,28 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.9.2
use sea_orm::entity::prelude::*;
use serde::{Serialize, Deserialize};
// Entity for the `todo` table; DeriveEntityModel generates Entity/Column/ActiveModel.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "todo")]
pub struct Model {
// Auto-incrementing primary key; skipped on deserialization so clients cannot supply it.
#[sea_orm(primary_key)]
#[serde(skip_deserializing)]
pub id: i32,
pub title: String,
pub description: String,
pub done: bool,
pub created_at: DateTimeWithTimeZone,
pub updated_at: DateTimeWithTimeZone,
}
// The table has no relations; the empty enum still satisfies sea-orm's trait requirements.
#[derive(Copy, Clone, Debug, EnumIter)]
pub enum Relation {}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
// Unreachable in practice: `Relation` has no variants, so `def` can never be called.
panic!("No RelationDef")
}
}
// Default (no-op) hooks for insert/update/delete.
impl ActiveModelBehavior for ActiveModel {}

View File

@ -0,0 +1,19 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "^1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "^0.9.0"
features = [
"runtime-actix-rustls",
"sqlx-postgres",
]

View File

@ -0,0 +1,41 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- migrate generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback the last applied migration
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```

View File

@ -0,0 +1,16 @@
pub use sea_orm_migration::prelude::*;
//❗ After creating a new migration file, remove the sample migration below 👇
mod m20220101_000001_create_todo_table;
/// Registry of all database migrations for this project.
pub struct Migrator;
#[async_trait::async_trait]
impl MigratorTrait for Migrator {
/// Returns every migration in the order it should be applied.
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
vec![
//❗ After creating a new migration file, remove the sample migration below 👇
Box::new(m20220101_000001_create_todo_table::Migration)
]
}
}

View File

@ -0,0 +1,32 @@
use crate::sea_orm::{Statement, ConnectionTrait};
use sea_orm_migration::prelude::*;
// Sample migration creating the `todo` table via raw SQL.
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
/// Creates the `todo` table (idempotent via IF NOT EXISTS).
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
//❗ Do not replace the sample below 👇
//❗ Instead, create a new migration file and add your own migration code there.
//❗ See the README for more information.
let sql = r#"
CREATE TABLE IF NOT EXISTS todo (
id SERIAL PRIMARY KEY,
title VARCHAR(255) UNIQUE NOT NULL,
description VARCHAR(255) NOT NULL,
done BOOLEAN NOT NULL DEFAULT FALSE,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);
"#;
let stmt = Statement::from_string(manager.get_database_backend(), sql.to_owned());
// Discard the ExecResult; only success/failure matters here.
manager.get_connection().execute(stmt).await.map(|_| ())
}
/// Reverts the migration by dropping the `todo` table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let sql = "DROP TABLE todo";
let stmt = Statement::from_string(manager.get_database_backend(), sql.to_owned());
manager.get_connection().execute(stmt).await.map(|_| ())
}
}

View File

@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;
#[async_std::main]
async fn main() {
// Delegates to sea-orm-migration's CLI (up/down/fresh/refresh/reset/status/generate).
cli::run_cli(migration::Migrator).await;
}

View File

@ -0,0 +1,92 @@
use secrecy::{ExposeSecret, Secret};
use serde::Deserialize;
use std::env;
/// Postgres `sslmode` options for the connection string.
/// `rename_all = "snake_case"` lets serde accept the lowercase spellings.
#[derive(Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SslMode {
Disable,
Allow,
Prefer,
Require,
VerifyCa,
VerifyFull
}
/// Top-level application configuration assembled from environment variables.
#[derive(Deserialize)]
pub struct Settings {
pub database: DatabaseSettings,
// Port the HTTP server binds to (APPLICATION_PORT).
pub application_port: u16,
}
/// Postgres connection parameters.
#[derive(Deserialize)]
pub struct DatabaseSettings {
pub username: String,
// Wrapped in `Secret` so the password is redacted in Debug output.
pub password: Secret<String>,
pub port: u16,
pub host: String,
pub database_name: String,
pub ssl_mode: SslMode,
}
impl DatabaseSettings {
    /// Maps the configured `SslMode` to the literal Postgres expects in `sslmode=`.
    pub fn parse_ssl_mode(&self) -> String {
        let mode = match self.ssl_mode {
            SslMode::Disable => "disable",
            SslMode::Allow => "allow",
            SslMode::Prefer => "prefer",
            SslMode::Require => "require",
            SslMode::VerifyCa => "verify-ca",
            SslMode::VerifyFull => "verify-full",
        };
        mode.to_owned()
    }
    /// Full connection string including the database name, wrapped in `Secret`
    /// so the embedded password is not logged accidentally.
    pub fn connection_string(&self) -> Secret<String> {
        let url = format!(
            "postgres://{}:{}@{}:{}/{}?sslmode={}",
            self.username,
            self.password.expose_secret(),
            self.host,
            self.port,
            self.database_name,
            self.parse_ssl_mode()
        );
        Secret::new(url)
    }
    /// Connection string to the server only (no database selected),
    /// e.g. for creating the database itself.
    pub fn connection_string_without_db(&self) -> Secret<String> {
        let url = format!(
            "postgres://{}:{}@{}:{}?sslmode={}",
            self.username,
            self.password.expose_secret(),
            self.host,
            self.port,
            self.parse_ssl_mode()
        );
        Secret::new(url)
    }
}
/// Reads from the .env file / process environment and returns the settings
/// in the form of a configuration object.
///
/// # Panics
/// Panics when a required variable is missing or malformed; the message
/// names the offending variable.
pub fn get_config() -> Result<Settings, dotenv::Error> {
    // Load .env if present; variables already set in the process environment win.
    dotenv::dotenv().ok();
    let db_host = env::var("DATABASE_HOST").expect("DATABASE_HOST is not set in .env file");
    let db_port = env::var("DATABASE_PORT").expect("DATABASE_PORT is not set in .env file");
    let db_port = db_port.parse().expect("DATABASE_PORT is not a number");
    let db_name = env::var("DATABASE_NAME").expect("DATABASE_NAME is not set in .env file");
    let db_username = env::var("DATABASE_USERNAME").expect("DATABASE_USERNAME is not set in .env file");
    let db_password = env::var("DATABASE_PASSWORD").expect("DATABASE_PASSWORD is not set in .env file");
    // BUG FIX: the .env examples define DATABASE_SSL_MODE, but this code previously
    // read SSL_MODE only, so the configured mode was silently ignored. Read
    // DATABASE_SSL_MODE first and keep SSL_MODE as a backward-compatible fallback.
    let db_ssl_mode = env::var("DATABASE_SSL_MODE")
        .or_else(|_| env::var("SSL_MODE"))
        .unwrap_or_default();
    let db_ssl_mode = match db_ssl_mode.as_str() {
        "disable" => SslMode::Disable,
        "allow" => SslMode::Allow,
        "prefer" => SslMode::Prefer,
        "require" => SslMode::Require,
        "verify-ca" => SslMode::VerifyCa,
        "verify-full" => SslMode::VerifyFull,
        // if left empty, default to prefer
        "" => SslMode::Prefer,
        other => panic!("SSL_MODE: {} is not a valid value", other),
    };
    let app_port = env::var("APPLICATION_PORT").expect("APPLICATION_PORT is not set in .env file");
    let app_port = app_port.parse().expect("APPLICATION_PORT is not a number");
    Ok(Settings {
        database: DatabaseSettings {
            username: db_username,
            password: Secret::new(db_password),
            port: db_port,
            host: db_host,
            database_name: db_name,
            ssl_mode: db_ssl_mode
        },
        application_port: app_port,
    })
}

3
backend/src/core/mod.rs Normal file
View File

@ -0,0 +1,3 @@
pub mod config;
pub mod startup;
pub mod telemetry;

View File

@ -0,0 +1,29 @@
use actix_web::{web, App, HttpServer};
use actix_web::dev::Server;
use std::net::TcpListener;
use sea_orm::DbConn;
use tracing_actix_web::TracingLogger;
use crate::modules::health_check::controller::health_check;
use crate::modules::todo::controller::{create_todo, update_todo, get_list_of_todos, get_todo, bulk_delete_todos, delete_todo};
/// Runs the HTTP server.
/// Builds and starts the HTTP server on the given listener, sharing one
/// database connection handle across all worker threads.
pub fn run(listener: TcpListener, db: DbConn) -> Result<Server, std::io::Error> {
    // web::Data wraps the connection in an Arc, so each worker clones cheaply.
    let shared_db = web::Data::new(db);
    let server = HttpServer::new(move || {
        App::new()
            .wrap(TracingLogger::default())
            // Register your controllers below 👇
            .service(health_check)
            .service(get_list_of_todos)
            .service(get_todo)
            .service(create_todo)
            .service(update_todo)
            .service(delete_todo)
            .service(bulk_delete_todos)
            // Register application-wide shared data below 👇
            .app_data(shared_db.clone()) // 👈 ❗Important: Register the database connection pool
    })
    .listen(listener)?
    .run();
    Ok(server)
}

View File

@ -0,0 +1,31 @@
use tracing::subscriber::set_global_default;
use tracing::Subscriber;
use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
use tracing_log::LogTracer;
use tracing_subscriber::fmt::MakeWriter;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::{EnvFilter, Registry};
/// Returns a `tracing` subscriber that writes Bunyan-formatted JSON to `sink`.
///
/// `env_filter` is the default directive (e.g. "info"); when the `RUST_LOG`
/// environment variable is set it takes precedence.
pub fn get_subscriber<Sink>(
    name: String,
    env_filter: String,
    sink: Sink,
) -> impl Subscriber + Send + Sync
where
    Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static,
{
    let env_filter =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
    // `name` is not used again below, so move it directly instead of the
    // previous redundant `name.clone().into()`.
    let formatting_layer = BunyanFormattingLayer::new(name, sink);
    Registry::default()
        .with(env_filter)
        .with(JsonStorageLayer)
        .with(formatting_layer)
}
/// Initializes the global tracing subscriber.
///
/// Also redirects `log`-crate events into `tracing`. Must be called at most
/// once per process; both calls below panic on a second invocation.
pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
LogTracer::init().expect("Failed to set logger");
set_global_default(subscriber).expect("Failed to set subscriber");
}

3
backend/src/lib.rs Normal file
View File

@ -0,0 +1,3 @@
pub mod core;
pub mod shared;
pub mod modules;

59
backend/src/main.rs Normal file
View File

@ -0,0 +1,59 @@
use lib::core::config::get_config;
use lib::core::startup::run;
use lib::core::telemetry::{get_subscriber, init_subscriber};
use migration::{Migrator, MigratorTrait};
use secrecy::ExposeSecret;
use std::env;
use std::net::TcpListener;
const APPLICATION_NAME: &str = "backend";
#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Entry point: set up telemetry, read config, connect to the DB,
    // optionally migrate, then serve HTTP.
    dotenv::dotenv().ok();
    let application_telemetry_path =
        env::var("APPLICATION_TELEMETRY_PATH").unwrap_or_else(|_| "".to_string());
    let migration = env::var("MIGRATION").unwrap_or_else(|_| "".to_string());
    // The original `match` with a `!= ""` guard was an if/else in disguise.
    if !application_telemetry_path.is_empty() {
        // Set up a subscriber for logging to files, rolling daily
        let subscriber = get_subscriber(
            APPLICATION_NAME.to_owned(),
            "info".to_string(),
            tracing_appender::rolling::daily(application_telemetry_path, "log"),
        );
        init_subscriber(subscriber);
    } else {
        // Set up a subscriber for logging to the terminal -- good for development
        let subscriber = get_subscriber(
            APPLICATION_NAME.to_owned(),
            "info".to_string(),
            std::io::stdout,
        );
        init_subscriber(subscriber);
    }
    // Read the configuration from the environment.
    let config = get_config().expect("Failed to read configuration.");
    // Create a database connection pool; `expect` keeps the cause in the panic message.
    let conn = sea_orm::Database::connect(&*config.database.connection_string().expose_secret())
        .await
        .expect("Failed to connect to the database.");
    // ❗ If enabled, automatically migrate the database to the latest version when the application starts up.
    if migration == "auto" {
        Migrator::up(&conn, None)
            .await
            .expect("Failed to run migration.");
    }
    // Create a TCP listener at the configured address.
    let address = format!("127.0.0.1:{}", config.application_port);
    let listener = TcpListener::bind(address)?;
    // Run the App 🚀
    run(listener, conn)?.await
}

View File

@ -0,0 +1,6 @@
use actix_web::{get, HttpResponse};
/// Liveness probe: GET /health_check always answers 200 OK with an empty body.
#[get("/health_check")]
pub async fn health_check() -> HttpResponse {
HttpResponse::Ok().finish()
}

View File

@ -0,0 +1 @@
pub mod controller;

View File

@ -0,0 +1,2 @@
pub mod health_check;
pub mod todo;

View File

@ -0,0 +1,89 @@
use crate::{
modules::todo::{
dtos::{create_todo_dto::CreateTodoDto, update_todo_dto::UpdateTodoDto},
service::{find_todo_by_id, find_todos, insert_todo, update_todo_by_id, delete_todo_by_id, bulk_delete_todos_by_ids},
},
shared::{errors::CustomError, responses::CustomResponse},
};
use actix_web::{delete, get, post, put, web, HttpResponse};
use sea_orm::DbConn;
use serde::Deserialize;
use validator::Validate;
/// Query parameters for GET /todos; all optional.
#[derive(Deserialize)]
pub struct GetTodosQuery {
// Substring filter applied to the todo title.
query_string: Option<String>,
// Zero-based page index (service defaults to 0).
page: Option<usize>,
// Page size (service defaults to 10).
items_per_page: Option<usize>,
}
#[get("/todos")]
pub async fn get_list_of_todos(
conn: web::Data<DbConn>,
query: web::Query<GetTodosQuery>,
) -> Result<HttpResponse, CustomError> {
let results = find_todos(
&conn,
query.query_string.clone(),
query.items_per_page,
query.page,
)
.await?;
Ok(HttpResponse::Ok().json(results))
}
/// GET /todos/{id} — returns a single todo, or 404 when the id does not exist.
#[get("/todos/{id}")]
pub async fn get_todo(
    conn: web::Data<DbConn>,
    path: web::Path<usize>,
) -> Result<HttpResponse, CustomError> {
    let todo = find_todo_by_id(&conn, path.into_inner()).await?;
    Ok(HttpResponse::Ok().json(todo))
}
/// POST /todos — validates the payload and inserts a new todo.
///
/// Returns 400 with the validation details when the DTO fails validation,
/// 409 on a conflicting (duplicate) title.
#[post("/todos")]
pub async fn create_todo(
    conn: web::Data<DbConn>,
    create_todo_dto: web::Json<CreateTodoDto>,
) -> Result<CustomResponse, CustomError> {
    if let Err(e) = create_todo_dto.validate() {
        // Removed leftover debug output (`println!("DOES IT WORK?")`).
        return Err(CustomError::ValidationError { e });
    }
    // Take ownership of the DTO so the fields need no cloning.
    let dto = create_todo_dto.into_inner();
    let resp = insert_todo(&conn, dto.title.as_str(), dto.description.as_str(), dto.done).await?;
    Ok(resp)
}
#[put("/todos/{id}")]
pub async fn update_todo(
conn: web::Data<DbConn>,
path: web::Path<usize>,
update_todo_dto: web::Json<UpdateTodoDto>,
) -> Result<CustomResponse, CustomError> {
if let Err(e) = update_todo_dto.validate() {
return Err(CustomError::ValidationError { e });
}
let id = path.into_inner();
let title = update_todo_dto.title.clone();
let description = update_todo_dto.description.clone();
let done = update_todo_dto.done;
let resp = update_todo_by_id(&conn, id, title, description, done).await?;
Ok(resp)
}
/// DELETE /todos/{id} — deletes one todo, or 404 when the id does not exist.
#[delete("/todos/{id}")]
pub async fn delete_todo(conn: web::Data<DbConn>, path: web::Path<usize>) -> Result<CustomResponse, CustomError> {
    delete_todo_by_id(&conn, path.into_inner()).await
}
/// DELETE /todos — deletes every todo in the posted id list within one transaction.
#[delete("/todos")]
pub async fn bulk_delete_todos(conn: web::Data<DbConn>, ids: web::Json<Vec<usize>>) -> Result<CustomResponse, CustomError> {
    bulk_delete_todos_by_ids(&conn, ids.into_inner()).await
}

View File

@ -0,0 +1,10 @@
use validator::Validate;
/// Request body for POST /todos; all fields required.
#[derive(serde::Deserialize, Validate)]
pub struct CreateTodoDto {
// 1-30 characters; the DB additionally enforces title uniqueness.
#[validate(length(min = 1, max = 30))]
pub title: String,
// 1-1000 characters.
#[validate(length(min = 1, max = 1000))]
pub description: String,
pub done: bool,
}

View File

@ -0,0 +1,2 @@
pub mod create_todo_dto;
pub mod update_todo_dto;

View File

@ -0,0 +1,10 @@
use validator::Validate;
/// Request body for PUT /todos/{id}; every field is optional so callers
/// can update only what they send (partial update).
#[derive(serde::Deserialize, Validate)]
pub struct UpdateTodoDto {
// When present: 1-30 characters.
#[validate(length(min = 1, max = 30))]
pub title: Option<String>,
// When present: 1-1000 characters.
#[validate(length(min = 1, max = 1000))]
pub description: Option<String>,
pub done: Option<bool>,
}

View File

@ -0,0 +1,3 @@
pub mod controller;
mod service;
mod dtos;

View File

@ -0,0 +1,535 @@
use migration::DbErr;
use sea_orm::{query::*, ActiveModelTrait, ColumnTrait, DbConn, EntityTrait, Set, ModelTrait};
use crate::shared::errors::CustomError;
use crate::shared::responses::CustomResponse;
use entity::todo;
/// Returns one page of todos ordered by most recent update, optionally
/// filtered to titles containing `query_string`.
/// Defaults: 10 items per page, page 0.
pub async fn find_todos(
    conn: &DbConn,
    query_string: Option<String>,
    items_per_page: Option<usize>,
    page_num: Option<usize>,
) -> Result<Vec<todo::Model>, CustomError> {
    let base = todo::Entity::find();
    // Apply the title filter only when a query string was supplied.
    let filtered = match query_string {
        Some(q) => base.filter(todo::Column::Title.contains(q.as_str())),
        None => base,
    };
    filtered
        .order_by_desc(todo::Column::UpdatedAt)
        .paginate(conn, items_per_page.unwrap_or(10))
        .fetch_page(page_num.unwrap_or(0))
        .await
        .map_err(|_| CustomError::ServerError)
}
/// Fetches a single todo by id.
///
/// Returns `CustomError::NotFound` when the id does not exist and
/// `CustomError::ServerError` on any database failure.
pub async fn find_todo_by_id(conn: &DbConn, id: usize) -> Result<todo::Model, CustomError> {
    todo::Entity::find_by_id(id as i32)
        .one(conn)
        .await
        .map_err(|_| CustomError::ServerError)?
        // Idiomatic Option -> Result conversion replaces the is_none()/unwrap() pair.
        .ok_or(CustomError::NotFound)
}
/// Inserts a new todo and reports its id via `CustomResponse::Created`.
///
/// A query-level error (e.g. the UNIQUE constraint on title) maps to
/// `Conflict`; anything else to `ServerError`.
pub async fn insert_todo(
    conn: &DbConn,
    title: &str,
    description: &str,
    done: bool,
) -> Result<CustomResponse, CustomError> {
    // Unset fields (id, timestamps) fall back to their database defaults.
    let new_todo = todo::ActiveModel {
        title: Set(title.to_string()),
        description: Set(description.to_string()),
        done: Set(done),
        ..Default::default()
    };
    let res = todo::Entity::insert(new_todo)
        .exec(conn)
        .await
        .map_err(|e| {
            if matches!(e, DbErr::Query(..)) {
                CustomError::Conflict
            } else {
                CustomError::ServerError
            }
        })?;
    Ok(CustomResponse::Created { id: res.last_insert_id as usize })
}
/// Partially updates a todo: only the `Some(..)` arguments are written.
///
/// Returns `NotFound` when the id does not exist, `ServerError` on any
/// database failure.
pub async fn update_todo_by_id(
    conn: &DbConn,
    id: usize,
    title: Option<String>,
    description: Option<String>,
    done: Option<bool>,
) -> Result<CustomResponse, CustomError> {
    // Fetch first so a missing row surfaces as NotFound instead of a silent no-op.
    let model = todo::Entity::find_by_id(id as i32)
        .one(conn)
        .await
        .map_err(|_| CustomError::ServerError)?
        .ok_or(CustomError::NotFound)?;
    let mut active: todo::ActiveModel = model.into();
    if let Some(title) = title {
        active.title = Set(title);
    }
    if let Some(description) = description {
        active.description = Set(description);
    }
    if let Some(done) = done {
        active.done = Set(done);
    }
    active.update(conn).await.map_err(|e| {
        // Typo fixed ("Updated error" -> "Update error"); kept as println!
        // for consistency with the other logging in this module.
        println!("Update error: {:?}", e);
        CustomError::ServerError
    })?;
    Ok(CustomResponse::Updated { id })
}
/// Deletes the todo with the given id; `NotFound` when it does not exist.
pub async fn delete_todo_by_id(conn: &DbConn, id: usize) -> Result<CustomResponse, CustomError> {
    // Reuse the lookup so the 404 path is shared with get_todo.
    let todo = find_todo_by_id(conn, id).await?;
    todo.delete(conn).await.map_err(|_| CustomError::ServerError)?;
    Ok(CustomResponse::Deleted { id })
}
/// Deletes all todos in `ids` inside a single transaction; a missing id
/// aborts the whole batch with `NotFound` and nothing is committed.
pub async fn bulk_delete_todos_by_ids(
    conn: &DbConn,
    ids: Vec<usize>,
) -> Result<CustomResponse, CustomError> {
    let txn = conn.begin().await.map_err(|e| {
        println!("Transaction error: {:?}", e);
        CustomError::ServerError
    })?;
    // Iterate by reference so `ids` can still be returned in the response,
    // avoiding the previous full clone of the vector.
    for &id in &ids {
        // NOTE(review): the lookup runs on `conn`, outside the transaction —
        // presumably acceptable since earlier uncommitted deletes are not
        // visible there, but confirm this is intended.
        let found: todo::Model = find_todo_by_id(conn, id).await?;
        found.delete(&txn).await.map_err(|_| CustomError::ServerError)?;
    }
    txn.commit().await.map_err(|e| {
        println!("Transaction error: {:?}", e);
        CustomError::ServerError
    })?;
    Ok(CustomResponse::BulkDeleted { ids })
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::{FixedOffset, TimeZone};
use entity::todo;
use sea_orm::{DatabaseBackend, MockDatabase, MockExecResult, Transaction};
// Verifies filtering, pagination defaults, and the exact SQL issued by find_todos.
#[async_std::test]
async fn test_find_todos() -> Result<(), CustomError> {
// Fixed timestamp shared by every mocked row.
let datetime = FixedOffset::east(0).ymd(2016, 11, 08).and_hms(0, 0, 0);
// Mock DB returns one canned result set per query, in order.
let db = MockDatabase::new(DatabaseBackend::Postgres)
.append_query_results(vec![
// First query result
vec![
todo::Model {
id: 1,
title: "Todo 1".to_owned(),
description: "Todo 1 description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
},
todo::Model {
id: 2,
title: "Todo 2".to_owned(),
description: "Todo 2 description".to_owned(),
done: true,
created_at: datetime,
updated_at: datetime,
},
],
// Second query result
vec![
todo::Model {
id: 1,
title: "Apple pie".to_owned(),
description: "description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
},
todo::Model {
id: 3,
title: "Apple pizza".to_owned(),
description: "description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
},
],
// Third query result
vec![todo::Model {
id: 1,
title: "Apple pie".to_owned(),
description: "description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}],
])
.into_connection();
// testing find_todos with no query string
assert_eq!(
find_todos(&db, None, None, None).await?,
vec![
todo::Model {
id: 1,
title: "Todo 1".to_owned(),
description: "Todo 1 description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
},
todo::Model {
id: 2,
title: "Todo 2".to_owned(),
description: "Todo 2 description".to_owned(),
done: true,
created_at: datetime,
updated_at: datetime,
}
]
);
// testing find_todos with query string
assert_eq!(
find_todos(&db, Some("Apple".to_owned()), None, None).await?,
vec![
todo::Model {
id: 1,
title: "Apple pie".to_owned(),
description: "description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
},
todo::Model {
id: 3,
title: "Apple pizza".to_owned(),
description: "description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}
]
);
// testing find_todos with query string and pagination
assert_eq!(
find_todos(&db, Some("Apple".to_owned()), Some(5), Some(1)).await?,
vec![todo::Model {
id: 1,
title: "Apple pie".to_owned(),
description: "description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}]
);
// Checking transaction log: confirms defaults (LIMIT 10 OFFSET 0), the
// LIKE filter wildcarding, and page-1 offset arithmetic (5 * 1 = 5).
assert_eq!(
db.into_transaction_log(),
vec![
Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"SELECT "todo"."id", "todo"."title", "todo"."description", "todo"."done", "todo"."created_at", "todo"."updated_at" FROM "todo" ORDER BY "todo"."updated_at" DESC LIMIT $1 OFFSET $2"#,
vec![10u64.into(), 0u64.into()]
),
Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"SELECT "todo"."id", "todo"."title", "todo"."description", "todo"."done", "todo"."created_at", "todo"."updated_at" FROM "todo" WHERE "todo"."title" LIKE $1 ORDER BY "todo"."updated_at" DESC LIMIT $2 OFFSET $3"#,
vec!["%Apple%".into(), 10u64.into(), 0u64.into()]
),
Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"SELECT "todo"."id", "todo"."title", "todo"."description", "todo"."done", "todo"."created_at", "todo"."updated_at" FROM "todo" WHERE "todo"."title" LIKE $1 ORDER BY "todo"."updated_at" DESC LIMIT $2 OFFSET $3"#,
vec!["%Apple%".into(), 5u64.into(), 5u64.into()]
),
]
);
Ok(())
}
// Verifies the found and not-found paths of find_todo_by_id.
#[async_std::test]
async fn test_find_todo_by_id() -> Result<(), CustomError> {
let datetime = FixedOffset::east(0).ymd(2016, 11, 08).and_hms(0, 0, 0);
let db = MockDatabase::new(DatabaseBackend::Postgres)
.append_query_results(vec![
// First query result
vec![todo::Model {
id: 1,
title: "Todo 1".to_owned(),
description: "Todo 1 description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}],
// Second query result: empty -> NotFound
vec![],
])
.into_connection();
// testing find_todo_by_id with existing id
assert_eq!(
find_todo_by_id(&db, 1).await?,
todo::Model {
id: 1,
title: "Todo 1".to_owned(),
description: "Todo 1 description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}
);
// testing find_todo_by_id with non-existing id
assert_eq!(
find_todo_by_id(&db, 2).await.unwrap_err(),
CustomError::NotFound
);
Ok(())
}
// Verifies insert_todo issues a single INSERT with the expected bind values.
#[async_std::test]
async fn test_insert_todo() -> Result<(), CustomError> {
let title = "Test Title";
let description = "Test Description";
let done = false;
let datetime = FixedOffset::east(0).ymd(2016, 11, 08).and_hms(0, 0, 0);
let db = MockDatabase::new(DatabaseBackend::Postgres)
.append_query_results(vec![vec![todo::Model {
id: 15,
title: title.to_owned(),
description: description.to_owned(),
done,
created_at: datetime,
updated_at: datetime,
}]])
.append_exec_results(vec![MockExecResult {
last_insert_id: 15,
rows_affected: 1,
}])
.into_connection();
insert_todo(&db, title, description, done).await?;
// Only title/description/done are bound; id and timestamps use DB defaults.
assert_eq!(
db.into_transaction_log(),
vec![Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"INSERT INTO "todo" ("title", "description", "done") VALUES ($1, $2, $3) RETURNING "id""#,
vec![title.into(), description.into(), done.into()]
)]
);
Ok(())
}
// Verifies update_todo_by_id: SELECT-then-UPDATE for an existing id and
// NotFound for a missing one, plus the exact SQL of the first two statements.
#[async_std::test]
async fn test_update_todo_by_id() -> Result<(), CustomError> {
let id = 1;
let old_title = "Old Title";
let title = "Test Title";
let description = "Test Description";
let done = false;
let datetime = FixedOffset::east(0).ymd(2016, 11, 08).and_hms(0, 0, 0);
let db = MockDatabase::new(DatabaseBackend::Postgres)
.append_query_results(vec![
// First query result
vec![todo::Model {
id,
title: old_title.to_owned(),
description: description.to_owned(),
done,
created_at: datetime,
updated_at: datetime,
}],
// Second query result
vec![todo::Model {
id,
title: old_title.to_owned(),
description: description.to_owned(),
done,
created_at: datetime,
updated_at: datetime,
}],
// Third query result: empty -> NotFound
vec![],
])
.append_exec_results(vec![MockExecResult {
last_insert_id: 15,
rows_affected: 1,
}])
.into_connection();
// testing update_todo_by_id with existing id
update_todo_by_id(
&db,
id.try_into().unwrap(),
Some(title.to_owned()),
Some(description.to_owned()),
Some(done),
)
.await?;
// testing update_todo_by_id with non-existing id
assert_eq!(
update_todo_by_id(
&db,
2,
Some(title.to_owned()),
Some(description.to_owned()),
Some(done)
)
.await
.unwrap_err(),
CustomError::NotFound
);
// Only the first two statements are checked; the third is the failed lookup.
assert_eq!(
db.into_transaction_log()[..2],
vec![
Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"SELECT "todo"."id", "todo"."title", "todo"."description", "todo"."done", "todo"."created_at", "todo"."updated_at" FROM "todo" WHERE "todo"."id" = $1 LIMIT $2"#,
vec![id.into(), 1u64.into()]
),
Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"UPDATE "todo" SET "title" = $1, "description" = $2, "done" = $3 WHERE "todo"."id" = $4 RETURNING "id", "title", "description", "done", "created_at", "updated_at""#,
vec![title.into(), description.into(), done.into(), id.into()]
),
]
);
Ok(())
}
// Verifies delete_todo_by_id: DELETE for an existing id, NotFound for a missing one.
#[async_std::test]
async fn test_delete_todo_by_id() -> Result<(), CustomError> {
let id = 1;
let datetime = FixedOffset::east(0).ymd(2016, 11, 08).and_hms(0, 0, 0);
let db = MockDatabase::new(DatabaseBackend::Postgres)
.append_query_results(vec![
vec![todo::Model {
id,
title: "Todo 1".to_owned(),
description: "Todo 1 description".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}],
// Empty result -> NotFound for the second call.
vec![],
])
.append_exec_results(vec![MockExecResult {
last_insert_id: 1,
rows_affected: 1,
}])
.append_exec_results(vec![])
.into_connection();
// testing delete_todo_by_id with existing id
delete_todo_by_id(&db, id.try_into().unwrap()).await?;
// testing delete_todo_by_id with non-existing id
assert_eq!(
delete_todo_by_id(&db, 2).await.unwrap_err(),
CustomError::NotFound
);
// Statement 0 is the lookup SELECT; only the DELETE (index 1) is asserted.
assert_eq!(
db.into_transaction_log()[1..2],
vec![Transaction::from_sql_and_values(
DatabaseBackend::Postgres,
r#"DELETE FROM "todo" WHERE "todo"."id" = $1"#,
vec![id.into()]
)]
);
Ok(())
}
// Smoke test: bulk delete of three ids completes without error against the mock.
#[async_std::test]
async fn test_bulk_delete_todos() -> Result<(), CustomError> {
let datetime = FixedOffset::east(0).ymd(2016, 11, 08).and_hms(0, 0, 0);
let db = MockDatabase::new(DatabaseBackend::Postgres)
.append_query_results(vec![
// First query result
vec![todo::Model {
id: 1,
title: "test1".to_owned(),
description: "test1".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}],
// Second query result
vec![todo::Model {
id: 2,
title: "test2".to_owned(),
description: "test2".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}],
// Third query result
vec![todo::Model {
id: 3,
title: "test3".to_owned(),
description: "test3".to_owned(),
done: false,
created_at: datetime,
updated_at: datetime,
}],
])
.append_exec_results(vec![MockExecResult {
last_insert_id: 1,
rows_affected: 1,
}])
.append_exec_results(vec![MockExecResult {
last_insert_id: 2,
rows_affected: 2,
}])
.append_exec_results(vec![MockExecResult {
last_insert_id: 3,
rows_affected: 3,
}])
.into_connection();
bulk_delete_todos_by_ids(&db, vec![1, 2, 3]).await?;
// skipped assertion as there is no non-trivial way to test a transaction with many statements
Ok(())
}
}

View File

@ -0,0 +1,86 @@
use actix_web::{error::ResponseError, http::StatusCode, HttpResponse};
use derive_more::{Display, Error};
use serde::Serialize;
use validator::ValidationErrors;
/// JSON body returned for every error except validation failures.
#[derive(Serialize)]
struct FormattedErrorResponse {
status_code: u16,
// Short category name (e.g. "Not Found").
error: String,
// Human-readable detail from the error's Display impl.
message: String,
}
/// JSON body for validation failures; `message` carries the structured
/// per-field `ValidationErrors` instead of a plain string.
#[derive(Serialize)]
struct FormattedValidationErrorResponse {
status_code: u16,
error: String,
message: ValidationErrors,
}
/// Application-wide error type; each variant maps to one HTTP status
/// (see the ResponseError impl below in this file).
#[derive(Debug, Display, Error, PartialEq)]
pub enum CustomError {
// 400 with structured field errors in the body.
#[display(fmt = "Validation error")]
ValidationError { e: ValidationErrors },
// 500; deliberately vague toward clients.
#[display(fmt = "Internal server error. Please try again later.")]
ServerError,
#[display(fmt = "Bad request")]
BadRequestWithMsg { message: String },
#[display(fmt = "Bad request")]
BadRequest,
#[display(fmt = "Not found")]
NotFoundWithMsg { message: String },
#[display(fmt = "Not found")]
NotFound,
// 409, e.g. duplicate todo title.
#[display(fmt = "Conflict")]
Conflict,
#[display(fmt = "Unauthorized")]
Unauthorized,
}
impl CustomError {
fn message(&self) -> String {
match self {
CustomError::ServerError => "Internal Server Error".to_owned(),
CustomError::BadRequest => "Bad Request".to_owned(),
CustomError::BadRequestWithMsg { .. } => "Bad Request".to_owned(),
CustomError::NotFound => "Not Found".to_owned(),
CustomError::NotFoundWithMsg { .. } => "Not Found".to_owned(),
CustomError::Conflict => "Conflict".to_owned(),
CustomError::Unauthorized => "Unauthorized".to_owned(),
_ => "".to_owned(),
}
}
}
impl ResponseError for CustomError {
    /// Maps each error variant to its HTTP status code.
    fn status_code(&self) -> StatusCode {
        match self {
            CustomError::ServerError => StatusCode::INTERNAL_SERVER_ERROR,
            CustomError::ValidationError { .. }
            | CustomError::BadRequest
            | CustomError::BadRequestWithMsg { .. } => StatusCode::BAD_REQUEST,
            CustomError::NotFound | CustomError::NotFoundWithMsg { .. } => StatusCode::NOT_FOUND,
            CustomError::Conflict => StatusCode::CONFLICT,
            CustomError::Unauthorized => StatusCode::UNAUTHORIZED,
        }
    }

    /// Builds the JSON error body. Validation errors embed the structured
    /// `ValidationErrors`; every other variant uses its display string.
    fn error_response(&self) -> HttpResponse {
        let status = self.status_code();
        match self {
            CustomError::ValidationError { e } => {
                HttpResponse::build(status).json(FormattedValidationErrorResponse {
                    status_code: status.as_u16(),
                    error: self.message(),
                    message: e.to_owned(),
                })
            }
            other => HttpResponse::build(status).json(FormattedErrorResponse {
                status_code: status.as_u16(),
                error: other.message(),
                message: other.to_string(),
            }),
        }
    }
}

View File

@ -0,0 +1,2 @@
// Error types mapped to HTTP error responses via actix's `ResponseError`.
pub mod errors;
// Success responses implementing actix's `Responder`.
pub mod responses;

View File

@ -0,0 +1,65 @@
use actix_web::{HttpResponse, Responder, HttpRequest, body::BoxBody, http::StatusCode};
use derive_more::Display;
use serde::Serialize;
/// JSON body for single-entity success responses:
/// `{ "message": …, "id": … }`.
#[derive(Serialize)]
struct FormattedResponseWithId {
    message: String,
    id: usize,
}
/// JSON body for bulk success responses:
/// `{ "message": …, "ids": [...] }`.
#[derive(Serialize)]
struct FormattedResponseWithIds {
    message: String,
    ids: Vec<usize>,
}
/// Success response type for mutating endpoints; the `Responder` impl in this
/// file turns each variant into a JSON body. The `#[display(fmt = …)]` text
/// becomes the response's `message` field.
#[derive(Debug, Display, PartialEq)]
pub enum CustomResponse {
    /// 201 — entity created with the given id.
    #[display(fmt = "Created")]
    Created { id: usize },
    /// 200 — entity updated.
    #[display(fmt = "Updated")]
    Updated { id: usize },
    /// 200 — entity deleted.
    #[display(fmt = "Deleted")]
    Deleted { id: usize },
    /// 200 — several entities deleted at once.
    #[display(fmt = "Bulk Deleted")]
    BulkDeleted { ids: Vec<usize> },
}
impl CustomResponse {
    /// The affected entity id, if the variant carries exactly one.
    fn id(&self) -> Option<usize> {
        match self {
            CustomResponse::Created { id }
            | CustomResponse::Updated { id }
            | CustomResponse::Deleted { id } => Some(*id),
            CustomResponse::BulkDeleted { .. } => None,
        }
    }

    /// HTTP status for the response: 201 for creation, 200 otherwise.
    fn status_code(&self) -> StatusCode {
        match self {
            CustomResponse::Created { .. } => StatusCode::CREATED,
            CustomResponse::Updated { .. }
            | CustomResponse::Deleted { .. }
            | CustomResponse::BulkDeleted { .. } => StatusCode::OK,
        }
    }
}
impl Responder for CustomResponse {
    type Body = BoxBody;

    /// Serializes the response as JSON: bulk deletions carry an `ids` list,
    /// every other variant a single `id`.
    fn respond_to(self, _: &HttpRequest) -> HttpResponse {
        // Compute before `self` is moved into the match below.
        let status = self.status_code();
        let message = self.to_string();
        match self {
            CustomResponse::BulkDeleted { ids } => {
                HttpResponse::build(status).json(FormattedResponseWithIds { message, ids })
            }
            other => HttpResponse::build(status).json(FormattedResponseWithId {
                message,
                // Non-bulk variants always carry an id.
                id: other.id().unwrap(),
            }),
        }
    }
}

13
frontend/.eslintignore Normal file
View File

@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

30
frontend/.eslintrc.cjs Normal file
View File

@ -0,0 +1,30 @@
// ESLint configuration: TypeScript + Svelte, with Prettier compatibility.
module.exports = {
	// Stop ESLint from searching parent directories for more config files.
	root: true,
	extends: [
		'eslint:recommended',
		'plugin:@typescript-eslint/recommended',
		'plugin:svelte/recommended',
		// Must come last so Prettier can disable conflicting style rules.
		'prettier'
	],
	parser: '@typescript-eslint/parser',
	plugins: ['@typescript-eslint'],
	parserOptions: {
		sourceType: 'module',
		ecmaVersion: 2020,
		extraFileExtensions: ['.svelte']
	},
	env: {
		browser: true,
		es2017: true,
		node: true
	},
	overrides: [
		{
			// .svelte files need the Svelte parser; TS inside <script> blocks
			// is delegated to the TypeScript parser.
			files: ['*.svelte'],
			parser: 'svelte-eslint-parser',
			parserOptions: {
				parser: '@typescript-eslint/parser'
			}
		}
	]
};

10
frontend/.gitignore vendored Normal file
View File

@ -0,0 +1,10 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

2
frontend/.npmrc Normal file
View File

@ -0,0 +1,2 @@
engine-strict=true
resolution-mode=highest

13
frontend/.prettierignore Normal file
View File

@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

9
frontend/.prettierrc Normal file
View File

@ -0,0 +1,9 @@
{
"useTabs": true,
"singleQuote": true,
"trailingComma": "none",
"printWidth": 100,
"plugins": ["prettier-plugin-svelte"],
"pluginSearchDirs": ["."],
"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
}

38
frontend/README.md Normal file
View File

@ -0,0 +1,38 @@
# create-svelte
Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte).
## Creating a project
If you're seeing this, you've probably already done this step. Congrats!
```bash
# create a new project in the current directory
npm create svelte@latest
# create a new project in my-app
npm create svelte@latest my-app
```
## Developing
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
```bash
npm run dev
# or start the server and open the app in a new browser tab
npm run dev -- --open
```
## Building
To create a production version of your app:
```bash
npm run build
```
You can preview the production build with `npm run preview`.
> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.

4958
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

35
frontend/package.json Normal file
View File

@ -0,0 +1,35 @@
{
"name": "frontend",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "vite dev --strictPort --port 5173",
"build": "vite build",
"preview": "vite preview --strictPort --port 5173",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "prettier --plugin-search-dir . --check . && eslint .",
"format": "prettier --plugin-search-dir . --write ."
},
"devDependencies": {
"@sveltejs/adapter-auto": "^2.0.0",
"@sveltejs/adapter-static": "^2.0.2",
"@sveltejs/kit": "^1.5.0",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"eslint": "^8.28.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-svelte": "^2.26.0",
"prettier": "^2.8.0",
"prettier-plugin-svelte": "^2.8.1",
"svelte": "^3.54.0",
"svelte-check": "^3.0.1",
"tslib": "^2.4.1",
"typescript": "^5.0.0",
"vite": "^4.3.0"
},
"type": "module",
"dependencies": {
"webrtc-adapter": "^8.2.2"
}
}

13
frontend/src/app.d.ts vendored Normal file
View File

@ -0,0 +1,13 @@
// See https://kit.svelte.dev/docs/types#app
// for information about these interfaces
declare global {
	namespace App {
		// interface Error {}
		// interface Locals {}
		// interface PageData {}
		// interface Platform {}
	}
}

// Empty export makes this file a module, which is required for
// `declare global` to augment the global scope.
export { };

12
frontend/src/app.html Normal file
View File

@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
<meta name="viewport" content="width=device-width" />
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover">
<div style="display: contents">%sveltekit.body%</div>
</body>
</html>

View File

@ -0,0 +1,2 @@
// Prerender every route at build time (used with the static adapter
// configured in svelte.config.js).
export const prerender = true
// Disable server-side rendering; the app runs entirely client-side.
export const ssr = false;

View File

@ -0,0 +1,204 @@
<script lang="ts">
import adapter from 'webrtc-adapter';
import * as media from './media';
let media_devices = media.devices;
let available_devices = media.availableDevices;
let video: HTMLMediaElement;
let audio: HTMLMediaElement;
let public_ip: string | null = '';
let remote_offer = '';
let local_offer = '';
$: getUserMediaOptions = {
audio: $available_devices.audioinput,
video: $available_devices.videoinput
};
media.initialize().then(() => console.log('options:', getUserMediaOptions));
/**
 * Creates an RTCPeerConnection with the local audio track attached.
 * Returns the connection plus a promise that resolves with all gathered ICE
 * candidates once ICE gathering completes (rejects on a candidate error).
 */
async function getUserMedia(): Promise<[RTCPeerConnection, Promise<RTCIceCandidate[]>]> {
	const supportedConstraints = navigator.mediaDevices.getSupportedConstraints();
	console.log(supportedConstraints);

	// This obviously shouldn't be here and only is for testing.
	// Builds a time-limited TURN credential: base64(HMAC-SHA1(secret, user)).
	async function sign_username(secret: string, user: string): Promise<string> {
		// encoder to convert string to Uint8Array
		// NOTE: TextEncoder takes no constructor argument; it is always UTF-8.
		const enc = new TextEncoder();
		const key = await window.crypto.subtle.importKey(
			'raw', // raw format of the key - should be Uint8Array
			enc.encode(secret),
			{
				// algorithm details
				name: 'HMAC',
				hash: { name: 'SHA-1' }
			},
			false, // export = false
			['sign', 'verify'] // what this key can do
		);
		const signature = await window.crypto.subtle.sign('HMAC', key, enc.encode(user));

		function _arrayBufferToBase64(buffer: ArrayBuffer) {
			let binary = '';
			const bytes = new Uint8Array(buffer);
			for (let i = 0; i < bytes.byteLength; i++) binary += String.fromCharCode(bytes[i]);
			return window.btoa(binary);
		}

		return _arrayBufferToBase64(signature);
	}

	// Username format "<expiry-ms>:<name>"; valid for one minute from now.
	const username = `${+new Date() + 60 * 1000}:test`;
	// SECURITY: hard-coded shared secret in client code — test-only; signing
	// must move server-side before this ships.
	const credential = await sign_username('secret', username);
	console.log(credential);

	const iceServers = [
		// Test some TURN server
		{
			urls: ['turn:turn.h.glsys.de:3478'],
			username,
			credential
		}
		// Test some STUN server
		// { urls: ['stun:turn.h.glsys.de:3478'] },
	];

	const pc = new RTCPeerConnection({ iceServers });

	const candidates: RTCIceCandidate[] = [];
	pc.addEventListener('icecandidate', (e) => {
		if (!e.candidate) return;
		console.log(e.candidate.candidate);
		candidates.push(e.candidate);
		// If a srflx candidate was found, notify that the STUN server works!
		if (e.candidate.type == 'srflx') {
			console.log('The STUN server is reachable!');
			console.log(` Your Public IP Address is: ${e.candidate.address}`);
			public_ip = e.candidate.address;
		}
		// If a relay candidate was found, notify that the TURN server works!
		if (e.candidate.type == 'relay') {
			console.log('The TURN server is reachable !');
		}
	});

	// Promise that settles when ICE gathering is done (or errors out).
	let resolve: (candidates: RTCIceCandidate[]) => void, reject: (value: unknown) => void;
	const promise: Promise<RTCIceCandidate[]> = new Promise((res, rej) => {
		resolve = res;
		reject = rej;
	});
	pc.addEventListener('icegatheringstatechange', (e) => {
		console.log(e);
		if (e.target.iceGatheringState == 'complete') resolve(candidates);
	});
	pc.addEventListener('icecandidateerror', (e) => {
		console.error(e);
		reject(e);
	});

	// Route any incoming remote track to the <audio> element.
	pc.ontrack = (e) => {
		audio.srcObject = e.streams[0];
	};

	console.log('options:', getUserMediaOptions);
	const stream = await navigator.mediaDevices.getUserMedia(getUserMediaOptions);
	pc.addTrack(stream.getAudioTracks()[0]);

	return [pc, promise];
	// Removed: a dead getDisplayMedia/video-preview section that followed
	// this return and could never execute.
}
let pc: RTCPeerConnection;
let candidates: Promise<RTCIceCandidate[]>;
/**
 * Callee side: applies the pasted remote offer, creates an answer, and
 * publishes it (with local ICE candidates applied) as `local_offer`.
 */
async function accept() {
	[pc, candidates] = await getUserMedia();
	// setRemoteDescription is async — await it so createAnswer cannot run
	// before the remote description is actually applied.
	await pc.setRemoteDescription(JSON.parse(remote_offer));
	let answer = await pc.createAnswer({
		offerToReceiveAudio: true,
		offerToReceiveVideo: false
	});
	console.log(answer);
	await pc.setLocalDescription(answer);
	// Feed the gathered local candidates into the connection.
	for (let candidate of await candidates) await pc.addIceCandidate(candidate);
	local_offer = JSON.stringify(answer);
}
/**
 * Caller side: creates an SDP offer and publishes it as `local_offer`
 * for manual copy-paste signalling.
 */
async function offer() {
	[pc, candidates] = await getUserMedia();
	// Renamed from `offer` to avoid shadowing the function itself.
	const localDescription = await pc.createOffer({
		offerToReceiveAudio: true,
		offerToReceiveVideo: false
	});
	console.log(localDescription);
	await pc.setLocalDescription(localDescription);
	local_offer = JSON.stringify(localDescription);
}
/**
 * Caller side, step two: applies the pasted remote answer and feeds the
 * gathered local ICE candidates into the connection.
 */
async function establish() {
	// setRemoteDescription is async — await it before adding candidates.
	await pc.setRemoteDescription(JSON.parse(remote_offer));
	for (let candidate of await candidates) await pc.addIceCandidate(candidate);
}
</script>
<h1>Welcome</h1>
<p>Your public ip is: {public_ip || 'unknown'}</p>
<video bind:this={video} />
<audio bind:this={audio} />
<p>See also: <a href="/a">a</a></p>
<h1>remote offer:</h1>
<textarea bind:value={remote_offer} />
<button on:click={accept}>accept</button>
<button on:click={offer}>offer</button>
<button on:click={establish}>establish</button>
<h1>local offer:</h1>
<textarea value={local_offer} />
<ul>
{#each $media_devices as device}
<li>{device.kind} | <code>{device.deviceId}</code> | {device.label}</li>
{/each}
</ul>
<div>{JSON.stringify($available_devices)}</div>

View File

@ -0,0 +1 @@
<h1>A</h1>

View File

@ -0,0 +1,37 @@
import { writable, type Writable, derived, type Readable } from 'svelte/store';
// Store of all media devices reported by enumerateDevices().
export const devices: Writable<MediaDeviceInfo[]> = writable([]);

/**
 * Derived flags: whether at least one device of each kind is present.
 */
export const availableDevices: Readable<{
	audioinput: boolean,
	audiooutput: boolean,
	videoinput: boolean,
}> = derived(devices, ($devices) => {
	const available: { [key in MediaDeviceKind]: boolean } = {
		audioinput: false,
		audiooutput: false,
		videoinput: false,
	};
	for (const device of $devices) {
		// `in` instead of a direct `hasOwnProperty` call
		// (eslint no-prototype-builtins); `available` has no inherited keys.
		if (device.kind in available) available[device.kind] = true;
	}
	return available;
});
/**
 * Populates the device store and keeps it in sync when devices are
 * plugged/unplugged.
 */
export async function initialize() {
	await update();
	// BUG FIX: 'devicechange' fires on navigator.mediaDevices, not on
	// window — the bare addEventListener never received the event.
	navigator.mediaDevices.addEventListener('devicechange', (event) => {
		console.log(event);
		update();
	});
}
// Refresh the `devices` store with the current device list.
export async function update() {
	devices.set(await navigator.mediaDevices.enumerateDevices());
}

BIN
frontend/static/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

28
frontend/svelte.config.js Normal file
View File

@ -0,0 +1,28 @@
// import adapter from '@sveltejs/adapter-auto';
import adapter from '@sveltejs/adapter-static';
import { vitePreprocess } from '@sveltejs/kit/vite';
/** @type {import('@sveltejs/kit').Config} */
const config = {
	// Consult https://kit.svelte.dev/docs/integrations#preprocessors
	// for more information about preprocessors
	preprocess: vitePreprocess(),

	kit: {
		// adapter-auto only supports some environments, see https://kit.svelte.dev/docs/adapter-auto for a list.
		// If your environment is not supported or you settled on a specific environment, switch out the adapter.
		// See https://kit.svelte.dev/docs/adapters for more information about adapters.
		// adapter: adapter()

		// Static adapter — pairs with `export const prerender = true`
		// in src/routes/+layout.ts.
		adapter: adapter({
			// default options are shown. On some platforms
			// these options are set automatically — see below
			pages: 'build',
			assets: 'build',
			fallback: null,
			precompress: false,
			strict: true
		}),
	}
};

export default config;

18
frontend/tsconfig.json Normal file
View File

@ -0,0 +1,18 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"strictNullChecks": true
}
// Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias
//
// If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes
// from the referenced tsconfig.json - TypeScript does not merge them in
}

6
frontend/vite.config.ts Normal file
View File

@ -0,0 +1,6 @@
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';
// Vite configuration: the SvelteKit plugin supplies dev server, build,
// and preprocessing behaviour; ports are set via package.json scripts.
export default defineConfig({
	plugins: [sveltekit()]
});