Rename main folders and write sql backend adaptor

This commit is contained in:
Ben Grant 2023-05-11 17:04:17 +10:00
parent 1d34f8e06d
commit fdc58b428b
212 changed files with 3577 additions and 4775 deletions

View file

@ -3,7 +3,7 @@ name: Deploy Backend
on:
push:
branches: ['main']
paths: ['crabfit-backend/**']
paths: ['backend/**']
jobs:
deploy:
@ -11,7 +11,7 @@ jobs:
defaults:
run:
working-directory: crabfit-backend
working-directory: backend
permissions:
contents: read
@ -33,5 +33,5 @@ jobs:
- id: deploy
uses: google-github-actions/deploy-appengine@v0
with:
working_directory: crabfit-backend
working_directory: backend
version: v1

View file

@ -3,7 +3,7 @@ name: Deploy Frontend
on:
push:
branches: ['main']
paths: ['crabfit-frontend/**']
paths: ['frontend/**']
jobs:
deploy:
@ -11,7 +11,7 @@ jobs:
defaults:
run:
working-directory: crabfit-frontend
working-directory: frontend
permissions:
contents: read
@ -33,5 +33,5 @@ jobs:
- id: deploy
uses: google-github-actions/deploy-appengine@v0
with:
working_directory: crabfit-frontend
working_directory: frontend
version: v1

3
.gitignore vendored
View file

@ -1,3 +1,4 @@
/graphics
.DS_Store
/crabfit-browser-extension/*.zip
/browser-extension/*.zip
js-backend

2
backend/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
target
.env

3014
backend/Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

15
backend/Cargo.toml Normal file
View file

@ -0,0 +1,15 @@
[package]
name = "crabfit_backend"
version = "1.1.0"
edition = "2021"
[workspace]
members = ["data", "adaptors/*"]
[dependencies]
axum = "0.6.18"
serde = { version = "1.0.162", features = ["derive"] }
tokio = { version = "1.28.0", features = ["macros", "rt-multi-thread"] }
data = { path = "data" }
sql-adaptor = { path = "adaptors/sql" }
dotenv = "0.15.0"

View file

@ -0,0 +1,14 @@
[package]
name = "sql-adaptor"
version = "0.1.0"
edition = "2021"
[dependencies]
async-trait = "0.1.68"
data = { path = "../../data" }
sea-orm = { version = "0.11.3", features = [ "macros", "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", "runtime-tokio-native-tls" ] }
serde = { version = "1.0.162", features = [ "derive" ] }
async-std = { version = "1", features = ["attributes", "tokio1"] }
sea-orm-migration = "0.11.0"
serde_json = "1.0.96"
chrono = "0.4.24"

View file

@ -0,0 +1,42 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3

use chrono::{DateTime as ChronoDateTime, Utc};
use data::event::Event;
use sea_orm::entity::prelude::*;

/// Row model for the `event` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "event")]
pub struct Model {
    /// Application-generated slug id; not auto-incremented by the database.
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: String,
    pub name: String,
    /// Stored as a naive timestamp; treated as UTC in the conversion below.
    pub created_at: DateTime,
    /// JSON-encoded list of time strings (mirrors `data::event::Event::times`).
    pub times: Json,
    pub timezone: String,
}

/// One event has many people (see `person::Relation::Event` for the FK side).
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::person::Entity")]
    Person,
}

impl Related<super::person::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Person.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

/// Convert a database row into the storage-agnostic `data` crate type.
impl From<Model> for Event {
    fn from(value: Model) -> Self {
        Self {
            id: value.id,
            name: value.name,
            // Re-attach the UTC timezone the naive DB timestamp was stored in.
            created_at: ChronoDateTime::<Utc>::from_utc(value.created_at, Utc),
            // Malformed stored JSON degrades to an empty list rather than erroring.
            times: serde_json::from_value(value.times).unwrap_or(vec![]),
            timezone: value.timezone,
        }
    }
}

View file

@ -0,0 +1,7 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3

// One module per table; `prelude` re-exports their `Entity` types under
// short aliases.
pub mod prelude;

pub mod event;
pub mod person;
pub mod stats;

View file

@ -0,0 +1,48 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3

use chrono::{DateTime as ChronoDateTime, Utc};
use data::person::Person;
use sea_orm::entity::prelude::*;

/// Row model for the `person` table.
///
/// Uses a composite primary key of (`name`, `event_id`): a person's name is
/// unique only within its event.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "person")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub name: String,
    /// `None` when the person did not protect their entry with a password.
    pub password_hash: Option<String>,
    pub created_at: DateTime,
    /// JSON-encoded list of availability strings.
    pub availability: Json,
    #[sea_orm(primary_key, auto_increment = false)]
    pub event_id: String,
}

/// Each person belongs to exactly one event; cascading so deleting an event
/// removes its people.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::event::Entity",
        from = "Column::EventId",
        to = "super::event::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Event,
}

impl Related<super::event::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Event.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

/// Convert a database row into the storage-agnostic `data` crate type.
/// Note the `event_id` column is dropped: `data::person::Person` has no such
/// field, since it is always fetched in the context of an event.
impl From<Model> for Person {
    fn from(value: Model) -> Self {
        Self {
            name: value.name,
            password_hash: value.password_hash,
            // Re-attach the UTC timezone the naive DB timestamp was stored in.
            created_at: ChronoDateTime::<Utc>::from_utc(value.created_at, Utc),
            // Malformed stored JSON degrades to an empty list rather than erroring.
            availability: serde_json::from_value(value.availability).unwrap_or(vec![]),
        }
    }
}

View file

@ -0,0 +1,5 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3

// Short aliases so call sites can write `Event` instead of `event::Entity`.
pub use super::event::Entity as Event;
pub use super::person::Entity as Person;
pub use super::stats::Entity as Stats;

View file

@ -0,0 +1,27 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3

use data::stats::Stats;
use sea_orm::entity::prelude::*;

/// Row model for the `stats` table.
///
/// In practice a single counter row is used (see `get_stats_row` in the
/// adaptor), but the schema does not enforce that — hence the surrogate `id`.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "stats")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub event_count: i32,
    pub person_count: i32,
}

/// Stats are standalone; no relations to other tables.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

/// Convert a database row into the storage-agnostic `data` crate type,
/// dropping the internal surrogate `id`.
impl From<Model> for Stats {
    fn from(value: Model) -> Self {
        Self {
            event_count: value.event_count,
            person_count: value.person_count,
        }
    }
}

View file

@ -0,0 +1,152 @@
use std::{env, error::Error};
use async_trait::async_trait;
use data::{
adaptor::Adaptor,
event::{Event, EventDeletion},
person::Person,
stats::Stats,
};
use entity::{event, person, stats};
use migration::{Migrator, MigratorTrait};
use sea_orm::{
ActiveModelTrait,
ActiveValue::{NotSet, Set},
ColumnTrait, Database, DatabaseConnection, DbErr, EntityTrait, ModelTrait, QueryFilter,
TransactionTrait, TryIntoModel,
};
use serde_json::json;
mod entity;
mod migration;
/// SQL-backed implementation of `data::adaptor::Adaptor`, built on SeaORM.
///
/// NOTE(review): despite the name, the enabled SeaORM features also cover
/// MySQL and SQLite — confirm whether a rename to `SqlAdaptor` is intended.
pub struct PostgresAdaptor {
    db: DatabaseConnection,
}

#[async_trait]
impl Adaptor for PostgresAdaptor {
    /// Connects using the `DATABASE_URL` environment variable and runs all
    /// pending migrations.
    ///
    /// # Panics
    /// If `DATABASE_URL` is unset, the connection fails, or a migration
    /// fails — matching the trait's documented contract for `new`.
    async fn new() -> Self {
        let connection_string = env::var("DATABASE_URL").unwrap();

        // Connect to the database
        let db = Database::connect(&connection_string).await.unwrap();
        println!("Connected to database at {}", connection_string);

        // Setup tables
        Migrator::up(&db, None).await.unwrap();

        Self { db }
    }

    async fn get_stats(&self) -> Result<Stats, Box<dyn Error>> {
        // The row may be a fresh zeroed ActiveModel when no stats row exists
        // yet; `try_into_model` materializes it either way.
        Ok(get_stats_row(&self.db).await?.try_into_model()?.into())
    }

    async fn increment_stat_event_count(&self) -> Result<i32, Box<dyn Error>> {
        // NOTE(review): read-modify-write without a transaction; concurrent
        // increments could lose updates — confirm whether that matters here.
        let mut current_stats = get_stats_row(&self.db).await?;
        current_stats.event_count = Set(current_stats.event_count.unwrap() + 1);
        Ok(current_stats.save(&self.db).await?.event_count.unwrap())
    }

    async fn increment_stat_person_count(&self) -> Result<i32, Box<dyn Error>> {
        // Same non-atomic read-modify-write pattern as the event counter above.
        let mut current_stats = get_stats_row(&self.db).await?;
        current_stats.person_count = Set(current_stats.person_count.unwrap() + 1);
        Ok(current_stats.save(&self.db).await?.person_count.unwrap())
    }

    /// Returns `None` when the event itself does not exist, otherwise the
    /// (possibly empty) list of its people.
    async fn get_people(&self, event_id: String) -> Result<Option<Vec<Person>>, Box<dyn Error>> {
        // TODO: optimize into one query
        let event_row = event::Entity::find_by_id(event_id).one(&self.db).await?;
        Ok(match event_row {
            Some(event) => Some(
                event
                    .find_related(person::Entity)
                    .all(&self.db)
                    .await?
                    .into_iter()
                    .map(|model| model.into())
                    .collect(),
            ),
            None => None,
        })
    }

    /// Insert-or-update keyed on the (event_id, name) composite primary key.
    async fn upsert_person(
        &self,
        event_id: String,
        person: Person,
    ) -> Result<Person, Box<dyn Error>> {
        Ok(person::ActiveModel {
            name: Set(person.name),
            password_hash: Set(person.password_hash),
            // Stored as naive UTC; converted back in entity::person::From.
            created_at: Set(person.created_at.naive_utc()),
            // Unserializable availability degrades to an empty JSON array.
            availability: Set(serde_json::to_value(person.availability).unwrap_or(json!([]))),
            event_id: Set(event_id),
        }
        .save(&self.db)
        .await?
        .try_into_model()?
        .into())
    }

    async fn get_event(&self, id: String) -> Result<Option<Event>, Box<dyn Error>> {
        Ok(event::Entity::find_by_id(id)
            .one(&self.db)
            .await?
            .map(|model| model.into()))
    }

    async fn create_event(&self, event: Event) -> Result<Event, Box<dyn Error>> {
        Ok(event::ActiveModel {
            id: Set(event.id),
            name: Set(event.name),
            created_at: Set(event.created_at.naive_utc()),
            times: Set(serde_json::to_value(event.times).unwrap_or(json!([]))),
            timezone: Set(event.timezone),
        }
        .save(&self.db)
        .await?
        .try_into_model()?
        .into())
    }

    /// Deletes an event and its people atomically, returning how many people
    /// were removed alongside it.
    async fn delete_event(&self, id: String) -> Result<EventDeletion, Box<dyn Error>> {
        // Clone so the original `id` survives the `move` into the closure.
        let event_id = id.clone();
        let person_count = self
            .db
            .transaction::<_, u64, DbErr>(|t| {
                Box::pin(async move {
                    // Delete people
                    let people_delete_result = person::Entity::delete_many()
                        .filter(person::Column::EventId.eq(&event_id))
                        .exec(t)
                        .await?;

                    // Delete event
                    event::Entity::delete_by_id(event_id).exec(t).await?;

                    Ok(people_delete_result.rows_affected)
                })
            })
            .await?;

        Ok(EventDeletion { id, person_count })
    }
}
/// Fetch the stats row as an `ActiveModel`, or build a zeroed, unsaved one
/// (with `id` left unset so the DB assigns it) when no row exists yet.
async fn get_stats_row(db: &DatabaseConnection) -> Result<stats::ActiveModel, DbErr> {
    let existing = stats::Entity::find().one(db).await?;
    Ok(existing.map_or_else(
        || stats::ActiveModel {
            id: NotSet,
            event_count: Set(0),
            person_count: Set(0),
        },
        Into::into,
    ))
}

View file

@ -0,0 +1,12 @@
pub use sea_orm_migration::prelude::*;

mod setup_tables;

/// Collects every schema migration for the SQL adaptor, in application order.
pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        // New migrations are appended here; SeaORM tracks which have run.
        vec![Box::new(setup_tables::Migration)]
    }
}

View file

@ -0,0 +1,120 @@
use sea_orm_migration::prelude::*;

/// Initial migration: creates the `stats`, `event` and `person` tables plus
/// the person→event foreign key.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // NOTE(review): `print!` without flushing stdout may not appear until
        // the trailing `println!` — consider flushing or a single `println!`.
        print!("Setting up database...");

        // Stats table — single-row counters with a surrogate auto-increment id.
        manager
            .create_table(
                Table::create()
                    .table(Stats::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Stats::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Stats::EventCount).integer().not_null())
                    .col(ColumnDef::new(Stats::PersonCount).integer().not_null())
                    .to_owned(),
            )
            .await?;

        // Events table — `id` is the application-generated slug, so no
        // auto-increment; `times`/`availability` hold JSON arrays.
        manager
            .create_table(
                Table::create()
                    .table(Event::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Event::Id).string().not_null().primary_key())
                    .col(ColumnDef::new(Event::Name).string().not_null())
                    .col(ColumnDef::new(Event::CreatedAt).timestamp().not_null())
                    .col(ColumnDef::new(Event::Times).json().not_null())
                    .col(ColumnDef::new(Event::Timezone).string().not_null())
                    .to_owned(),
            )
            .await?;

        // People table — composite primary key (event_id, name): names are
        // unique only within an event.
        manager
            .create_table(
                Table::create()
                    .table(Person::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Person::Name).string().not_null())
                    .col(ColumnDef::new(Person::PasswordHash).string())
                    .col(ColumnDef::new(Person::CreatedAt).timestamp().not_null())
                    .col(ColumnDef::new(Person::Availability).json().not_null())
                    .col(ColumnDef::new(Person::EventId).string().not_null())
                    .primary_key(Index::create().col(Person::EventId).col(Person::Name))
                    .to_owned(),
            )
            .await?;

        // Relation: cascade so deleting an event removes its people too.
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("FK_person_event")
                    .from(Person::Table, Person::EventId)
                    .to(Event::Table, Event::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .on_update(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;

        println!(" done");

        Ok(())
    }

    /// Drops everything `up` created, children first so the foreign key
    /// doesn't block the `event` table drop.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Stats::Table).to_owned())
            .await?;
        manager
            .drop_table(Table::drop().table(Person::Table).to_owned())
            .await?;
        manager
            .drop_table(Table::drop().table(Event::Table).to_owned())
            .await?;

        Ok(())
    }
}
/// Learn more at https://docs.rs/sea-query#iden
// These enums give the migration type-safe table/column identifiers; the
// `Iden` derive maps each variant to its snake_case SQL name.
#[derive(Iden)]
enum Stats {
    Table,
    Id,
    EventCount,
    PersonCount,
}

#[derive(Iden)]
enum Event {
    Table,
    Id,
    Name,
    CreatedAt,
    Times,
    Timezone,
}

#[derive(Iden)]
enum Person {
    Table,
    Name,
    PasswordHash,
    CreatedAt,
    Availability,
    EventId,
}

9
backend/data/Cargo.toml Normal file
View file

@ -0,0 +1,9 @@
[package]
name = "data"
description = "Structs and traits for the data storage and transfer of Crab Fit"
version = "0.1.0"
edition = "2021"
[dependencies]
async-trait = "0.1.68"
chrono = "0.4.24"

View file

@ -0,0 +1,37 @@
use std::error::Error;

use async_trait::async_trait;

use crate::{
    event::{Event, EventDeletion},
    person::Person,
    stats::Stats,
};

/// Data storage adaptor, all methods on an adaptor can return an error if
/// something goes wrong, or potentially None if the data requested was not found.
#[async_trait]
pub trait Adaptor {
    /// Creates a new adaptor and performs all setup required
    ///
    /// # Panics
    /// If an error occurs while setting up the adaptor
    async fn new() -> Self;

    /// Returns the current event/person counters.
    async fn get_stats(&self) -> Result<Stats, Box<dyn Error>>;

    /// Adds one to the stored event count, returning the new value.
    async fn increment_stat_event_count(&self) -> Result<i32, Box<dyn Error>>;

    /// Adds one to the stored person count, returning the new value.
    async fn increment_stat_person_count(&self) -> Result<i32, Box<dyn Error>>;

    /// Lists everyone in an event; `None` when the event does not exist.
    async fn get_people(&self, event_id: String) -> Result<Option<Vec<Person>>, Box<dyn Error>>;

    /// Creates or updates a person within an event, returning the stored value.
    async fn upsert_person(
        &self,
        event_id: String,
        person: Person,
    ) -> Result<Person, Box<dyn Error>>;

    /// Fetches an event by id; `None` when not found.
    async fn get_event(&self, id: String) -> Result<Option<Event>, Box<dyn Error>>;

    /// Stores a new event, returning it as stored.
    async fn create_event(&self, event: Event) -> Result<Event, Box<dyn Error>>;

    /// Delete an event as well as all related people
    async fn delete_event(&self, id: String) -> Result<EventDeletion, Box<dyn Error>>;
}

16
backend/data/src/event.rs Normal file
View file

@ -0,0 +1,16 @@
use chrono::{DateTime, Utc};

/// A scheduling event: a set of candidate times that people mark their
/// availability against.
// Derives added: public data-transfer types should be debuggable, cloneable
// and comparable; all fields already support these traits.
#[derive(Debug, Clone, PartialEq)]
pub struct Event {
    /// Slug-style identifier chosen by the application layer.
    pub id: String,
    pub name: String,
    pub created_at: DateTime<Utc>,
    /// Candidate time slots, encoded as strings.
    pub times: Vec<String>,
    pub timezone: String,
}

/// Info about a deleted event
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EventDeletion {
    pub id: String,
    /// The amount of people that were in this event that were also deleted
    pub person_count: u64,
}

4
backend/data/src/lib.rs Normal file
View file

@ -0,0 +1,4 @@
// Storage-agnostic data types plus the `Adaptor` trait that concrete
// storage backends implement.
pub mod adaptor;
pub mod event;
pub mod person;
pub mod stats;

View file

@ -0,0 +1,8 @@
use chrono::{DateTime, Utc};

/// A person's availability entry within an event.
// Derives added: public data-transfer types should be debuggable, cloneable
// and comparable; all fields already support these traits.
#[derive(Debug, Clone, PartialEq)]
pub struct Person {
    pub name: String,
    /// `None` when the person did not protect their entry with a password.
    pub password_hash: Option<String>,
    pub created_at: DateTime<Utc>,
    /// Availability slots, encoded as strings.
    pub availability: Vec<String>,
}

View file

@ -0,0 +1,4 @@
/// Aggregate usage counters for the whole service.
// Derives added: two plain i32 fields make this trivially Copy/Eq/Default,
// and public types should be Debug-printable.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Stats {
    /// Total number of events ever created.
    pub event_count: i32,
    /// Total number of people ever created.
    pub person_count: i32,
}

33
backend/src/main.rs Normal file
View file

@ -0,0 +1,33 @@
use std::net::SocketAddr;

use axum::{routing::get, Router, Server};
use data::adaptor::Adaptor;
use sql_adaptor::PostgresAdaptor;

// Compile-time mode string, reported in the startup log line.
#[cfg(debug_assertions)]
const MODE: &str = "debug";
#[cfg(not(debug_assertions))]
const MODE: &str = "release";

#[tokio::main]
async fn main() {
    // Load env
    dotenv::dotenv().ok();

    // NOTE(review): the adaptor is constructed (connecting + migrating) but
    // its value is dropped — presumably wiring it into the router as shared
    // state is still to come; confirm before release.
    PostgresAdaptor::new().await;

    let app = Router::new().route("/", get(get_root));

    // Binds to localhost only; deployment environments may need 0.0.0.0 —
    // TODO confirm against the App Engine config.
    let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
    println!("Crab Fit API listening at http://{} in {} mode", addr, MODE);
    Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .unwrap();
}

/// Root route: reports the crate version baked in at compile time.
async fn get_root() -> String {
    format!("Crab Fit API v{}", env!("CARGO_PKG_VERSION"))
}

View file

Before

Width:  |  Height:  |  Size: 3.3 KiB

After

Width:  |  Height:  |  Size: 3.3 KiB

View file

Before

Width:  |  Height:  |  Size: 416 B

After

Width:  |  Height:  |  Size: 416 B

View file

Before

Width:  |  Height:  |  Size: 841 B

After

Width:  |  Height:  |  Size: 841 B

View file

Before

Width:  |  Height:  |  Size: 1.3 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

View file

@ -1,50 +0,0 @@
// ESLint configuration for the Node backend: 2-space indent, single quotes,
// no semicolons, ES modules.
module.exports = {
  'env': {
    'es2021': true,
    'node': true
  },
  'extends': 'eslint:recommended',
  'overrides': [
  ],
  'parserOptions': {
    'ecmaVersion': 'latest',
    'sourceType': 'module'
  },
  'rules': {
    'indent': [
      'error',
      2
    ],
    'linebreak-style': [
      'error',
      'unix'
    ],
    'quotes': [
      'error',
      'single'
    ],
    'semi': [
      'error',
      'never'
    ],
    'eqeqeq': 2,
    'no-return-await': 1,
    'no-var': 2,
    'prefer-const': 1,
    'yoda': 2,
    'no-trailing-spaces': 1,
    'eol-last': [1, 'always'],
    // Unused vars are a warning; prefix with _ to intentionally ignore.
    'no-unused-vars': [
      1,
      {
        'args': 'all',
        'argsIgnorePattern': '^_',
        'ignoreRestSiblings': true
      },
    ],
    'arrow-parens': [
      'error',
      'as-needed'
    ],
  }
}

View file

@ -1,12 +0,0 @@
.gcloudignore
.git
.gitignore
.env
node_modules/
.parcel-cache
res
routes
swagger.yaml

View file

@ -1,8 +0,0 @@
node_modules
dist
.parcel-cache
.env
npm-debug.log*
yarn-debug.log*
yarn-error.log*

View file

@ -1,7 +0,0 @@
runtime: nodejs16
service: api
handlers:
- url: /.*
secure: always
redirect_http_response_code: 301
script: auto

View file

@ -1,9 +0,0 @@
cron:
- description: "clean up old events"
url: /tasks/cleanup
schedule: every monday 09:00
target: api
- description: "remove people with an event id that no longer exists"
url: /tasks/removeOrphans
schedule: 1st wednesday of month 09:00
target: api

View file

@ -1,61 +0,0 @@
// Express entry point for the Datastore-backed Crab Fit API.
import { config } from 'dotenv'
import { Datastore } from '@google-cloud/datastore'
import express from 'express'
import cors from 'cors'

import packageJson from './package.json'
import {
  stats,
  getEvent,
  createEvent,
  getPeople,
  createPerson,
  login,
  updatePerson,
  taskCleanup,
  taskRemoveOrphans,
} from './routes'

config()

const app = express()
const port = 8080

// Only allow the production frontend (or the local dev server) as an origin.
const corsOptions = {
  origin: process.env.NODE_ENV === 'production' ? 'https://crab.fit' : 'http://localhost:5173',
}

const datastore = new Datastore({
  keyFilename: process.env.GOOGLE_APPLICATION_CREDENTIALS,
})

app.use(express.json())

// Attach the datastore client and environment-specific entity kind names to
// every request so route handlers need no configuration of their own.
app.use((req, _res, next) => {
  req.datastore = datastore
  req.types = {
    event: process.env.NODE_ENV === 'production' ? 'Event' : 'DevEvent',
    person: process.env.NODE_ENV === 'production' ? 'Person' : 'DevPerson',
    stats: process.env.NODE_ENV === 'production' ? 'Stats' : 'DevStats',
  }
  next()
})

app.options('*', cors(corsOptions))
app.use(cors(corsOptions))

app.get('/', (_req, res) => res.send(`Crabfit API v${packageJson.version}`))

app.get('/stats', stats)
app.get('/event/:eventId', getEvent)
app.post('/event', createEvent)
app.get('/event/:eventId/people', getPeople)
app.post('/event/:eventId/people', createPerson)
app.post('/event/:eventId/people/:personName', login)
app.patch('/event/:eventId/people/:personName', updatePerson)

// Tasks
app.get('/tasks/cleanup', taskCleanup)
app.get('/tasks/removeOrphans', taskRemoveOrphans)

app.listen(port, () => {
  console.log(`Crabfit API listening at http://localhost:${port} in ${process.env.NODE_ENV === 'production' ? 'prod' : 'dev'} mode`)
})

View file

@ -1,34 +0,0 @@
{
"name": "crabfit-backend",
"version": "2.0.0",
"description": "API for Crabfit",
"author": "Ben Grant",
"license": "GPL-3.0-only",
"private": true,
"source": "index.js",
"main": "dist/index.js",
"engines": {
"node": ">=12.0.0"
},
"scripts": {
"build:dev": "NODE_ENV=development parcel build --no-cache",
"dev": "rm -rf .parcel-cache dist && NODE_ENV=development nodemon --exec \"yarn build:dev && yarn start\" --watch routes --watch res --watch index.js",
"build": "parcel build",
"start": "node ./dist/index.js",
"lint": "eslint index.js ./routes"
},
"dependencies": {
"@google-cloud/datastore": "^7.0.0",
"bcrypt": "^5.0.1",
"cors": "^2.8.5",
"dayjs": "^1.11.5",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"punycode": "^2.1.1"
},
"devDependencies": {
"eslint": "^8.22.0",
"nodemon": "^2.0.19",
"parcel": "^2.7.0"
}
}

View file

@ -1,201 +0,0 @@
[
"adorable",
"adventurous",
"aggressive",
"agreeable",
"alert",
"alive",
"amused",
"angry",
"annoyed",
"annoying",
"anxious",
"arrogant",
"ashamed",
"attractive",
"average",
"beautiful",
"better",
"bewildered",
"blue",
"blushing",
"bored",
"brainy",
"brave",
"breakable",
"bright",
"busy",
"calm",
"careful",
"cautious",
"charming",
"cheerful",
"clean",
"clear",
"clever",
"cloudy",
"clumsy",
"colorful",
"comfortable",
"concerned",
"confused",
"cooperative",
"courageous",
"crazy",
"creepy",
"crowded",
"curious",
"cute",
"dangerous",
"dark",
"defiant",
"delightful",
"depressed",
"determined",
"different",
"difficult",
"disgusted",
"distinct",
"disturbed",
"dizzy",
"doubtful",
"drab",
"dull",
"eager",
"easy",
"elated",
"elegant",
"embarrassed",
"enchanting",
"encouraging",
"energetic",
"enthusiastic",
"envious",
"evil",
"excited",
"expensive",
"exuberant",
"fair",
"faithful",
"famous",
"fancy",
"fantastic",
"fierce",
"fine",
"foolish",
"fragile",
"frail",
"frantic",
"friendly",
"frightened",
"funny",
"gentle",
"gifted",
"glamorous",
"gleaming",
"glorious",
"good",
"gorgeous",
"graceful",
"grumpy",
"handsome",
"happy",
"healthy",
"helpful",
"hilarious",
"homely",
"hungry",
"important",
"impossible",
"inexpensive",
"innocent",
"inquisitive",
"itchy",
"jealous",
"jittery",
"jolly",
"joyous",
"kind",
"lazy",
"light",
"lively",
"lonely",
"long",
"lovely",
"lucky",
"magnificent",
"misty",
"modern",
"motionless",
"muddy",
"mushy",
"mysterious",
"naughty",
"nervous",
"nice",
"nutty",
"obedient",
"obnoxious",
"odd",
"old-fashioned",
"open",
"outrageous",
"outstanding",
"panicky",
"perfect",
"plain",
"pleasant",
"poised",
"powerful",
"precious",
"prickly",
"proud",
"puzzled",
"quaint",
"real",
"relieved",
"scary",
"selfish",
"shiny",
"shy",
"silly",
"sleepy",
"smiling",
"smoggy",
"sparkling",
"splendid",
"spotless",
"stormy",
"strange",
"successful",
"super",
"talented",
"tame",
"tasty",
"tender",
"tense",
"terrible",
"thankful",
"thoughtful",
"thoughtless",
"tired",
"tough",
"uninterested",
"unsightly",
"unusual",
"upset",
"uptight",
"vast",
"victorious",
"vivacious",
"wandering",
"weary",
"wicked",
"wide-eyed",
"wild",
"witty",
"worried",
"worrisome",
"zany",
"zealous"
]

View file

@ -1,47 +0,0 @@
[
"American Horseshoe",
"Atlantic Ghost",
"Baja Elbow",
"Big Claw Purple Hermit",
"Coldwater Mole",
"Cuata Swim",
"Deepwater Frog",
"Dwarf Teardrop",
"Elegant Hermit",
"Flat Spider",
"Ghost",
"Globe Purse",
"Green",
"Halloween",
"Harbor Spider",
"Inflated Spider",
"Left Clawed Hermit",
"Lumpy Claw",
"Magnificent Hermit",
"Mexican Spider",
"Mouthless Land",
"Northern Lemon Rock",
"Pacific Arrow",
"Pacific Mole",
"Paco Box",
"Panamic Spider",
"Purple Shore",
"Red Rock",
"Red Swim",
"Red-leg Hermit",
"Robust Swim",
"Rough Swim",
"Sand Swim",
"Sally Lightfoot",
"Shamed-face Box",
"Shamed-face Heart Box",
"Shell",
"Small Arched Box",
"Southern Kelp",
"Spotted Box",
"Striated Mole",
"Striped Shore",
"Tropical Mole",
"Walking Rock",
"Yellow Shore"
]

View file

@ -1,84 +0,0 @@
import dayjs from 'dayjs'
import punycode from 'punycode/'

import adjectives from '../res/adjectives.json'
import crabs from '../res/crabs.json'

// Uppercase the first character of a string.
const capitalize = string => string.charAt(0).toUpperCase() + string.slice(1)

// Generate a random name based on an adjective and a crab species
const generateName = () =>
  `${capitalize(adjectives[Math.floor(Math.random() * adjectives.length)])} ${crabs[Math.floor(Math.random() * crabs.length)]} Crab`

// Generate a slug for the crab fit
const generateId = name => {
  // Punycode-encode so non-ASCII names can still yield an ASCII slug.
  let id = punycode.encode(name.trim().toLowerCase()).trim().replace(/[^A-Za-z0-9 ]/g, '').replace(/\s+/g, '-')
  // If the name reduced to nothing, fall back to a randomly generated name.
  if (id.replace(/-/g, '') === '') {
    id = generateName().trim().toLowerCase().replace(/[^A-Za-z0-9 ]/g, '').replace(/\s+/g, '-')
  }
  // Random 6-digit suffix to make collisions unlikely.
  const number = Math.floor(100000 + Math.random() * 900000)
  return `${id}-${number}`
}

// POST /event — creates an event (generating a name if blank and retrying the
// id until unique), then increments the eventCount stat.
const createEvent = async (req, res) => {
  const { event } = req.body

  try {
    const name = event.name.trim() === '' ? generateName() : event.name.trim()
    let eventId = generateId(name)
    const currentTime = dayjs().unix()

    // Check if the event ID already exists, and if so generate a new one
    let eventResult
    do {
      const query = req.datastore.createQuery(req.types.event)
        .select('__key__')
        .filter('__key__', req.datastore.key([req.types.event, eventId]))
      eventResult = (await req.datastore.runQuery(query))[0][0]
      if (eventResult !== undefined) {
        eventId = generateId(name)
      }
    } while (eventResult !== undefined)

    const entity = {
      key: req.datastore.key([req.types.event, eventId]),
      data: {
        name: name,
        created: currentTime,
        times: event.times,
        timezone: event.timezone,
      },
    }

    await req.datastore.insert(entity)

    // Respond before the stats bookkeeping below; a stats failure after this
    // point is logged but doesn't affect the client.
    res.status(201).send({
      id: eventId,
      name: name,
      created: currentTime,
      times: event.times,
      timezone: event.timezone,
    })

    // Update stats
    const eventCountResult = (await req.datastore.get(req.datastore.key([req.types.stats, 'eventCount'])))[0] || null
    if (eventCountResult) {
      await req.datastore.upsert({
        ...eventCountResult,
        value: eventCountResult.value + 1,
      })
    } else {
      await req.datastore.insert({
        key: req.datastore.key([req.types.stats, 'eventCount']),
        data: { value: 1 },
      })
    }
  } catch (e) {
    console.error(e)
    res.status(400).send({ error: 'An error occurred while creating the event' })
  }
}

export default createEvent

View file

@ -1,65 +0,0 @@
import dayjs from 'dayjs'
import bcrypt from 'bcrypt'

// POST /event/:eventId/people — adds a person to an event (rejecting
// duplicates by name), then increments the personCount stat.
const createPerson = async (req, res) => {
  const { eventId } = req.params
  const { person } = req.body

  try {
    const event = (await req.datastore.get(req.datastore.key([req.types.event, eventId])))[0]
    const query = req.datastore.createQuery(req.types.person)
      .filter('eventId', eventId)
      .filter('name', person.name)
    const personResult = (await req.datastore.runQuery(query))[0][0]

    if (event) {
      // Only create if no person with this name already exists in the event.
      if (person && personResult === undefined) {
        const currentTime = dayjs().unix()

        // If password
        let hash = null
        if (person.password) {
          hash = await bcrypt.hash(person.password, 10)
        }

        const entity = {
          key: req.datastore.key(req.types.person),
          data: {
            name: person.name.trim(),
            password: hash,
            eventId: eventId,
            created: currentTime,
            availability: [],
          },
        }

        await req.datastore.insert(entity)
        res.status(201).send({ success: 'Created' })

        // Update stats
        const personCountResult = (await req.datastore.get(req.datastore.key([req.types.stats, 'personCount'])))[0] || null
        if (personCountResult) {
          await req.datastore.upsert({
            ...personCountResult,
            value: personCountResult.value + 1,
          })
        } else {
          await req.datastore.insert({
            key: req.datastore.key([req.types.stats, 'personCount']),
            data: { value: 1 },
          })
        }
      } else {
        res.status(400).send({ error: 'Unable to create person' })
      }
    } else {
      res.status(404).send({ error: 'Event does not exist' })
    }
  } catch (e) {
    console.error(e)
    res.status(400).send({ error: 'An error occurred while creating the person' })
  }
}

export default createPerson

View file

@ -1,29 +0,0 @@
import dayjs from 'dayjs'

// GET /event/:eventId — returns an event and stamps its last-visited time
// (used by the cleanup cron to expire stale events).
const getEvent = async (req, res) => {
  const { eventId } = req.params

  try {
    const event = (await req.datastore.get(req.datastore.key([req.types.event, eventId])))[0]

    if (event) {
      res.send({
        id: eventId,
        ...event,
      })

      // Update last visited time
      await req.datastore.upsert({
        ...event,
        visited: dayjs().unix()
      })
    } else {
      res.status(404).send({ error: 'Event not found' })
    }
  } catch (e) {
    console.error(e)
    res.status(404).send({ error: 'Event not found' })
  }
}

export default getEvent

View file

@ -1,20 +0,0 @@
// GET /event/:eventId/people — lists an event's people, stripping the
// password field before sending.
const getPeople = async (req, res) => {
  const { eventId } = req.params

  try {
    const query = req.datastore.createQuery(req.types.person).filter('eventId', eventId)
    let people = (await req.datastore.runQuery(query))[0]
    // Project to the public fields only (drops password / eventId).
    people = people.map(person => ({
      name: person.name,
      availability: person.availability,
      created: person.created,
    }))

    res.send({ people })
  } catch (e) {
    console.error(e)
    res.status(404).send({ error: 'Person not found' })
  }
}

export default getPeople

View file

@ -1,10 +0,0 @@
// Barrel file re-exporting every route handler for index.js to register.
export { default as stats } from './stats'
export { default as getEvent } from './getEvent'
export { default as createEvent } from './createEvent'
export { default as getPeople } from './getPeople'
export { default as createPerson } from './createPerson'
export { default as login } from './login'
export { default as updatePerson } from './updatePerson'
export { default as taskCleanup } from './taskCleanup'
export { default as taskRemoveOrphans } from './taskRemoveOrphans'

View file

@ -1,35 +0,0 @@
import bcrypt from 'bcrypt'

// POST /event/:eventId/people/:personName — "login" to a person entry:
// verifies the password (when one is set) and returns the availability.
const login = async (req, res) => {
  const { eventId, personName } = req.params
  const { person } = req.body

  try {
    const query = req.datastore.createQuery(req.types.person)
      .filter('eventId', eventId)
      .filter('name', personName)
    const personResult = (await req.datastore.runQuery(query))[0][0]

    if (personResult) {
      // Password check only applies when the stored entry has one.
      if (personResult.password) {
        const passwordsMatch = person && person.password && await bcrypt.compare(person.password, personResult.password)
        if (!passwordsMatch) {
          return res.status(401).send({ error: 'Incorrect password' })
        }
      }
      res.send({
        name: personName,
        availability: personResult.availability,
        created: personResult.created,
      })
    } else {
      res.status(404).send({ error: 'Person does not exist' })
    }
  } catch (e) {
    console.error(e)
    res.status(400).send({ error: 'An error occurred' })
  }
}

export default login

View file

@ -1,29 +0,0 @@
import packageJson from '../package.json'

// GET /stats — returns event/person counters (null when not yet recorded)
// and the API version. Counter read failures are logged, not fatal.
const stats = async (req, res) => {
  let eventCount = null
  let personCount = null

  try {
    const eventResult = (await req.datastore.get(req.datastore.key([req.types.stats, 'eventCount'])))[0] || null
    const personResult = (await req.datastore.get(req.datastore.key([req.types.stats, 'personCount'])))[0] || null

    if (eventResult) {
      eventCount = eventResult.value
    }
    if (personResult) {
      personCount = personResult.value
    }
  } catch (e) {
    console.error(e)
  }

  res.send({
    eventCount,
    personCount,
    version: packageJson.version,
  })
}

export default stats

View file

@ -1,48 +0,0 @@
import dayjs from 'dayjs'

// GET /tasks/cleanup (App Engine cron only) — deletes events not visited in
// over 3 months, together with their people.
const taskCleanup = async (req, res) => {
  // App Engine sets this header on genuine cron requests; reject others.
  if (req.header('X-Appengine-Cron') === undefined) {
    return res.status(400).send({ error: 'This task can only be run from a cron job' })
  }

  const threeMonthsAgo = dayjs().subtract(3, 'month').unix()
  console.log(`Running cleanup task at ${dayjs().format('h:mma D MMM YYYY')}`)

  try {
    // Fetch events that haven't been visited in over 3 months
    const eventQuery = req.datastore.createQuery(req.types.event).filter('visited', '<', threeMonthsAgo)
    const oldEvents = (await req.datastore.runQuery(eventQuery))[0]

    if (oldEvents && oldEvents.length > 0) {
      const oldEventIds = oldEvents.map(e => e[req.datastore.KEY].name)
      console.log(`Found ${oldEventIds.length} events to remove`)

      // Fetch availabilities linked to the events discovered
      let peopleDiscovered = 0
      await Promise.all(oldEventIds.map(async eventId => {
        const peopleQuery = req.datastore.createQuery(req.types.person).filter('eventId', eventId)
        const oldPeople = (await req.datastore.runQuery(peopleQuery))[0]
        if (oldPeople && oldPeople.length > 0) {
          peopleDiscovered += oldPeople.length
          await req.datastore.delete(oldPeople.map(person => person[req.datastore.KEY]))
        }
      }))

      await req.datastore.delete(oldEvents.map(event => event[req.datastore.KEY]))
      console.log(`Cleanup successful: ${oldEventIds.length} events and ${peopleDiscovered} people removed`)
      res.sendStatus(200)
    } else {
      console.log('Found 0 events to remove, ending cleanup')
      res.sendStatus(404)
    }
  } catch (e) {
    console.error(e)
    res.sendStatus(404)
  }
}

export default taskCleanup

View file

@ -1,48 +0,0 @@
import dayjs from 'dayjs'

// GET /tasks/removeOrphans (App Engine cron only) — deletes people older than
// 3 months whose event no longer exists.
const taskRemoveOrphans = async (req, res) => {
  // App Engine sets this header on genuine cron requests; reject others.
  if (req.header('X-Appengine-Cron') === undefined) {
    return res.status(400).send({ error: 'This task can only be run from a cron job' })
  }

  const threeMonthsAgo = dayjs().subtract(3, 'month').unix()
  console.log(`Running orphan removal task at ${dayjs().format('h:mma D MMM YYYY')}`)

  try {
    // Fetch people that are older than 3 months
    const peopleQuery = req.datastore.createQuery(req.types.person).filter('created', '<', threeMonthsAgo)
    const oldPeople = (await req.datastore.runQuery(peopleQuery))[0]

    if (oldPeople && oldPeople.length > 0) {
      console.log(`Found ${oldPeople.length} people older than 3 months, checking for events`)

      // Fetch events linked to the people discovered
      let peopleWithoutEvents = 0
      await Promise.all(oldPeople.map(async person => {
        const event = (await req.datastore.get(req.datastore.key([req.types.event, person.eventId])))[0]
        if (!event) {
          peopleWithoutEvents++
          await req.datastore.delete(person[req.datastore.KEY])
        }
      }))

      if (peopleWithoutEvents > 0) {
        console.log(`Orphan removal successful: ${peopleWithoutEvents} people removed`)
        res.sendStatus(200)
      } else {
        console.log('Found 0 people without events, ending orphan removal')
        res.sendStatus(404)
      }
    } else {
      console.log('Found 0 people older than 3 months, ending orphan removal')
      res.sendStatus(404)
    }
  } catch (e) {
    console.error(e)
    res.sendStatus(404)
  }
}

export default taskRemoveOrphans

View file

@ -1,40 +0,0 @@
import bcrypt from 'bcrypt'

// PATCH /event/:eventId/people/:personName — updates a person's availability,
// verifying their password first when the stored entry has one.
const updatePerson = async (req, res) => {
  const { eventId, personName } = req.params
  const { person } = req.body

  try {
    const query = req.datastore.createQuery(req.types.person)
      .filter('eventId', eventId)
      .filter('name', personName)
    const personResult = (await req.datastore.runQuery(query))[0][0]

    if (personResult) {
      if (person && person.availability) {
        // Password check only applies when the stored entry has one.
        if (personResult.password) {
          const passwordsMatch = person.password && await bcrypt.compare(person.password, personResult.password)
          if (!passwordsMatch) {
            return res.status(401).send({ error: 'Incorrect password' })
          }
        }
        await req.datastore.upsert({
          ...personResult,
          availability: person.availability,
        })
        res.status(200).send({ success: 'Updated' })
      } else {
        res.status(400).send({ error: 'Availability must be set' })
      }
    } else {
      res.status(404).send({ error: 'Person not found' })
    }
  } catch (e) {
    console.error(e)
    // Fix: every other route returns a JSON `{ error }` object on failure;
    // this previously sent a bare string, which clients can't parse uniformly.
    res.status(400).send({ error: 'An error occurred' })
  }
}

export default updatePerson

View file

@ -1,245 +0,0 @@
# Cloud Endpoints (Swagger 2.0) definition for the Crab Fit API.
swagger: "2.0"
info:
  title: "Crab Fit"
  description: "Compare and align schedules to find a time that works for everyone"
  version: "1.0.0"
# API is served from the "api" service of the crabfit App Engine project
host: "api-dot-crabfit.appspot.com"
x-google-endpoints:
  - name: "api-dot-crabfit.appspot.com"
    # Allow cross-origin requests (frontend is hosted on a different origin)
    allowCors: true
schemes:
  - "https"
produces:
  - "application/json"
definitions:
  # An event: a named set of candidate times people mark availability against
  Event:
    type: "object"
    properties:
      id:
        type: "string"
      name:
        type: "string"
      timezone:
        type: "string"
      created:
        type: "integer"
      times:
        type: "array"
        items:
          type: "string"
  # A person's response to an event: name plus the times they can attend
  Person:
    type: "object"
    properties:
      name:
        type: "string"
      availability:
        type: "array"
        items:
          type: "string"
      created:
        type: "integer"
paths:
  "/stats":
    get:
      summary: "Return stats for crabfit"
      operationId: "getStats"
      responses:
        200:
          description: "OK"
          schema:
            type: "object"
            properties:
              eventCount:
                type: "integer"
              personCount:
                type: "integer"
              version:
                type: "string"
  "/event/{eventId}":
    get:
      summary: "Return an event details"
      operationId: "getEvent"
      parameters:
        - in: "path"
          name: "eventId"
          required: true
          type: "string"
          description: "The ID of the event"
      responses:
        200:
          description: "OK"
          schema:
            $ref: '#/definitions/Event'
        404:
          description: "Not found"
  "/event":
    post:
      summary: "Create a new event"
      operationId: "postEvent"
      parameters:
        - in: "body"
          name: "event"
          required: true
          schema:
            type: "object"
            properties:
              name:
                type: "string"
              timezone:
                type: "string"
              times:
                type: "array"
                items:
                  type: "string"
          description: "New event details"
      responses:
        201:
          description: "Created"
          schema:
            $ref: '#/definitions/Event'
        400:
          description: "Invalid data"
  "/event/{eventId}/people":
    get:
      summary: "Get availabilities for an event"
      operationId: "getPeople"
      parameters:
        - in: "path"
          name: "eventId"
          required: true
          type: "string"
          description: "The ID of the event"
      responses:
        200:
          description: "OK"
          schema:
            type: "object"
            properties:
              people:
                type: "array"
                items:
                  $ref: "#/definitions/Person"
        404:
          description: "Not found"
    post:
      summary: "Add a new person to the event"
      operationId: "postPeople"
      parameters:
        - in: "path"
          name: "eventId"
          required: true
          type: "string"
          description: "The ID of the event"
        - in: "body"
          name: "person"
          required: true
          schema:
            type: "object"
            properties:
              name:
                type: "string"
              password:
                type: "string"
          description: "New person details"
      responses:
        201:
          description: "Created"
        404:
          description: "Not found"
        400:
          description: "Invalid data"
  "/event/{eventId}/people/{personName}":
    # NOTE(review): login is a POST so the password travels in the request body
    post:
      summary: "Login as this person"
      operationId: "getPerson"
      parameters:
        - in: "path"
          name: "eventId"
          required: true
          type: "string"
          description: "The ID of the event"
        - in: "path"
          name: "personName"
          required: true
          type: "string"
          description: "The name of the person"
        - in: "body"
          name: "person"
          required: false
          schema:
            type: "object"
            properties:
              password:
                type: "string"
          description: "Login details"
      responses:
        200:
          description: "OK"
          schema:
            $ref: "#/definitions/Person"
        401:
          description: "Incorrect password"
        404:
          description: "Not found"
    patch:
      summary: "Update this person's availabilities"
      operationId: "patchPerson"
      parameters:
        - in: "path"
          name: "eventId"
          required: true
          type: "string"
          description: "The ID of the event"
        - in: "path"
          name: "personName"
          required: true
          type: "string"
          description: "The name of the person"
        - in: "body"
          name: "person"
          required: true
          schema:
            type: "object"
            properties:
              password:
                type: "string"
              availability:
                type: "array"
                items:
                  type: "string"
          description: "Updated person details"
      responses:
        200:
          description: "OK"
        401:
          description: "Incorrect password"
        404:
          description: "Not found"
        400:
          description: "Invalid data"
  # Maintenance endpoints, intended to be triggered by App Engine cron only
  "/tasks/cleanup":
    get:
      summary: "Delete events inactive for more than 3 months"
      operationId: "taskCleanup"
      tags:
        - tasks
      responses:
        200:
          description: "OK"
        404:
          description: "Not found"
        400:
          description: "Not called from a cron job"
  "/tasks/removeOrphans":
    get:
      summary: "Deletes people if the event they were created under no longer exists"
      operationId: "taskRemoveOrphans"
      tags:
        - tasks
      responses:
        200:
          description: "OK"
        404:
          description: "Not found"
        400:
          description: "Not called from a cron job"

File diff suppressed because it is too large Load diff

View file

Before

Width:  |  Height:  |  Size: 104 KiB

After

Width:  |  Height:  |  Size: 104 KiB

Some files were not shown because too many files have changed in this diff Show more