44 changes: 0 additions & 44 deletions editoast/database/src/tables.rs
@@ -862,47 +862,6 @@ diesel::table! {
}
}

diesel::table! {
    use diesel::sql_types::*;
    use postgis_diesel::sql_types::*;
    use super::sql_types::TrainMainCategory;

    train_schedule (id) {
        id -> Int8,
        #[max_length = 128]
        train_name -> Varchar,
        labels -> Array<Nullable<Text>>,
        #[max_length = 128]
        rolling_stock_name -> Varchar,
        timetable_id -> Int8,
        start_time -> Timestamptz,
        schedule -> Jsonb,
        margins -> Jsonb,
        initial_speed -> Float8,
        comfort -> Int2,
        path -> Jsonb,
        constraint_distribution -> Int2,
        #[max_length = 128]
        speed_limit_tag -> Nullable<Varchar>,
        power_restrictions -> Jsonb,
        options -> Jsonb,
        main_category -> Nullable<TrainMainCategory>,
        #[max_length = 255]
        sub_category -> Nullable<Varchar>,
    }
}

diesel::table! {
    use diesel::sql_types::*;
    use postgis_diesel::sql_types::*;

    train_schedule_round_trips (id) {
        id -> Int8,
        left_id -> Int8,
        right_id -> Nullable<Int8>,
    }
}

diesel::table! {
    use diesel::sql_types::*;
    use postgis_diesel::sql_types::*;
@@ -996,7 +955,6 @@ diesel::joinable!(stdcm_search_environment -> timetable (timetable_id));
diesel::joinable!(stdcm_search_environment -> work_schedule_group (work_schedule_group_id));
diesel::joinable!(study -> project (project_id));
diesel::joinable!(temporary_speed_limit -> temporary_speed_limit_group (temporary_speed_limit_group_id));
diesel::joinable!(train_schedule -> timetable (timetable_id));
diesel::joinable!(train_schedule_set -> catalog_entry (catalog_entry_id));
diesel::joinable!(work_schedule -> work_schedule_group (work_schedule_group_id));

@@ -1056,8 +1014,6 @@ diesel::allow_tables_to_appear_in_same_query!(
    temporary_speed_limit_group,
    timetable,
    towed_rolling_stock,
    train_schedule,
    train_schedule_round_trips,
    train_schedule_set,
    work_schedule,
    work_schedule_group,
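For readers less used to Diesel's schema DSL: each `diesel::table!` block deleted above was a compile-time description of a Postgres table, and rows are read back into plain structs whose field types mirror the declared SQL types (`Int8` maps to `i64`, `Nullable<Int8>` to `Option<i64>`). A minimal, self-contained sketch of that mapping, reusing the shape of the removed `train_schedule_round_trips` table (toy code, not from this repository):

```rust
use diesel::pg::PgConnection;
use diesel::prelude::*;

// Toy table with the same shape as the removed `train_schedule_round_trips`
// declaration.
diesel::table! {
    round_trips (id) {
        id -> Int8,
        left_id -> Int8,
        right_id -> Nullable<Int8>,
    }
}

// Rows are read into a plain struct; field names and types must line up
// with the column list above.
#[derive(Queryable, Selectable, Debug)]
#[diesel(table_name = round_trips)]
struct RoundTrip {
    id: i64,
    left_id: i64,
    right_id: Option<i64>,
}

// Loads every row of the toy table (synchronous Diesel for brevity).
fn list_all(conn: &mut PgConnection) -> QueryResult<Vec<RoundTrip>> {
    round_trips::table.select(RoundTrip::as_select()).load(conn)
}
```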
1 change: 0 additions & 1 deletion editoast/database/src/tables_patch.rs
@@ -1,5 +1,4 @@
use crate::tables::*;

// Add missing joinable macros since diesel can not generate them automatically
diesel::joinable!(train_schedule_round_trips -> train_schedule (left_id));
diesel::joinable!(paced_train_round_trips -> paced_train (left_id));
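The manual `joinable!` entries in this patch file are needed because each `*_round_trips` table has two foreign keys (`left_id`, `right_id`) to the same parent table: Diesel's schema generator skips `joinable!` in that case, and `joinable!` can only record one default join per table pair, so the other key must be joined with an explicit ON clause. A self-contained illustration of that constraint (toy tables and names, not project code; the real `right_id` is nullable, which would additionally require `.nullable()` in the comparison):

```rust
use diesel::pg::PgConnection;
use diesel::prelude::*;

diesel::table! {
    train (id) {
        id -> Int8,
    }
}

diesel::table! {
    round_trips (id) {
        id -> Int8,
        left_id -> Int8,
        // Kept non-null here to keep the sketch short.
        right_id -> Int8,
    }
}

// Only one default join per table pair can be declared...
diesel::joinable!(round_trips -> train (left_id));
diesel::allow_tables_to_appear_in_same_query!(round_trips, train);

fn outbound_train_ids(conn: &mut PgConnection) -> QueryResult<Vec<i64>> {
    // ...so this implicit join goes through `left_id`.
    round_trips::table
        .inner_join(train::table)
        .select(train::id)
        .load(conn)
}

fn return_train_ids(conn: &mut PgConnection) -> QueryResult<Vec<i64>> {
    // The second foreign key has no `joinable!` entry, hence the explicit ON.
    round_trips::table
        .inner_join(train::table.on(round_trips::right_id.eq(train::id)))
        .select(train::id)
        .load(conn)
}
```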
2 changes: 0 additions & 2 deletions editoast/editoast_models/src/lib.rs
@@ -18,7 +18,6 @@ pub mod tags;
pub mod temporary_speed_limits;
pub mod timetable;
pub mod towed_rolling_stock;
pub mod train_schedule;
pub mod work_schedules;

// Export all Model at crate root
@@ -35,7 +34,6 @@ pub use tags::Tags;
pub use temporary_speed_limits::TemporarySpeedLimit;
pub use temporary_speed_limits::TemporarySpeedLimitGroup;
pub use towed_rolling_stock::TowedRollingStock;
pub use train_schedule::TrainSchedule;
pub use work_schedules::WorkSchedule;
pub use work_schedules::WorkScheduleGroup;

@@ -7,11 +7,11 @@ use serde::Serialize;
use utoipa::ToSchema;

use database::DbConnection;
use database::tables::paced_train;
use database::tables::project;
use database::tables::rolling_stock;
use database::tables::scenario;
use database::tables::study;
use database::tables::train_schedule;

use super::RollingStock;

@@ -50,12 +50,12 @@ impl RollingStock {
        &self,
        conn: &mut DbConnection,
    ) -> Result<Vec<ScenarioReference>, database::DatabaseError> {
        let schedules: Vec<_> = train_schedule::table
        let schedules: Vec<_> = paced_train::table
            .inner_join(
                rolling_stock::table.on(train_schedule::rolling_stock_name.eq(rolling_stock::name)),
                rolling_stock::table.on(paced_train::rolling_stock_name.eq(rolling_stock::name)),
            )
            .inner_join(
                (scenario::table.on(scenario::timetable_id.eq(train_schedule::timetable_id)))
                (scenario::table.on(scenario::timetable_id.eq(paced_train::timetable_id)))
                    .inner_join(study::table.inner_join(project::table)),
            )
            .select((
@@ -67,7 +67,7 @@
                scenario::name,
            ))
            .filter(rolling_stock::id.eq(self.id))
            .filter(train_schedule::id.is_not_null())
            .filter(paced_train::id.is_not_null())
            .load::<SchedulesFromRollingStock>(conn.write().await.deref_mut())
            .await?;
        let schedules = schedules.into_iter().map_into().collect();
71 changes: 0 additions & 71 deletions editoast/editoast_models/src/round_trips.rs
@@ -9,77 +9,6 @@ use crate::pagination::load_for_pagination;

use crate as editoast_models;

#[derive(Clone, Debug, Model)]
#[model(row(derive(QueryableByName)))]
#[model(table = database::tables::train_schedule_round_trips)]
#[model(gen(batch_ops = c))]
pub struct TrainScheduleRoundTrips {
    pub id: i64,
    /// First ID of the train schedule of this round trip
    pub left_id: i64,
    /// Second ID of the train schedule of this round trip
    /// This is `None` for one-way trains
    pub right_id: Option<i64>,
}

impl TrainScheduleRoundTrips {
    #[tracing::instrument(
        name = "list_paginated<TrainScheduleRoundTrips>",
        skip_all,
        err,
        fields(timetable_id, limit, offset)
    )]
    pub async fn list_paginated(
        conn: &mut DbConnection,
        timetable_id: i64,
        page: u64,
        page_size: u64,
    ) -> Result<(Vec<Self>, u64), database::DatabaseError> {
        use database::tables::train_schedule;
        use database::tables::train_schedule_round_trips;

        let query = train_schedule_round_trips::table
            .inner_join(train_schedule::table)
            .select(train_schedule_round_trips::all_columns)
            .filter(train_schedule::dsl::timetable_id.eq(timetable_id))
            .order_by(train_schedule_round_trips::id.asc());

        let (results, count): (Vec<TrainScheduleRoundTripsRow>, _) =
            load_for_pagination(conn, query, page, page_size).await?;
        let results: Vec<_> = results.into_iter().map_into().collect();

        Ok((results, count))
    }

    /// Deletes a batch of train schedule round trips given a list of train schedule IDs
    ///
    /// **IMPORTANT**: This function does not take ids of round trips, but rather the IDs of the train schedules
    #[tracing::instrument(
        name = "delete_batch_train_ids<TrainScheduleRoundTrips>",
        skip_all,
        err,
        fields(train_schedule_ids)
    )]
    pub async fn delete_batch_train_ids<I: IntoIterator<Item = i64> + Send>(
        conn: &mut DbConnection,
        train_schedule_ids: I,
    ) -> Result<usize, database::DatabaseError> {
        use database::tables::train_schedule_round_trips::dsl;
        use diesel::prelude::*;
        use diesel_async::RunQueryDsl;
        use std::ops::DerefMut;

        let ids = train_schedule_ids.into_iter().collect::<Vec<_>>();
        let nb = diesel::delete(
            database::tables::train_schedule_round_trips::table
                .filter(dsl::left_id.eq_any(&ids).or(dsl::right_id.eq_any(&ids))),
        )
        .execute(conn.write().await.deref_mut())
        .await?;
        Ok(nb)
    }
}

#[derive(Clone, Debug, Model)]
#[model(row(derive(QueryableByName)))]
#[model(table = database::tables::paced_train_round_trips)]
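The deleted `delete_batch_train_ids` used a filter shape worth keeping in mind for the surviving `PacedTrainRoundTrips` model: delete every row whose `left_id` or `right_id` appears in a list of IDs. A self-contained sketch of that pattern with a toy table and synchronous Diesel (the project itself goes through `diesel_async` and its own table module):

```rust
use diesel::pg::PgConnection;
use diesel::prelude::*;

diesel::table! {
    round_trips (id) {
        id -> Int8,
        left_id -> Int8,
        right_id -> Nullable<Int8>,
    }
}

/// Deletes every round trip that references any of `ids` on either side,
/// mirroring the filter shape of the removed `delete_batch_train_ids`.
fn delete_by_member_ids(conn: &mut PgConnection, ids: &[i64]) -> QueryResult<usize> {
    diesel::delete(
        round_trips::table
            .filter(round_trips::left_id.eq_any(ids).or(round_trips::right_id.eq_any(ids))),
    )
    .execute(conn)
}
```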
7 changes: 0 additions & 7 deletions editoast/editoast_models/src/scenario.rs
@@ -63,13 +63,6 @@ impl Scenario {
        Ok(infra_name)
    }

    pub async fn trains_count(
        &self,
        conn: &mut DbConnection,
    ) -> Result<i64, database::DatabaseError> {
        Timetable::trains_count(self.timetable_id, conn).await
    }

    pub async fn paced_trains_count(
        &self,
        conn: &mut DbConnection,