Skip to content

Commit

Permalink
[feature] Use PrefixConfiguration in converters
Browse files Browse the repository at this point in the history
  • Loading branch information
woshilapin committed Jul 23, 2020
1 parent ff3a2fe commit 55661eb
Show file tree
Hide file tree
Showing 6 changed files with 155 additions and 124 deletions.
25 changes: 15 additions & 10 deletions examples/gtfs_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,23 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>

use serde_json::json;
use std::path::Path;
use transit_model::Result;
use std::collections::BTreeMap;
use transit_model::{
gtfs,
objects::{Contributor, Dataset},
Result,
};

fn run() -> Result<()> {
let configuration: transit_model::gtfs::Configuration<&Path> =
transit_model::gtfs::Configuration {
config_path: None,
prefix: None,
on_demand_transport: false,
on_demand_transport_comment: None,
};
let objects = transit_model::gtfs::read_from_path(".", configuration)?;
let configuration: gtfs::Configuration = gtfs::Configuration {
contributor: Contributor::default(),
dataset: Dataset::default(),
feed_infos: BTreeMap::new(),
prefix_conf: None,
on_demand_transport: false,
on_demand_transport_comment: None,
};
let objects = gtfs::read_from_path(".", configuration)?;
let json_objs = json!(objects);
println!("{:?}", json_objs.to_string());
Ok(())
Expand Down
9 changes: 6 additions & 3 deletions gtfs2netexfr/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ use slog::{slog_o, Drain};
use slog_async::OverflowStrategy;
use std::path::PathBuf;
use structopt::StructOpt;
use transit_model::Result;
use transit_model::{read_utils, Result};

#[derive(Debug, StructOpt)]
#[structopt(name = "gtfs2netexfr", about = "Convert a GTFS to NeTEx France.")]
Expand Down Expand Up @@ -96,9 +96,12 @@ fn init_logger() -> slog_scope::GlobalLoggerGuard {
fn run(opt: Opt) -> Result<()> {
info!("Launching gtfs2netexfr...");

let (contributor, dataset, feed_infos) = read_utils::read_config(opt.config)?;
let configuration = transit_model::gtfs::Configuration {
config_path: opt.config,
prefix: None,
contributor,
dataset,
feed_infos,
prefix_conf: None,
on_demand_transport: opt.odt,
on_demand_transport_comment: opt.odt_comment,
};
Expand Down
14 changes: 11 additions & 3 deletions gtfs2ntfs/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ use slog::{slog_o, Drain};
use slog_async::OverflowStrategy;
use std::path::PathBuf;
use structopt::StructOpt;
use transit_model::{transfers::generates_transfers, Result};
use transit_model::{read_utils, transfers::generates_transfers, PrefixConfiguration, Result};

#[derive(Debug, StructOpt)]
#[structopt(name = "gtfs2ntfs", about = "Convert a GTFS to an NTFS.")]
Expand Down Expand Up @@ -99,9 +99,17 @@ fn init_logger() -> slog_scope::GlobalLoggerGuard {
fn run(opt: Opt) -> Result<()> {
info!("Launching gtfs2ntfs...");

let (contributor, dataset, feed_infos) = read_utils::read_config(opt.config)?;
let mut prefix_conf = PrefixConfiguration::default();
if let Some(data_prefix) = opt.prefix {
prefix_conf.set_data_prefix(data_prefix);
}
prefix_conf.set_dataset_id(&dataset.id);
let configuration = transit_model::gtfs::Configuration {
config_path: opt.config,
prefix: opt.prefix,
contributor,
dataset,
feed_infos,
prefix_conf: Some(prefix_conf),
on_demand_transport: opt.odt,
on_demand_transport_comment: opt.odt_comment,
};
Expand Down
55 changes: 27 additions & 28 deletions src/gtfs/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,17 +21,15 @@ use crate::{
calendars::{manage_calendars, write_calendar_dates},
gtfs::read::EquipmentList,
model::{Collections, Model},
objects,
objects::{Availability, StopPoint, StopType, Time},
objects::{self, Availability, Contributor, Dataset, StopPoint, StopType, Time},
read_utils,
utils::*,
validity_period, AddPrefix, PrefixConfiguration, Result,
};
use derivative::Derivative;
use log::info;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::path::Path;
use std::{collections::BTreeMap, fmt, path::Path};
use typed_index_collection::{CollectionWithId, Idx};

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
Expand Down Expand Up @@ -257,33 +255,42 @@ struct Shape {
}

///parameters consolidation
#[derive(Clone)]
pub struct Configuration<P: AsRef<Path>> {
/// path to configuration file
pub config_path: Option<P>,
pub struct Configuration {
/// The Contributor providing the Dataset
pub contributor: Contributor,
/// Describe the Dataset being parsed
pub dataset: Dataset,
/// Additional key-values for the 'feed_infos.txt'
pub feed_infos: BTreeMap<String, String>,
/// used to prefix objects
pub prefix: Option<String>,
pub prefix_conf: Option<PrefixConfiguration>,
/// stop time precision management
pub on_demand_transport: bool,
/// on demand transport comment template
pub on_demand_transport_comment: Option<String>,
}

fn read<H, P: AsRef<Path>>(file_handler: &mut H, configuration: Configuration<P>) -> Result<Model>
fn read<H>(file_handler: &mut H, configuration: Configuration) -> Result<Model>
where
for<'a> &'a mut H: read_utils::FileHandler,
{
let mut collections = Collections::default();
let mut equipments = EquipmentList::default();

manage_calendars(file_handler, &mut collections)?;
let Configuration {
contributor,
mut dataset,
feed_infos,
prefix_conf,
on_demand_transport,
on_demand_transport_comment,
} = configuration;

let (contributor, mut dataset, feed_infos) =
read_utils::read_config(configuration.config_path)?;
manage_calendars(file_handler, &mut collections)?;
validity_period::compute_dataset_validity_period(&mut dataset, &collections.calendars)?;

collections.contributors = CollectionWithId::new(vec![contributor])?;
collections.datasets = CollectionWithId::new(vec![dataset])?;
collections.contributors = CollectionWithId::from(contributor);
collections.datasets = CollectionWithId::from(dataset);
collections.feed_infos = feed_infos;

let (networks, companies) = read::read_agency(file_handler)?;
Expand All @@ -303,17 +310,15 @@ where
read::manage_stop_times(
&mut collections,
file_handler,
configuration.on_demand_transport,
configuration.on_demand_transport_comment,
on_demand_transport,
on_demand_transport_comment,
)?;
read::manage_frequencies(&mut collections, file_handler)?;
read::manage_pathways(&mut collections, file_handler)?;
collections.levels = read_utils::read_opt_collection(file_handler, "levels.txt")?;

//add prefixes
if let Some(prefix) = configuration.prefix {
let mut prefix_conf = PrefixConfiguration::default();
prefix_conf.set_data_prefix(prefix);
if let Some(prefix_conf) = prefix_conf {
collections.prefix(&prefix_conf);
}

Expand All @@ -331,10 +336,7 @@ where
/// To namespace the dataset, configure a `PrefixConfiguration` in
/// `Configuration::prefix_conf`; it will be prepended to every
/// identifier. By default, no prefix is added to the identifiers.
pub fn read_from_path<P: AsRef<Path>>(
p: P,
configuration: Configuration<impl AsRef<Path>>,
) -> Result<Model> {
pub fn read_from_path<P: AsRef<Path>>(p: P, configuration: Configuration) -> Result<Model> {
let mut file_handle = read_utils::PathFileHandler::new(p.as_ref().to_path_buf());
read(&mut file_handle, configuration)
}
Expand All @@ -349,10 +351,7 @@ pub fn read_from_path<P: AsRef<Path>>(
/// To namespace the dataset, configure a `PrefixConfiguration` in
/// `Configuration::prefix_conf`; it will be prepended to every
/// identifier. By default, no prefix is added to the identifiers.
pub fn read_from_zip<P: AsRef<Path>>(
path: P,
configuration: Configuration<impl AsRef<Path>>,
) -> Result<Model> {
pub fn read_from_zip<P: AsRef<Path>>(path: P, configuration: Configuration) -> Result<Model> {
let mut file_handler = read_utils::ZipHandler::new(path)?;
read(&mut file_handler, configuration)
}
Expand Down
Loading

0 comments on commit 55661eb

Please sign in to comment.