Commit a36c352b authored by Emmanuel Raviart

Add open-data-splitter.

parent e55ff1b4
Pipeline #104520 passed in 2 minutes and 11 seconds
Cargo.lock
@@ -265,6 +265,11 @@ name = "itoa"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "json"
version = "0.11.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "juniper"
version = "0.11.1"
@@ -753,6 +758,17 @@ dependencies = [
"tricoteuses_api_assemblee_config 0.1.0",
]
[[package]]
name = "tricoteuses_api_assemblee_open_data_splitter"
version = "0.1.0"
dependencies = [
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tricoteuses_api_assemblee_config 0.1.0",
]
[[package]]
name = "tricoteuses_api_assemblee_photos_fetcher"
version = "0.1.0"
@@ -958,6 +974,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e31a8281fc93ec9693494da65fbf28c0c2aa60a2eaec25dc58e2f31952e95edc"
"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358"
"checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b"
"checksum json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9ad0485404155f45cce53a40d4b2d6ac356418300daed05273d9e26f91c390be"
"checksum juniper 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d95deabb0bc5e15f508d48017b3791502e734a3d64c261d8ef9658899f04f351"
"checksum juniper_codegen 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f787e228fc7df6061a0b9474dc0223199c7b917a3fe2ba874272b077a1c8a46b"
"checksum juniper_rocket 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "383cf2d5352548241191b9c9d0e8aec5d8f3a49d98ae4051423226bfd064d07d"
Cargo.toml
@@ -3,6 +3,7 @@ members = [
"config-lib",
"data-lib",
"open-data-fetcher",
"open-data-splitter",
"photos-fetcher",
"web-server",
]
\ No newline at end of file
open-data-splitter/Cargo.toml (new file)
[package]
name = "tricoteuses_api_assemblee_open_data_splitter"
version = "0.1.0"
authors = ["Emmanuel Raviart <emmanuel@raviart.com>"]
edition = "2018"
[dependencies]
clap = "2.32"
json = "0.11"
lazy_static = "1"
regex = "1"
tricoteuses_api_assemblee_config = { path = "../config-lib" }
open-data-splitter/src/main.rs (new file)
#[macro_use]
extern crate lazy_static;
extern crate tricoteuses_api_assemblee_config as config;

use clap::{App, Arg};
use config::Verbosity;
use regex::Regex;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::Path;
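
/// Splits each monolithic JSON dump of the Assemblée nationale open data
/// into one small, pretty-printed JSON file per record (acteur, organe,
/// réunion, texte législatif, dossier, scrutin), under the data directory
/// given by the configuration file.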
fn main() {
    let matches = App::new("Tricoteuses API Assemblée")
        .version("1.0")
        .author("Emmanuel Raviart <emmanuel@raviart.com>")
        .about("GraphQL API to access open data of French Assemblée nationale")
        .arg(
            Arg::with_name("config")
                .short("c")
                .long("config")
                .value_name("FILE")
                .help("Sets a custom config file")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("v")
                .short("v")
                .multiple(true)
                .help("Sets the level of verbosity"),
        )
        .get_matches();
    let config_file_path = Path::new(matches.value_of("config").unwrap_or("../Config.toml"));
    let config = config::load(config_file_path);
    let verbosity = match matches.occurrences_of("v") {
        0 => Verbosity::Verbosity0,
        1 => Verbosity::Verbosity1,
        _ => Verbosity::Verbosity2,
    };
    let config_dir = config_file_path.parent().unwrap();
    let data_dir = config_dir.join(config.data.dir);
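
    // "Acteurs et organes": write one file per acteur and one per organe,
    // each named after its uid, under <dataset>/acteurs and <dataset>/organes.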
    for dataset in &config.acteurs_et_organes {
        let json_file_path = fs::canonicalize(data_dir.join(&dataset.filename)).unwrap();
        if verbosity != Verbosity::Verbosity0 {
            println!(
                "Splitting \"acteurs et organes\" file: {}...",
                json_file_path.to_string_lossy()
            );
        }
        let mut json_file = File::open(&json_file_path).expect("JSON file not found");
        let mut json_text = String::new();
        json_file
            .read_to_string(&mut json_text)
            .expect("error while reading JSON file");
        let data = json::parse(&json_text).expect("invalid JSON");
        let dataset_dir = data_dir.join(json_file_path.file_stem().unwrap());
        if !dataset_dir.exists() {
            fs::create_dir(&dataset_dir).expect("Creation of dataset directory failed");
        }

        let acteurs_dir = dataset_dir.join("acteurs");
        if acteurs_dir.exists() {
            fs::remove_dir_all(&acteurs_dir).expect("Recursive deletion of acteurs directory failed");
        }
        fs::create_dir(&acteurs_dir).expect("Creation of acteurs directory failed");
        for acteur in data["export"]["acteurs"]["acteur"].members() {
            let json_file_path =
                acteurs_dir.join(acteur["uid"]["#text"].as_str().unwrap().to_owned() + ".json");
            fs::write(&json_file_path, acteur.pretty(2)).expect("Error while writing JSON file");
        }

        let organes_dir = dataset_dir.join("organes");
        if organes_dir.exists() {
            fs::remove_dir_all(&organes_dir).expect("Recursive deletion of organes directory failed");
        }
        fs::create_dir(&organes_dir).expect("Creation of organes directory failed");
        for organe in data["export"]["organes"]["organe"].members() {
            let json_file_path = organes_dir.join(organe["uid"].as_str().unwrap().to_owned() + ".json");
            fs::write(&json_file_path, organe.pretty(2)).expect("Error while writing JSON file");
        }
    }
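
    // Réunion uids encode, in order: the chamber (AN, CG or SN), the
    // republic (R…), the legislature (L…), the session (S…) and a typed
    // sequence number; the capture groups below drive the directory layout.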
    lazy_static! {
        static ref REUNION_UID_REGEX: Regex =
            Regex::new(r"^RU(AN|CG|SN)(R\d+)(L\d+)(S\d{4})(IDC|IDFL?|IDS)(\d+)$").unwrap();
    }
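
    // "Agendas": one file per réunion, nested by chamber / republic /
    // legislature / session as extracted from the réunion uid.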
    for dataset in &config.agendas {
        let json_file_path = fs::canonicalize(data_dir.join(&dataset.filename)).unwrap();
        if verbosity != Verbosity::Verbosity0 {
            println!(
                "Splitting \"agendas\" file: {}...",
                json_file_path.to_string_lossy()
            );
        }
        let mut json_file = File::open(&json_file_path).expect("JSON file not found");
        let mut json_text = String::new();
        json_file
            .read_to_string(&mut json_text)
            .expect("error while reading JSON file");
        let data = json::parse(&json_text).expect("invalid JSON");
        let dataset_dir = data_dir.join(json_file_path.file_stem().unwrap());
        if !dataset_dir.exists() {
            fs::create_dir(&dataset_dir).expect("Creation of dataset directory failed");
        }
        let reunions_dir = dataset_dir.join("reunions");
        if reunions_dir.exists() {
            fs::remove_dir_all(&reunions_dir).expect("Recursive deletion of reunions directory failed");
        }
        fs::create_dir(&reunions_dir).expect("Creation of reunions directory failed");
        for reunion in data["reunions"]["reunion"].members() {
            let uid = reunion["uid"].as_str().unwrap();
            let captures = REUNION_UID_REGEX
                .captures(uid)
                .unwrap_or_else(|| panic!("Unexpected uid for reunion: {}", uid));
            let organe_dir = reunions_dir.join(&captures[1]);
            if !organe_dir.exists() {
                fs::create_dir(&organe_dir).expect("Creation of organe directory failed");
            }
            let republique_dir = organe_dir.join(&captures[2]);
            if !republique_dir.exists() {
                fs::create_dir(&republique_dir).expect("Creation of republique directory failed");
            }
            let legislature_dir = republique_dir.join(&captures[3]);
            if !legislature_dir.exists() {
                fs::create_dir(&legislature_dir).expect("Creation of legislature directory failed");
            }
            let session_dir = legislature_dir.join(&captures[4]);
            if !session_dir.exists() {
                fs::create_dir(&session_dir).expect("Creation of session directory failed");
            }
            let json_file_path = session_dir.join(uid.to_owned() + ".json");
            fs::write(&json_file_path, reunion.pretty(2)).expect("Error while writing JSON file");
        }
    }
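
    // "Amendements": one file per texte législatif, named after its
    // refTexteLegislatif.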
    for dataset in &config.amendements {
        let json_file_path = fs::canonicalize(data_dir.join(&dataset.filename)).unwrap();
        if verbosity != Verbosity::Verbosity0 {
            println!(
                "Splitting \"amendements\" file: {}...",
                json_file_path.to_string_lossy()
            );
        }
        let mut json_file = File::open(&json_file_path).expect("JSON file not found");
        let mut json_text = String::new();
        json_file
            .read_to_string(&mut json_text)
            .expect("error while reading JSON file");
        let data = json::parse(&json_text).expect("invalid JSON");
        let dataset_dir = data_dir.join(json_file_path.file_stem().unwrap());
        if !dataset_dir.exists() {
            fs::create_dir(&dataset_dir).expect("Creation of dataset directory failed");
        }
        let textes_legislatifs_dir = dataset_dir.join("textes_legislatifs");
        if textes_legislatifs_dir.exists() {
            fs::remove_dir_all(&textes_legislatifs_dir)
                .expect("Recursive deletion of textes_legislatifs directory failed");
        }
        fs::create_dir(&textes_legislatifs_dir)
            .expect("Creation of textes_legislatifs directory failed");
        for texte_legislatif in data["textesEtAmendements"]["texteleg"].members() {
            let json_file_path = textes_legislatifs_dir
                .join(texte_legislatif["refTexteLegislatif"].as_str().unwrap().to_owned() + ".json");
            fs::write(&json_file_path, texte_legislatif.pretty(2))
                .expect("Error while writing JSON file");
        }
    }
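
    // "Dossiers législatifs": split into one file per dossier parlementaire
    // and one per texte législatif (document), each named after its uid.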
    for dataset in &config.dossiers_legislatifs {
        let json_file_path = fs::canonicalize(data_dir.join(&dataset.filename)).unwrap();
        if verbosity != Verbosity::Verbosity0 {
            println!(
                "Splitting \"dossiers législatifs\" file: {}...",
                json_file_path.to_string_lossy()
            );
        }
        let mut json_file = File::open(&json_file_path).expect("JSON file not found");
        let mut json_text = String::new();
        json_file
            .read_to_string(&mut json_text)
            .expect("error while reading JSON file");
        let data = json::parse(&json_text).expect("invalid JSON");
        let dataset_dir = data_dir.join(json_file_path.file_stem().unwrap());
        if !dataset_dir.exists() {
            fs::create_dir(&dataset_dir).expect("Creation of dataset directory failed");
        }

        let dossiers_legislatifs_dir = dataset_dir.join("dossiers_legislatifs");
        if dossiers_legislatifs_dir.exists() {
            fs::remove_dir_all(&dossiers_legislatifs_dir)
                .expect("Recursive deletion of dossiers_legislatifs directory failed");
        }
        fs::create_dir(&dossiers_legislatifs_dir)
            .expect("Creation of dossiers_legislatifs directory failed");
        for dossier_legislatif in data["export"]["dossiersLegislatifs"]["dossier"].members() {
            let json_file_path = dossiers_legislatifs_dir.join(
                dossier_legislatif["dossierParlementaire"]["uid"]
                    .as_str()
                    .unwrap()
                    .to_owned()
                    + ".json",
            );
            fs::write(&json_file_path, dossier_legislatif["dossierParlementaire"].pretty(2))
                .expect("Error while writing JSON file");
        }

        let textes_legislatifs_dir = dataset_dir.join("textes_legislatifs");
        if textes_legislatifs_dir.exists() {
            fs::remove_dir_all(&textes_legislatifs_dir)
                .expect("Recursive deletion of textes_legislatifs directory failed");
        }
        fs::create_dir(&textes_legislatifs_dir)
            .expect("Creation of textes_legislatifs directory failed");
        for texte_legislatif in data["export"]["textesLegislatifs"]["document"].members() {
            let json_file_path =
                textes_legislatifs_dir.join(texte_legislatif["uid"].as_str().unwrap().to_owned() + ".json");
            fs::write(&json_file_path, texte_legislatif.pretty(2))
                .expect("Error while writing JSON file");
        }
    }
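
    // "Scrutins": one file per scrutin, named after its uid.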
    for dataset in &config.scrutins {
        let json_file_path = fs::canonicalize(data_dir.join(&dataset.filename)).unwrap();
        if verbosity != Verbosity::Verbosity0 {
            println!(
                "Splitting \"scrutins\" file: {}...",
                json_file_path.to_string_lossy()
            );
        }
        let mut json_file = File::open(&json_file_path).expect("JSON file not found");
        let mut json_text = String::new();
        json_file
            .read_to_string(&mut json_text)
            .expect("error while reading JSON file");
        let data = json::parse(&json_text).expect("invalid JSON");
        let dataset_dir = data_dir.join(json_file_path.file_stem().unwrap());
        if !dataset_dir.exists() {
            fs::create_dir(&dataset_dir).expect("Creation of dataset directory failed");
        }
        let scrutins_dir = dataset_dir.join("scrutins");
        if scrutins_dir.exists() {
            fs::remove_dir_all(&scrutins_dir).expect("Recursive deletion of scrutins directory failed");
        }
        fs::create_dir(&scrutins_dir).expect("Creation of scrutins directory failed");
        for scrutin in data["scrutins"]["scrutin"].members() {
            let json_file_path = scrutins_dir.join(scrutin["uid"].as_str().unwrap().to_owned() + ".json");
            fs::write(&json_file_path, scrutin.pretty(2)).expect("Error while writing JSON file");
        }
    }
}
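
A quick way to exercise the new splitter from the workspace (a sketch; adjust the --config path to wherever your Config.toml lives, "../Config.toml" being the default baked into the binary):

    cargo run -p tricoteuses_api_assemblee_open_data_splitter -- --config ../Config.toml -vv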