Commit 8420c25f authored by Adrien Pavie

Improve performance of dump exports

parent 78ab744f
@@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
### Changed
- Added contact information in various themes (cinema, hosting, public_service, restaurant, shop_craft_office, toilets)
- Improve performance of dump exports (single table + several ogr2ogr calls)
### Fixed
- URL in metadata file for dump exports
......
@@ -150,159 +150,267 @@ function getMetadataText(theme, boundary, format, url) {
}
/**
* Extract thematic data
* Create SQL request for exporting theme data
* @param {Object} theme The theme metadata
* @param {Object} boundary The boundary metadata, or null for whole database export
* @param {Object} [options] Options
* @param {string} [options.format] Output file format
* @param {number} [options.radius] Radius around boundary center for data filtering
* @param {boolean} [options.aspoint] Geometry will be converted to a point; otherwise the full geometry is used
* @param {boolean} [options.metadata] Add a metadata file (SCDL format)
* @param {string} [options.url] Original URL used for getting this dataset
* @return {Promise} Resolves on output file
* @param {string} [options.tablename] If output format is "table", use this option to define the table name
* @return {string} SQL request to generate wanted data
*/
exports.getThemeData = (theme, boundary, options) => {
return whenAvailable()
.then(() => {
const start = Date.now();
const format = options.format || "csv";
const metadata = options.metadata || false;
const resultViewName = `${theme.id}${boundary ? boundary.id.toString() : "dump"}${Date.now()}`;
const baseFilePath = CONFIG.WORK_DIR + "/" + resultViewName;
const boundaryId = boundary ? "'" + boundary.id.toString().replace(/'/g, "") + "'" : null;
function getSQLForThemeData(theme, boundary, options) {
const boundaryId = boundary ? "'" + boundary.id.toString().replace(/'/g, "") + "'" : null;
// Create condition for filtering
const geomConds = [];
// Create condition for filtering
const geomConds = [];
if(boundaryId) {
geomConds.push({ sort: 0, cond: `b.osm_id = ${boundaryId}` });
if(boundaryId) {
geomConds.push({ sort: 0, cond: `b.osm_id = ${boundaryId}` });
const dist = options.radius > 0 ? options.radius * 1000 : null;
if(dist) {
geomConds.push({ sort: 2, cond: `ST_DWithin(t.centroid, b.center, ${dist})` });
}
else {
geomConds.push({ sort: 1, cond: "b.way && t.centroid" });
geomConds.push({ sort: 2, cond: "ST_Intersects(t.centroid, b.way)" });
}
const dist = options.radius > 0 ? options.radius * 1000 : null;
if(dist) {
geomConds.push({ sort: 2, cond: `ST_DWithin(t.centroid, b.center, ${dist})` });
}
const hasAdm8 = theme.sql.includes("<ADM8");
if(hasAdm8) {
geomConds.push({ sort: 0, cond: "b2.type = 'admin_8'" });
geomConds.push({ sort: 1, cond: "b2.way && t.centroid" });
geomConds.push({ sort: 3, cond: "ST_Contains(b2.way, t.centroid)" });
else {
geomConds.push({ sort: 1, cond: "b.way && t.centroid" });
geomConds.push({ sort: 2, cond: "ST_Intersects(t.centroid, b.way)" });
}
const geomCond = geomConds.sort((a, b) => a.sort - b.sort).map(g => g.cond).join(" AND ");
}
// Create geometry column
let geom = options.aspoint ? "t.centroid AS geom" : "t.way AS geom";
if(format === "csv" && theme.skipGeomCSV) { geom = ""; }
else if(format === "xlsx") {
if(theme.skipGeomCSV) { geom = ""; }
else {
geom = options.aspoint ?
"ST_X(ST_Transform(t.centroid, 4326)) AS x, ST_Y(ST_Transform(t.centroid, 4326)) AS y"
: "ST_AsText(ST_Transform(t.way, 4326)) AS wkt";
}
}
const hasAdm8 = theme.sql.includes("<ADM8");
if(hasAdm8) {
geomConds.push({ sort: 0, cond: "b2.type = 'admin_8'" });
geomConds.push({ sort: 1, cond: "b2.way && t.centroid" });
geomConds.push({ sort: 3, cond: "ST_Intersects(b2.way, t.centroid)" });
}
const geomCond = geomConds.sort((a, b) => a.sort - b.sort).map(g => g.cond).join(" AND ");
// Create SQL request
let request = theme.geomtypes.map(g => (
theme.sql
.replace(/<TABLE>/g, `${boundaryId ? 'boundary b, ' : ''}planet_osm_${g} t${hasAdm8 ? ", boundary b2" : ""}`)
.replace(/<GEOM>/g, g === "point" ? geom.replace(/t\.centroid/g, "t.way") : geom)
.replace(/<GEOMEMBED>/g, "t.way")
.replace(/<ADM8NAME>/g, "b2.name")
.replace(/<ADM8REF>/g, "b2.ref")
.replace(/<OSMID>/g, g === "point" ? "CONCAT('node/', t.osm_id)" : "CASE WHEN t.osm_id > 0 THEN CONCAT('way/', t.osm_id) ELSE CONCAT('relation/', -t.osm_id) END")
.replace(/<GEOMCOND>/g, g === "point" ? "<GEOMCONDPT>" : "<GEOMCOND>")
.replace(/SELECT\s+,/g, "SELECT ")
.replace(/,\s+FROM/g, " FROM")
)).join(" UNION ");
// Generate metadata file content
const metadataFile = baseFilePath + "_metadata.csv";
if(metadata) {
const metadataTxt = getMetadataText(theme, boundary, format, options.url);
fs.writeFileSync(metadataFile, metadataTxt);
// Create geometry column
let geom = options.aspoint ? "t.centroid AS geom" : "t.way AS geom";
if(options.format === "csv" && theme.skipGeomCSV) { geom = ""; }
else if(options.format === "xlsx") {
if(theme.skipGeomCSV) { geom = ""; }
else {
geom = options.aspoint ?
"ST_X(ST_Transform(t.centroid, 4326)) AS x, ST_Y(ST_Transform(t.centroid, 4326)) AS y"
: "ST_AsText(ST_Transform(t.way, 4326)) AS wkt";
}
}
else if(options.format === "table") {
geom = "t.centroid AS geom_center, t.way AS geom_full";
}
/*
* Export commands
* Depends on the format to use
*/
let outFile = baseFilePath + "." + FORMAT_TO_EXT[format];
const bashCommands = [];
const sql2bash = txt => txt.replace(/"/g, '\\"').replace(/\n/g, " ");
// Common conditional geometry system for GeoJSON/CSV and Shapefile as point
if(["geojson", "csv", "xlsx"].includes(format) || (format === "shapefile" && options.aspoint)) {
request = request
.replace(/<GEOMCOND>/g, geomCond)
.replace(/<GEOMCONDPT>/g, geomCond.replace(/t\.centroid/g, "t.way"));
}
// Create SQL request
let request = theme.geomtypes.map(g => (
theme.sql
.replace(/<TABLE>/g, `${boundaryId ? 'boundary b, ' : ''}planet_osm_${g} t${hasAdm8 ? ", boundary b2" : ""}`)
.replace(/<GEOM>/g, g === "point" ? geom.replace(/t\.centroid/g, "t.way") : geom)
.replace(/<GEOMEMBED>/g, "t.way")
.replace(/<ADM8NAME>/g, "b2.name")
.replace(/<ADM8REF>/g, "b2.ref")
.replace(/<OSMID>/g, g === "point" ? "CONCAT('node/', t.osm_id)" : "CASE WHEN t.osm_id > 0 THEN CONCAT('way/', t.osm_id) ELSE CONCAT('relation/', -t.osm_id) END")
.replace(/<GEOMCOND>/g, g === "point" ? "<GEOMCONDPT>" : "<GEOMCOND>")
.replace(/SELECT\s+,/g, "SELECT ")
.replace(/,\s+FROM/g, " FROM")
)).join(" UNION ");
// Common conditional geometry system for GeoJSON/CSV and Shapefile as point
if(["geojson", "csv", "xlsx", "table"].includes(options.format) || (options.format === "shapefile" && options.aspoint)) {
request = request
.replace(/<GEOMCOND>/g, geomCond)
.replace(/<GEOMCONDPT>/g, geomCond.replace(/t\.centroid/g, "t.way"));
}
else if(options.format === "shapefile" && !options.aspoint) {
request = request
.replace(/<GEOMCOND>/g, `<GEOMCONDTYPE> AND ${geomCond}`)
.replace(/<GEOMCONDPT>/g, `<GEOMCONDTYPE> AND ${geomCond.replace(/t\.centroid/g, "t.way")}`);
}
// GeoJSON
if(format === "geojson") {
bashCommands.push(`ogr2ogr -f "GeoJSON" ${outFile} PG:"host=${CONFIG.PG_HOST} user=${CONFIG.PG_USER} dbname=${CONFIG.PG_DB_CLEAN} port=${CONFIG.PG_PORT}" -sql "${sql2bash(request)}" -t_srs EPSG:4326`);
}
// CSV
else if(format === "csv") {
bashCommands.push(`ogr2ogr -f "CSV" ${outFile} PG:"host=${CONFIG.PG_HOST} user=${CONFIG.PG_USER} dbname=${CONFIG.PG_DB_CLEAN} port=${CONFIG.PG_PORT}" -sql "${sql2bash(request)}" -t_srs EPSG:4326 -lco GEOMETRY=${options.aspoint ? "AS_XY" : "AS_WKT"} -lco SEPARATOR=SEMICOLON`);
// Prepend CREATE TABLE if table format
if(options.format === "table") {
request = `CREATE TABLE ${options.tablename} AS ${request}`;
}
return request;
}
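// Illustrative sketch only, with a hypothetical theme: suppose sampleTheme.sql is
//   "SELECT <OSMID> AS osm_id, t.name, <GEOM> FROM <TABLE> WHERE t.amenity = 'cinema'"
// and sampleTheme.geomtypes is ["point", "polygon"]. A full dump in "table" format,
//   getSQLForThemeData(sampleTheme, null, { format: "table", tablename: "tmp_1700000000000" })
// would then roughly produce:
//   CREATE TABLE tmp_1700000000000 AS
//     SELECT CONCAT('node/', t.osm_id) AS osm_id, t.name, t.way AS geom_center, t.way AS geom_full
//     FROM planet_osm_point t WHERE t.amenity = 'cinema'
//     UNION
//     SELECT CASE WHEN t.osm_id > 0 THEN CONCAT('way/', t.osm_id) ELSE CONCAT('relation/', -t.osm_id) END AS osm_id,
//       t.name, t.centroid AS geom_center, t.way AS geom_full
//     FROM planet_osm_polygon t WHERE t.amenity = 'cinema'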
/**
* Create export files using given SQL request
* @param {Object} theme The theme metadata
* @param {Object} boundary The boundary metadata, or null for whole database export
* @param {string} request The SQL request to retrieve data
* @param {Object} [options] Options
* @param {string} [options.format] Output file format
* @param {boolean} [options.aspoint] Geometry will be converted to a point; otherwise the full geometry is used
* @param {boolean} [options.metadata] Add a metadata file (SCDL format)
* @param {string} [options.url] Original URL used for getting this dataset
* @param {string} [options.destinationFolder] Destination folder (defaults to tmp directory set in config.json)
* @return {Promise} Resolves on output file
*/
function createDataFile(theme, boundary, request, options) {
const filename = `${theme.id}${boundary ? boundary.id.toString() : "dump"}${Date.now()}_${options.format}`;
const baseFilePath = `${CONFIG.WORK_DIR}/${filename}`;
// Generate metadata file content
const metadataFile = baseFilePath + "_metadata.csv";
if(options.metadata) {
const metadataTxt = getMetadataText(theme, boundary, options.format, options.url);
fs.writeFileSync(metadataFile, metadataTxt);
}
/*
* Export commands
* Depends on the format to use
*/
let outFile = baseFilePath + "." + FORMAT_TO_EXT[options.format];
const bashCommands = [];
const sql2bash = txt => txt.replace(/"/g, '\\"').replace(/\n/g, " ");
const pgdb = `PG:"host=${CONFIG.PG_HOST} user=${CONFIG.PG_USER} dbname=${CONFIG.PG_DB_CLEAN} port=${CONFIG.PG_PORT}"`;
// GeoJSON
if(options.format === "geojson") {
bashCommands.push(`ogr2ogr -f "GeoJSON" ${outFile} ${pgdb} -sql "${sql2bash(request)}" -t_srs EPSG:4326`);
}
// CSV
else if(options.format === "csv") {
bashCommands.push(`ogr2ogr -f "CSV" ${outFile} ${pgdb} -sql "${sql2bash(request)}" -t_srs EPSG:4326 -lco GEOMETRY=${options.aspoint ? "AS_XY" : "AS_WKT"} -lco SEPARATOR=SEMICOLON`);
}
// XLS
else if(options.format === "xlsx") {
bashCommands.push(`ogr2ogr -f "XLSX" ${outFile} ${pgdb} -sql "${sql2bash(request)}"`);
}
// Shapefile
else if(options.format === "shapefile") {
bashCommands.push(`mkdir -p ${baseFilePath}`);
// Process geometry type by type
if(options.aspoint) {
const out = baseFilePath + "/data.shp";
bashCommands.push(`ogr2ogr -f "ESRI Shapefile" ${out} ${pgdb} -sql "${sql2bash(request)}" -lco ENCODING=UTF-8`);
}
// XLS
else if(format === "xlsx") {
bashCommands.push(`ogr2ogr -f "XLSX" ${outFile} PG:"host=${CONFIG.PG_HOST} user=${CONFIG.PG_USER} dbname=${CONFIG.PG_DB_CLEAN} port=${CONFIG.PG_PORT}" -sql "${sql2bash(request)}"`);
else {
theme.geomtypes.forEach(type => {
const out = baseFilePath + "/" + type + ".shp";
const typeReq = request.replace(/<GEOMCONDTYPE>/g, GEOM_TO_COND[type]);
bashCommands.push(`ogr2ogr -f "ESRI Shapefile" ${out} ${pgdb} -sql "${sql2bash(typeReq)}" -lco ENCODING=UTF-8`);
});
}
// Shapefile
else if(format === "shapefile") {
bashCommands.push(`mkdir -p ${baseFilePath}`);
// Process geometry type by type
if(options.aspoint) {
const out = baseFilePath + "/data.shp";
bashCommands.push(`ogr2ogr -f "ESRI Shapefile" ${out} PG:"host=${CONFIG.PG_HOST} user=${CONFIG.PG_USER} dbname=${CONFIG.PG_DB_CLEAN} port=${CONFIG.PG_PORT}" -sql "${sql2bash(request)}" -lco ENCODING=UTF-8`);
}
else {
theme.geomtypes.forEach(type => {
const out = baseFilePath + "/" + type + ".shp";
const typeReq = request
.replace(/<GEOMCOND>/g, GEOM_TO_COND[type] + " AND " + geomCond)
.replace(/<GEOMCONDPT>/g, GEOM_TO_COND[type] + " AND " + geomCond.replace(/t\.centroid/g, "t.way"));
bashCommands.push(`ogr2ogr -f "ESRI Shapefile" ${out} PG:"host=${CONFIG.PG_HOST} user=${CONFIG.PG_USER} dbname=${CONFIG.PG_DB_CLEAN} port=${CONFIG.PG_PORT}" -sql "${sql2bash(typeReq)}" -lco ENCODING=UTF-8`);
});
}
// Zip the whole output folder
bashCommands.push(`zip -j -r ${outFile} ${baseFilePath} && rm -rf ${baseFilePath}`);
// Zip the whole output folder
bashCommands.push(`zip -j -r ${outFile} ${baseFilePath} && rm -rf ${baseFilePath}`);
}
// Add metadata along dataset
if(options.metadata) {
const zipFile = baseFilePath + ".zip";
bashCommands.push(`zip -j ${zipFile} ${outFile} ${metadataFile}`);
bashCommands.push(`printf "@ ${outFile.replace(CONFIG.WORK_DIR + "/", "")}\n@=data.${FORMAT_TO_EXT[options.format]}\n" | zipnote -w ${zipFile}`);
bashCommands.push(`printf "@ ${metadataFile.replace(CONFIG.WORK_DIR + "/", "")}\n@=metadata.csv\n" | zipnote -w ${zipFile}`);
bashCommands.push(`rm ${outFile} ${metadataFile}`);
outFile = zipFile;
}
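// Illustration with hypothetical file names: for a CSV export the commands pushed above
// would amount to something like
//   zip -j /tmp/cinema123_csv.zip /tmp/cinema123_csv.csv /tmp/cinema123_csv_metadata.csv
//   printf "@ cinema123_csv.csv\n@=data.csv\n" | zipnote -w /tmp/cinema123_csv.zip
//   printf "@ cinema123_csv_metadata.csv\n@=metadata.csv\n" | zipnote -w /tmp/cinema123_csv.zip
//   rm /tmp/cinema123_csv.csv /tmp/cinema123_csv_metadata.csv
// i.e. zipnote -w renames the zipped entries to the generic "data.<ext>" and "metadata.csv"
// names, whatever the temporary working file names were.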
// Execute command on system
return exec(bashCommands.join(" && "))
.then(result => {
// Throw an error if the command reported anything on stderr
if(result.stderr.length > 0) {
// Check if it's not a warning
if(result.stderr.split('\n').filter(l => l.trim().length > 0 && !l.trim().startsWith('Warning')).length > 0) {
throw new Error("Can't extract data from DB: "+result.stderr);
}
}
// Add metadata along dataset
if(metadata) {
const zipFile = baseFilePath + ".zip";
bashCommands.push(`zip -j ${zipFile} ${outFile} ${metadataFile}`);
bashCommands.push(`printf "@ ${outFile.replace(CONFIG.WORK_DIR + "/", "")}\n@=data.${FORMAT_TO_EXT[format]}\n" | zipnote -w ${zipFile}`);
bashCommands.push(`printf "@ ${metadataFile.replace(CONFIG.WORK_DIR + "/", "")}\n@=metadata.csv\n" | zipnote -w ${zipFile}`);
bashCommands.push(`rm ${outFile} ${metadataFile}`);
outFile = zipFile;
if(options.destinationFolder) {
const newFile = `${options.destinationFolder}/${theme.id}${options.metadata ? "_"+options.format+".zip" : "."+options.format}`;
fs.copyFileSync(outFile, newFile);
fs.unlinkSync(outFile);
outFile = newFile;
}
// console.log(request);
// Execute command on system
return exec(bashCommands.join(" && "))
.then(result => {
// Throw an error if the command reported anything on stderr
if(result.stderr.length > 0) {
// Check if it's not a warning
if(result.stderr.split('\n').filter(l => l.trim().length > 0 && !l.trim().startsWith('Warning')).length > 0) {
throw new Error("Can't extract data from DB: "+result.stderr);
}
}
return outFile;
});
}
/**
* Full dump of database of a given theme
* @param {Object} theme The theme metadata
* @param {Object} [options] Options
* @param {string} [options.baseUrl] Base URL where files will be available at the end
* @param {string} [options.destinationFolder] Folder which will receive processed files
* @return {Promise} Resolves with output filenames
*/
exports.createDumpTheme = (theme, options) => {
return whenAvailable()
.then(() => {
// Get request for this theme
const tablename = "tmp_"+Date.now();
const request = getSQLForThemeData(theme, null, {
format: "table",
tablename: tablename
});
const cleanupDb = () => pool.query(`DROP TABLE ${tablename}`);
// Launch SQL request
return pool.query(request)
.then(() => {
// Creates files
const promises = [
createDataFile(theme, null, `SELECT *, geom_full AS geom FROM ${tablename}`, {
format: "geojson",
aspoint: false,
metadata: true,
url: `${options.baseUrl}/${theme.id}_geojson.zip`,
destinationFolder: options.destinationFolder
}),
createDataFile(theme, null, `SELECT *, geom_center AS geom FROM ${tablename}`, {
format: "csv",
aspoint: true,
metadata: true,
url: `${options.baseUrl}/${theme.id}_csv.zip`,
destinationFolder: options.destinationFolder
})
];
return Promise.all(promises)
.then(outFiles => {
return cleanupDb()
.then(() => {
return outFiles;
});
})
.catch(e => {
return cleanupDb()
.then(() => { throw e; });
});
});
});
};
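// Usage sketch (hypothetical theme and paths): the dump script calls this once per theme,
// so the data is materialised by a single SQL request and then reused by every export:
//
//   db.createDumpTheme(sampleTheme, {
//       baseUrl: "http://localhost:3000/dump",
//       destinationFolder: "/data/dumps"
//   })
//   .then(outFiles => {
//       // e.g. [ "/data/dumps/cinema_geojson.zip", "/data/dumps/cinema_csv.zip" ]
//   });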
/**
* Extract thematic data
* @param {Object} theme The theme metadata
* @param {Object} boundary The boundary metadata, or null for whole database export
* @param {Object} [options] Options
* @param {string} [options.format] Output file format
* @param {number} [options.radius] Radius around boundary center for data filtering
* @param {boolean} [options.aspoint] Geometry will be converted to a point; otherwise the full geometry is used
* @param {boolean} [options.metadata] Add a metadata file (SCDL format)
* @param {string} [options.url] Original URL used for getting this dataset
* @return {Promise} Resolves on output file
*/
exports.getThemeData = (theme, boundary, options) => {
return whenAvailable()
.then(() => {
const start = Date.now();
options.format = options.format || "csv";
options.metadata = options.metadata || false;
return createDataFile(theme, boundary, getSQLForThemeData(theme, boundary, options), options)
.then(outfile => {
console.log(`Request ${theme.id} on ${boundary ? boundary.name : "dump"} (${new Date().toISOString().split("T")[0]}): ${Math.round((Date.now()-start)/1000)}s`);
return outFile;
return outfile;
});
});
};
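// Usage sketch (hypothetical boundary and URL): single-area extracts keep the same entry
// point, which now simply chains the SQL builder and the file export:
//
//   db.getThemeData(sampleTheme, { id: 1234, name: "Rennes" }, {
//       format: "geojson",
//       metadata: true,
//       url: "http://localhost:3000/data/sample.zip"
//   })
//   .then(outFile => {
//       // outFile is the generated archive inside CONFIG.WORK_DIR
//   });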
@@ -13,26 +13,10 @@ if(!fs.existsSync(DUMP_DIR)) {
fs.mkdirSync(DUMP_DIR, { recursive: true });
}
// Dump a single theme + format
const dumpTheme = (theme, format) => {
const fileName = `${theme.id}_${format}.zip`;
return db.getThemeData(theme, null, {
format: format,
url: (CONFIG.API_BASE_URL || "http://localhost:3000") + "/dump/" + fileName,
metadata: true,
aspoint: format === "csv"
})
.then(outFile => {
// Copy + remove to support cross-device mv
fs.copyFileSync(outFile, `${DUMP_DIR}/${fileName}`);
fs.unlinkSync(outFile);
return true;
});
};
// Create dump for each theme
const nextTheme = () => {
const theme = themes.full.shift();
const start = Date.now();
if(!theme) {
console.log("Dumping all themes done");
@@ -43,10 +27,13 @@ const nextTheme = () => {
return nextTheme();
}
dumpTheme(theme, "geojson")
db.createDumpTheme(theme, {
baseUrl: (CONFIG.API_BASE_URL || "http://localhost:3000") + "/dump",
destinationFolder: DUMP_DIR
})
.then(() => {
dumpTheme(theme, "csv")
.then(() => nextTheme());
console.log(`Dump for theme ${theme.id} took ${Math.round((Date.now()-start)/1000)}s`);
nextTheme();
});
};
......
@@ -113,7 +113,7 @@ psql -h "${PG_HOST}" -U "${PG_USER}" -p "${PG_PORT}" -d "${PG_DB_TMP}" -c "CREAT
psql -h "${PG_HOST}" -U "${PG_USER}" -p "${PG_PORT}" -d "${PG_DB_TMP}" -c "ALTER TABLE planet_osm_line ADD COLUMN centroid GEOMETRY(Point, 3857); ALTER TABLE planet_osm_polygon ADD COLUMN centroid GEOMETRY(Point, 3857); UPDATE planet_osm_line SET centroid = ST_Centroid(way); UPDATE planet_osm_polygon SET centroid = ST_Centroid(way); CREATE INDEX planet_osm_line_centroid_idx ON planet_osm_line USING GIST(centroid); CREATE INDEX planet_osm_polygon_centroid_idx ON planet_osm_polygon USING GIST(centroid);"
# Add boundaries for API requests
psql -h "${PG_HOST}" -U "${PG_USER}" -p "${PG_PORT}" -d "${PG_DB_TMP}" -c "CREATE EXTENSION pg_trgm; CREATE EXTENSION unaccent; CREATE TABLE boundary AS SELECT osm_id, COALESCE(tags->'ref:INSEE', tags->'ref:FR:SIREN', tags->'ref:NUTS', tags->'ref') AS ref, CASE WHEN boundary = 'administrative' THEN 'admin_'||admin_level ELSE boundary END AS type, tags->'name' AS name, lower(unaccent(tags->'name')) AS simple_name, ST_SimplifyPreserveTopology(way, 10) AS way, ST_Centroid(way) AS center FROM planet_osm_polygon WHERE boundary IN ('administrative', 'local_authority', 'political') AND (admin_level IS NULL OR (admin_level::int >= 6 AND admin_level::int <= 8)); CREATE INDEX boundary_osm_id_idx ON boundary(osm_id); CREATE INDEX boundary_way_idx ON boundary USING GIST(way); CREATE INDEX boundary_center_idx ON boundary USING GIST(center); CREATE INDEX boundary_ref_trgm_idx ON boundary USING GIST(ref gist_trgm_ops); CREATE INDEX boundary_simple_name_trgm_idx ON boundary USING GIST(name gist_trgm_ops);"
psql -h "${PG_HOST}" -U "${PG_USER}" -p "${PG_PORT}" -d "${PG_DB_TMP}" -c "CREATE EXTENSION pg_trgm; CREATE EXTENSION unaccent; CREATE TABLE boundary AS SELECT osm_id, COALESCE(tags->'ref:INSEE', tags->'ref:FR:SIREN', tags->'ref:NUTS', tags->'ref') AS ref, CASE WHEN boundary = 'administrative' THEN 'admin_'||admin_level ELSE boundary END AS type, tags->'name' AS name, lower(unaccent(tags->'name')) AS simple_name, ST_SimplifyPreserveTopology(way, 10) AS way, ST_Centroid(way) AS center FROM planet_osm_polygon WHERE boundary IN ('administrative', 'local_authority', 'political') AND (admin_level IS NULL OR (admin_level::int >= 6 AND admin_level::int <= 8)); CREATE INDEX boundary_osm_id_idx ON boundary(osm_id); CREATE INDEX boundary_way_idx ON boundary USING GIST(way); CREATE INDEX boundary_center_idx ON boundary USING GIST(center); CREATE INDEX boundary_ref_trgm_idx ON boundary USING GIST(ref gist_trgm_ops); CREATE INDEX boundary_simple_name_trgm_idx ON boundary USING GIST(name gist_trgm_ops); CREATE INDEX boundary_type_idx ON boundary(type);"
# JSON boundary version for API
bounds_json="
......