Fixed generateCacheScript so as not to re-download duplicate features

This commit is contained in:
pietervdvn 2021-07-27 15:06:36 +02:00
parent 4fd233e557
commit 0564c524a2
2 changed files with 20 additions and 6 deletions

View file

@ -86,6 +86,10 @@ export default class ScriptUtils {
}) })
} }
/**
 * Logs a transient, in-place status line: prints the joined arguments
 * bracketed by carriage returns (no newline), so the next write to stdout
 * overwrites this line instead of scrolling. Used for progress output.
 */
public static erasableLog(...text) {
process.stdout.write("\r "+text.join(" ")+" \r")
}
public static sleep(ms) { public static sleep(ms) {
if (ms <= 0) { if (ms <= 0) {

View file

@ -34,7 +34,7 @@ function createOverpassObject(theme: LayoutConfig) {
if (layer.source.geojsonSource !== undefined) { if (layer.source.geojsonSource !== undefined) {
// This layer defines a geoJson-source // This layer defines a geoJson-source
// SHould it be cached? // SHould it be cached?
if (!layer.source.isOsmCacheLayer) { if (layer.source.isOsmCacheLayer !== true) {
continue; continue;
} }
} }
@ -131,7 +131,7 @@ async function downloadExtraData(theme: LayoutConfig)/* : any[] */ {
if (source === undefined) { if (source === undefined) {
continue; continue;
} }
if (layer.source.isOsmCacheLayer) { if (layer.source.isOsmCacheLayer !== undefined) {
// Cached layers are not considered here // Cached layers are not considered here
continue; continue;
} }
@ -148,7 +148,7 @@ function postProcess(targetdir: string, r: TileRange, theme: LayoutConfig, extra
for (let y = r.ystart; y <= r.yend; y++) { for (let y = r.ystart; y <= r.yend; y++) {
processed++; processed++;
const filename = rawJsonName(targetdir, x, y, r.zoomlevel) const filename = rawJsonName(targetdir, x, y, r.zoomlevel)
console.log(" Post processing", processed, "/", r.total, filename) ScriptUtils.erasableLog(" Post processing", processed, "/", r.total, filename)
if (!existsSync(filename)) { if (!existsSync(filename)) {
console.error("Not found - and not downloaded. Run this script again!: " + filename) console.error("Not found - and not downloaded. Run this script again!: " + filename)
continue; continue;
@ -234,9 +234,8 @@ function splitPerLayer(targetdir: string, r: TileRange, theme: LayoutConfig) {
}) })
const new_path = geoJsonName(targetdir + "_" + layer.id, x, y, z); const new_path = geoJsonName(targetdir + "_" + layer.id, x, y, z);
console.log(new_path, " has ", geojson.features.length, " features after filtering (dropped ", oldLength - geojson.features.length, ")") ScriptUtils.erasableLog(new_path, " has ", geojson.features.length, " features after filtering (dropped ", oldLength - geojson.features.length, ")")
if (geojson.features.length == 0) { if (geojson.features.length == 0) {
console.log("Not writing geojson file as it is empty", new_path)
continue; continue;
} }
writeFileSync(new_path, JSON.stringify(geojson, null, " ")) writeFileSync(new_path, JSON.stringify(geojson, null, " "))
@ -289,9 +288,20 @@ async function createOverview(targetdir: string, r: TileRange, z: number, layern
} }
} }
const featuresDedup = []
const seen = new Set<string>()
for (const feature of allFeatures) {
const id = feature.properties.id
if(seen.has(id)){
continue
}
seen.add(id)
featuresDedup.push(feature)
}
const geojson = { const geojson = {
"type": "FeatureCollection", "type": "FeatureCollection",
"features": allFeatures "features": featuresDedup
} }
writeFileSync(targetdir + "_" + layername + "_points.geojson", JSON.stringify(geojson, null, " ")) writeFileSync(targetdir + "_" + layername + "_points.geojson", JSON.stringify(geojson, null, " "))
} }