var exec = require('child_process').exec
var fs = require('fs')
var path = require('path')

var area = require('@mapbox/geojson-area')
var geojsonhint = require('@mapbox/geojsonhint')
var bbox = require('@turf/bbox').default
var helpers = require('@turf/helpers')
var multiPolygon = helpers.multiPolygon
var polygon = helpers.polygon
var asynclib = require('async')
var https = require('follow-redirects').https
var jsts = require('jsts')
var rimraf = require('rimraf')
var overpass = require('query-overpass')
var yargs = require('yargs')

const FeatureWriterStream = require('./util/featureWriterStream')
const ProgressStats = require('./util/progressStats')

var osmBoundarySources = require('./osmBoundarySources.json')
var zoneCfg = require('./timezones.json')
var expectedZoneOverlaps = require('./expectedZoneOverlaps.json')

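// Example invocation (illustrative only): build a single zone and skip the
// slower outputs, e.g.
//   node index.js --included_zones America/New_York --skip_analyze_diffs --skip_zip --skip_shapefile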
const argv = yargs
  .option('downloads_dir', {
    description: 'Set the download location',
    default: './downloads',
    type: 'string'
  })
  .option('dist_dir', {
    description: 'Set the dist location',
    default: './dist',
    type: 'string'
  })
  .option('excluded_zones', {
    description: 'Exclude specified zones',
    type: 'array'
  })
  .option('included_zones', {
    description: 'Include specified zones',
    type: 'array'
  })
  .option('skip_analyze_diffs', {
    description: 'Skip analysis of diffs between versions',
    type: 'boolean'
  })
  .option('skip_shapefile', {
    description: 'Skip shapefile creation',
    type: 'boolean'
  })
  .option('skip_validation', {
    description: 'Skip validation',
    type: 'boolean'
  })
  .option('skip_zip', {
    description: 'Skip zip creation',
    type: 'boolean'
  })
  .help()
  .strict()
  .alias('help', 'h')
  .argv

// Resolve the arguments with paths so relative paths become absolute.
const downloadsDir = path.resolve(argv.downloads_dir)
const distDir = path.resolve(argv.dist_dir)

// allow building of only specified zones
let includedZones = []
let excludedZones = []
if (argv.included_zones || argv.excluded_zones) {
  if (argv.included_zones) {
    const newZoneCfg = {}
    includedZones = argv.included_zones
    includedZones.forEach((zoneName) => {
      newZoneCfg[zoneName] = zoneCfg[zoneName]
    })
    zoneCfg = newZoneCfg
  }
  if (argv.excluded_zones) {
    const newZoneCfg = {}
    excludedZones = argv.excluded_zones
    Object.keys(zoneCfg).forEach((zoneName) => {
      if (!excludedZones.includes(zoneName)) {
        newZoneCfg[zoneName] = zoneCfg[zoneName]
      }
    })
    zoneCfg = newZoneCfg
  }

  // filter out unnecessary downloads
  var newOsmBoundarySources = {}
  Object.keys(zoneCfg).forEach((zoneName) => {
    zoneCfg[zoneName].forEach((op) => {
      if (op.source === 'overpass') {
        newOsmBoundarySources[op.id] = osmBoundarySources[op.id]
      }
    })
  })

  osmBoundarySources = newOsmBoundarySources
}

var geoJsonReader = new jsts.io.GeoJSONReader()
var geoJsonWriter = new jsts.io.GeoJSONWriter()
var precisionModel = new jsts.geom.PrecisionModel(1000000)
var precisionReducer = new jsts.precision.GeometryPrecisionReducer(precisionModel)
var distZones = {}
var lastReleaseJSONfile
var minRequestGap = 4
var curRequestGap = 4
const bufferDistance = 0.01

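// mkdir that treats an already-existing directory as success rather than an error.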
var safeMkdir = function (dirname, callback) {
  fs.mkdir(dirname, function (err) {
    if (err && err.code === 'EEXIST') {
      callback()
    } else {
      callback(err)
    }
  })
}

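/**
 * Perform a JSTS geometry operation with debugging support.
 *
 * On a TopologyException the operation is retried, first with a
 * GeometryPrecisionReducer and then (if bufferAfterPrecisionReduction is set)
 * with slightly buffered inputs. Any other failure dumps both operands to
 * debug_<op>_a.json / debug_<op>_b.json before rethrowing.
 *
 * @param {string} op  One of 'union', 'intersection', 'intersects' or 'diff'
 * @param {Geometry} a  First jsts geometry
 * @param {Geometry} b  Second jsts geometry
 * @param {boolean} reducePrecision  Reduce precision of both inputs first
 * @param {boolean} bufferAfterPrecisionReduction  Allow a buffered retry
 */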
var debugGeo = function (
  op,
  a,
  b,
  reducePrecision,
  bufferAfterPrecisionReduction
) {
  var result

  if (reducePrecision) {
    a = precisionReducer.reduce(a)
    b = precisionReducer.reduce(b)
  }

  try {
    switch (op) {
      case 'union':
        result = a.union(b)
        break
      case 'intersection':
        result = a.intersection(b)
        break
      case 'intersects':
        result = a.intersects(b)
        break
      case 'diff':
        result = a.difference(b)
        break
      default:
        var err = new Error('invalid op: ' + op)
        throw err
    }
  } catch (e) {
    if (e.name === 'TopologyException') {
      if (reducePrecision) {
        if (bufferAfterPrecisionReduction) {
          console.log('Encountered TopologyException, retry with buffer increase')
          return debugGeo(
            op,
            a.buffer(bufferDistance),
            b.buffer(bufferDistance),
            true,
            bufferAfterPrecisionReduction
          )
        } else {
          throw new Error('Encountered TopologyException after reducing precision')
        }
      } else {
        console.log('Encountered TopologyException, retry with GeometryPrecisionReducer')
        return debugGeo(op, a, b, true, bufferAfterPrecisionReduction)
      }
    }
    console.log('op err')
    console.log(e)
    console.log(e.stack)
    fs.writeFileSync('debug_' + op + '_a.json', JSON.stringify(geoJsonWriter.write(a)))
    fs.writeFileSync('debug_' + op + '_b.json', JSON.stringify(geoJsonWriter.write(b)))
    throw e
  }

  return result
}

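// Skip a download when data is already available locally: if `file` exists,
// call superCallback to skip the whole task; if a manually fixed
// '<name>_fixed.json' variant exists, hand its contents to downloadCallback;
// otherwise invoke fetchFn to download from overpass.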
var fetchIfNeeded = function (file, superCallback, downloadCallback, fetchFn) {
  // check for file that got downloaded
  fs.stat(file, function (err) {
    if (!err) {
      // file found, skip download steps
      return superCallback()
    }
    // check for manual file that got fixed and needs validation
    var fixedFile = file.replace('.json', '_fixed.json')
    fs.stat(fixedFile, function (err) {
      if (!err) {
        // file found, return fixed file
        return downloadCallback(null, require(fixedFile))
      }
      // no manual fixed file found, download from overpass
      fetchFn()
    })
  })
}

var geoJsonToGeom = function (geoJson) {
  try {
    return geoJsonReader.read(JSON.stringify(geoJson))
  } catch (e) {
    console.error('error converting geojson to geometry')
    fs.writeFileSync('debug_geojson_read_error.json', JSON.stringify(geoJson))
    throw e
  }
}

var geomToGeoJson = function (geom) {
  return geoJsonWriter.write(geom)
}

var geomToGeoJsonString = function (geom) {
  return JSON.stringify(geoJsonWriter.write(geom))
}

const downloadProgress = new ProgressStats(
  'Downloading',
  Object.keys(osmBoundarySources).length
)

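// Download one boundary from the Overpass API, doubling the wait between
// requests after a failure and halving it (down to a minimum) after a success.
// The query is assembled from the source's entry in osmBoundarySources.json;
// for a hypothetical entry such as { "ISO3166-1": "FR" } the generated query
// would look roughly like:
//   [out:json][timeout:60];(relation["ISO3166-1"="FR"];);out body;>;out meta qt;
// The unioned result is written to <downloads_dir>/<boundaryId>.json.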
var downloadOsmBoundary = function (boundaryId, boundaryCallback) {
  var cfg = osmBoundarySources[boundaryId]
  var query = '[out:json][timeout:60];('
  if (cfg.way) {
    query += 'way'
  } else {
    query += 'relation'
  }
  var boundaryFilename = downloadsDir + '/' + boundaryId + '.json'
  var debug = 'getting data for ' + boundaryId
  var queryKeys = Object.keys(cfg)

  for (var i = queryKeys.length - 1; i >= 0; i--) {
    var k = queryKeys[i]
    if (k === 'way') continue
    var v = cfg[k]

    query += '["' + k + '"="' + v + '"]'
  }

  query += ';);out body;>;out meta qt;'

  downloadProgress.beginTask(debug, true)

  asynclib.auto({
    downloadFromOverpass: function (cb) {
      console.log('downloading from overpass')
      fetchIfNeeded(boundaryFilename, boundaryCallback, cb, function () {
        var overpassResponseHandler = function (err, data) {
          if (err) {
            console.log(err)
            console.log('Increasing overpass request gap')
            curRequestGap *= 2
            makeQuery()
          } else {
            console.log('Success, decreasing overpass request gap')
            curRequestGap = Math.max(minRequestGap, curRequestGap / 2)
            cb(null, data)
          }
        }
        var makeQuery = function () {
          console.log('waiting ' + curRequestGap + ' seconds')
          setTimeout(function () {
            overpass(query, overpassResponseHandler, { flatProperties: true })
          }, curRequestGap * 1000)
        }
        makeQuery()
      })
    },
    validateOverpassResult: ['downloadFromOverpass', function (results, cb) {
      var data = results.downloadFromOverpass
      if (!data.features) {
        var err = new Error('Invalid geojson for boundary: ' + boundaryId)
        return cb(err)
      }
      if (data.features.length === 0) {
        console.error('No data for the following query:')
        console.error(query)
        console.error('To read more about this error, please visit https://git.io/vxKQL')
        return cb(new Error('No data found from overpass query'))
      }
      cb()
    }],
    saveSingleMultiPolygon: ['validateOverpassResult', function (results, cb) {
      var data = results.downloadFromOverpass
      var combined

      // union all multi-polygons / polygons into one
      for (var i = data.features.length - 1; i >= 0; i--) {
        var curOsmGeom = data.features[i].geometry
        const curOsmProps = data.features[i].properties
        if (
          (curOsmGeom.type === 'Polygon' || curOsmGeom.type === 'MultiPolygon') &&
          curOsmProps.type === 'boundary' // need to make sure enclaves aren't unioned
        ) {
          console.log('combining border')
          let errors = geojsonhint.hint(curOsmGeom)
          if (errors && errors.length > 0) {
            const stringifiedGeojson = JSON.stringify(curOsmGeom, null, 2)
            errors = geojsonhint.hint(stringifiedGeojson)
            console.error('Invalid geojson received in Overpass Result')
            console.error('Overpass query: ' + query)
            const problemFilename = boundaryId + '_convert_to_geom_error.json'
            fs.writeFileSync(problemFilename, stringifiedGeojson)
            console.error('saved problem file to ' + problemFilename)
            console.error('To read more about this error, please visit https://git.io/vxKQq')
            return cb(errors)
          }
          try {
            var curGeom = geoJsonToGeom(curOsmGeom)
          } catch (e) {
            console.error('error converting overpass result to geojson')
            console.error(e)

            fs.writeFileSync(boundaryId + '_convert_to_geom_error-all-features.json', JSON.stringify(data))
            return cb(e)
          }
          if (!combined) {
            combined = curGeom
          } else {
            combined = debugGeo('union', curGeom, combined)
          }
        }
      }
      try {
        fs.writeFile(boundaryFilename, geomToGeoJsonString(combined), cb)
      } catch (e) {
        console.error('error writing combined border to geojson')
        fs.writeFileSync(boundaryId + '_combined_border_convert_to_geom_error.json', JSON.stringify(data))
        return cb(e)
      }
    }]
  }, boundaryCallback)
}

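// Per-zone output filename in the dist dir, with '/' replaced by '__'
// (e.g. 'America/New_York' -> <dist_dir>/America__New_York.json).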
var getTzDistFilename = function (tzid) {
  return distDir + '/' + tzid.replace(/\//g, '__') + '.json'
}

/**
 * Get the geometry of the requested source data
 *
 * @param {Object} source  An object representing the data source.
 *   Must have a `source` key and then either:
 *     - `id` if read from a file (an overpass download or a dist output)
 *     - `data` if a manually specified polygon / multipolygon
 * @return {Object} geom  The jsts geometry of the source
 */
var getDataSource = function (source) {
  var geoJson
  if (source.source === 'overpass') {
    geoJson = require(downloadsDir + '/' + source.id + '.json')
  } else if (source.source === 'manual-polygon') {
    geoJson = polygon(source.data).geometry
  } else if (source.source === 'manual-multipolygon') {
    geoJson = multiPolygon(source.data).geometry
  } else if (source.source === 'dist') {
    geoJson = require(getTzDistFilename(source.id))
  } else {
    var err = new Error('unknown source: ' + source.source)
    throw err
  }
  return geoJsonToGeom(geoJson)
}

/**
 * Post process created timezone boundary.
 * - remove small holes and exclaves
 * - reduce geometry precision
 *
 * @param  {Geometry} geom  The jsts geometry of the timezone
 * @param  {boolean} returnAsObject if true, return as object, otherwise return stringified
 * @return {Object|String}         geojson as object or stringified
 */
var postProcessZone = function (geom, returnAsObject) {
  // reduce precision of geometry
  const geojson = geomToGeoJson(precisionReducer.reduce(geom))

  // iterate through all polygons
  const filteredPolygons = []
  let allPolygons = geojson.coordinates
  if (geojson.type === 'Polygon') {
    allPolygons = [geojson.coordinates]
  }

  allPolygons.forEach((curPolygon, idx) => {
    // remove any polygon with very small area
    const polygonFeature = polygon(curPolygon)
    const polygonArea = area.geometry(polygonFeature.geometry)

    if (polygonArea < 1) return

    // find all holes
    const filteredLinearRings = []

    curPolygon.forEach((curLinearRing, lrIdx) => {
      if (lrIdx === 0) {
        // always keep first linearRing
        filteredLinearRings.push(curLinearRing)
      } else {
        const polygonFromLinearRing = polygon([curLinearRing])
        const linearRingArea = area.geometry(polygonFromLinearRing.geometry)

        // only include holes with relevant area
        if (linearRingArea > 1) {
          filteredLinearRings.push(curLinearRing)
        }
      }
    })

    filteredPolygons.push(filteredLinearRings)
  })

  // recompile to geojson string
  const newGeojson = {
    type: geojson.type
  }

  if (geojson.type === 'Polygon') {
    newGeojson.coordinates = filteredPolygons[0]
  } else {
    newGeojson.coordinates = filteredPolygons
  }

  return returnAsObject ? newGeojson : JSON.stringify(newGeojson)
}

const buildingProgress = new ProgressStats(
  'Building',
  Object.keys(zoneCfg).length
)

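// Build one timezone's geometry by running the ordered list of operations from
// timezones.json for that tzid. Each op has an `op` ('init', 'intersect',
// 'difference', 'difference-reverse-order' or 'union') plus a data source; a
// hypothetical entry might look like:
//   "Europe/Zurich": [
//     { "op": "init", "source": "overpass", "id": "switzerland" },
//     { "op": "difference", "source": "manual-polygon", "data": [[...]] }
//   ]
// The post-processed result is written via getTzDistFilename(tzid).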
var makeTimezoneBoundary = function (tzid, callback) {
  buildingProgress.beginTask(`makeTimezoneBoundary for ${tzid}`, true)

  var ops = zoneCfg[tzid]
  var geom

  asynclib.eachSeries(ops, function (task, cb) {
    var taskData = getDataSource(task)
    console.log('-', task.op, task.id)
    if (task.op === 'init') {
      geom = taskData
    } else if (task.op === 'intersect') {
      geom = debugGeo('intersection', geom, taskData)
    } else if (task.op === 'difference') {
      geom = debugGeo('diff', geom, taskData)
    } else if (task.op === 'difference-reverse-order') {
      geom = debugGeo('diff', taskData, geom)
    } else if (task.op === 'union') {
      geom = debugGeo('union', geom, taskData)
    } else {
      var err = new Error('unknown op: ' + task.op)
      return cb(err)
    }
    cb()
  },
  function (err) {
    if (err) { return callback(err) }
    fs.writeFile(getTzDistFilename(tzid),
      postProcessZone(geom),
      callback)
  })
}

var loadDistZonesIntoMemory = function () {
  console.log('load zones into memory')
  var zones = Object.keys(zoneCfg)
  var tzid

  for (var i = 0; i < zones.length; i++) {
    tzid = zones[i]
    distZones[tzid] = getDataSource({ source: 'dist', id: tzid })
  }
}

var getDistZoneGeom = function (tzid) {
  return distZones[tzid]
}

var roundDownToTenth = function (n) {
  return Math.floor(n * 10) / 10
}

var roundUpToTenth = function (n) {
  return Math.ceil(n * 10) / 10
}

var formatBounds = function (bounds) {
  let boundsStr = '['
  boundsStr += roundDownToTenth(bounds[0]) + ', '
  boundsStr += roundDownToTenth(bounds[1]) + ', '
  boundsStr += roundUpToTenth(bounds[2]) + ', '
  boundsStr += roundUpToTenth(bounds[3]) + ']'
  return boundsStr
}

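// Pairwise-check every zone against every other zone for unexpected overlap.
// Overlaps listed in expectedZoneOverlaps.json are tolerated as long as each
// overlapping polygon fits inside one of the allowed bounding boxes. Based on
// how the entries are read below, that file is keyed by '<tzid1>-<tzid2>' and
// each value is an array of objects with a `bounds` member of
// [minX, minY, maxX, maxY], roughly:
//   { "Asia/Hebron-Asia/Jerusalem": [ { "bounds": [34.8, 31.3, 35.6, 32.6] } ] }
// (illustrative values only). Returns null on success or an error string.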
var validateTimezoneBoundaries = function () {
  const numZones = Object.keys(zoneCfg).length
  const validationProgress = new ProgressStats(
    'Validation',
    numZones * (numZones + 1) / 2
  )

  console.log('do validation... this may take a few minutes')
  var allZonesOk = true
  var zones = Object.keys(zoneCfg)
  var lastPct = 0
  var compareTzid, tzid, zoneGeom

  for (var i = 0; i < zones.length; i++) {
    tzid = zones[i]
    zoneGeom = getDistZoneGeom(tzid)

    for (var j = i + 1; j < zones.length; j++) {
      const curPct = Math.floor(validationProgress.getPercentage())
      if (curPct % 10 === 0 && curPct !== lastPct) {
        validationProgress.printStats('Validating zones', true)
        lastPct = curPct
      }
      compareTzid = zones[j]

      var compareZoneGeom = getDistZoneGeom(compareTzid)

      var intersects = false
      try {
        intersects = debugGeo('intersects', zoneGeom, compareZoneGeom)
      } catch (e) {
        console.warn('warning, encountered intersection error with zone ' + tzid + ' and ' + compareTzid)
      }
      if (intersects) {
        var intersectedGeom = debugGeo('intersection', zoneGeom, compareZoneGeom)
        var intersectedArea = intersectedGeom.getArea()

        if (intersectedArea > 0.0001) {
          // check if the intersected area(s) are one of the expected areas of overlap
          const allowedOverlapBounds = expectedZoneOverlaps[`${tzid}-${compareTzid}`] || expectedZoneOverlaps[`${compareTzid}-${tzid}`]
          const overlapsGeoJson = geoJsonWriter.write(intersectedGeom)

          // these zones are allowed to overlap in certain places, make sure the
          // found overlap(s) all fit within the expected areas of overlap
          if (allowedOverlapBounds) {
            // if the overlaps are a multipolygon, make sure each individual
            // polygon of overlap fits within at least one of the expected
            // overlaps
            let overlapsPolygons
            switch (overlapsGeoJson.type) {
              case 'MultiPolygon':
                overlapsPolygons = overlapsGeoJson.coordinates.map(
                  polygonCoords => ({
                    coordinates: polygonCoords,
                    type: 'Polygon'
                  })
                )
                break
              case 'Polygon':
                overlapsPolygons = [overlapsGeoJson]
                break
              case 'GeometryCollection':
                overlapsPolygons = []
                overlapsGeoJson.geometries.forEach(geom => {
                  if (geom.type === 'Polygon') {
                    overlapsPolygons.push(geom)
                  } else if (geom.type === 'MultiPolygon') {
                    geom.coordinates.forEach(polygonCoords => {
                      overlapsPolygons.push({
                        coordinates: polygonCoords,
                        type: 'Polygon'
                      })
                    })
                  }
                })
                break
              default:
                console.error('unexpected geojson overlap type')
                console.log(overlapsGeoJson)
                break
            }

            let allOverlapsOk = true
            overlapsPolygons.forEach((polygon, idx) => {
              const bounds = bbox(polygon)
              const polygonArea = area.geometry(polygon)
              if (
                polygonArea > 10 && // ignore small polygons
                !allowedOverlapBounds.some(allowedBounds =>
                  allowedBounds.bounds[0] <= bounds[0] && // minX
                    allowedBounds.bounds[1] <= bounds[1] && // minY
                    allowedBounds.bounds[2] >= bounds[2] && // maxX
                    allowedBounds.bounds[3] >= bounds[3] // maxY
                )
              ) {
                console.error(`Unexpected intersection (${polygonArea} area) with bounds: ${formatBounds(bounds)}`)
                allOverlapsOk = false
              }
            })

            if (allOverlapsOk) continue
          }

          // at least one unexpected overlap found, output an error and write debug file
          console.error('Validation error: ' + tzid + ' intersects ' + compareTzid + ' area: ' + intersectedArea)
          const debugFilename = tzid.replace(/\//g, '-') + '-' + compareTzid.replace(/\//g, '-') + '-overlap.json'
          fs.writeFileSync(
            debugFilename,
            JSON.stringify(overlapsGeoJson)
          )
          console.error('wrote overlap area as file ' + debugFilename)
          console.error('To read more about this error, please visit https://git.io/vx6nx')
          allZonesOk = false
        }
      }
      validationProgress.logNext()
    }
  }

  return allZonesOk ? null : 'Zone validation unsuccessful'
}

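// Each ocean zone covers the 15-degree longitude band centered on its UTC
// offset's central meridian; the two twelve-hour bands are truncated at the
// antimeridian.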
let oceanZoneBoundaries
let oceanZones = [
  { tzid: 'Etc/GMT-12', left: 172.5, right: 180 },
  { tzid: 'Etc/GMT-11', left: 157.5, right: 172.5 },
  { tzid: 'Etc/GMT-10', left: 142.5, right: 157.5 },
  { tzid: 'Etc/GMT-9', left: 127.5, right: 142.5 },
  { tzid: 'Etc/GMT-8', left: 112.5, right: 127.5 },
  { tzid: 'Etc/GMT-7', left: 97.5, right: 112.5 },
  { tzid: 'Etc/GMT-6', left: 82.5, right: 97.5 },
  { tzid: 'Etc/GMT-5', left: 67.5, right: 82.5 },
  { tzid: 'Etc/GMT-4', left: 52.5, right: 67.5 },
  { tzid: 'Etc/GMT-3', left: 37.5, right: 52.5 },
  { tzid: 'Etc/GMT-2', left: 22.5, right: 37.5 },
  { tzid: 'Etc/GMT-1', left: 7.5, right: 22.5 },
  { tzid: 'Etc/GMT', left: -7.5, right: 7.5 },
  { tzid: 'Etc/GMT+1', left: -22.5, right: -7.5 },
  { tzid: 'Etc/GMT+2', left: -37.5, right: -22.5 },
  { tzid: 'Etc/GMT+3', left: -52.5, right: -37.5 },
  { tzid: 'Etc/GMT+4', left: -67.5, right: -52.5 },
  { tzid: 'Etc/GMT+5', left: -82.5, right: -67.5 },
  { tzid: 'Etc/GMT+6', left: -97.5, right: -82.5 },
  { tzid: 'Etc/GMT+7', left: -112.5, right: -97.5 },
  { tzid: 'Etc/GMT+8', left: -127.5, right: -112.5 },
  { tzid: 'Etc/GMT+9', left: -142.5, right: -127.5 },
  { tzid: 'Etc/GMT+10', left: -157.5, right: -142.5 },
  { tzid: 'Etc/GMT+11', left: -172.5, right: -157.5 },
  { tzid: 'Etc/GMT+12', left: -180, right: -172.5 }
]

if (includedZones.length > 0) {
  oceanZones = oceanZones.filter(oceanZone => includedZones.indexOf(oceanZone.tzid) > -1)
}
if (excludedZones.length > 0) {
  oceanZones = oceanZones.filter(oceanZone => excludedZones.indexOf(oceanZone.tzid) === -1)
}

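// Build the ocean boundaries: start from each band's full-height rectangle
// (latitude 90 to -90) and subtract every land timezone geometry from it.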
var addOceans = function (callback) {
  console.log('adding ocean boundaries')
  const zones = Object.keys(zoneCfg)

  const oceanProgress = new ProgressStats(
    'Oceans',
    oceanZones.length
  )

  oceanZoneBoundaries = oceanZones.map(zone => {
    oceanProgress.beginTask(zone.tzid, true)
    const geoJson = polygon([[
      [zone.left, 90],
      [zone.left, -90],
      [zone.right, -90],
      [zone.right, 90],
      [zone.left, 90]
    ]]).geometry

    let geom = geoJsonToGeom(geoJson)

    // diff against every zone
    zones.forEach(distZone => {
      geom = debugGeo('diff', geom, getDistZoneGeom(distZone))
    })

    return {
      geom: postProcessZone(geom, true),
      tzid: zone.tzid
    }
  })

  callback()
}

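// Stream every zone (and, for the oceans variant, every ocean boundary) into
// <dist_dir>/combined.json and <dist_dir>/combined-with-oceans.json as GeoJSON
// Features.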
var combineAndWriteZones = function (callback) {
  const regularWriter = new FeatureWriterStream(distDir + '/combined.json')
  const oceanWriter = new FeatureWriterStream(distDir + '/combined-with-oceans.json')
  var zones = Object.keys(zoneCfg)

  zones.forEach(zoneName => {
    const feature = {
      type: 'Feature',
      properties: { tzid: zoneName },
      geometry: geomToGeoJson(getDistZoneGeom(zoneName))
    }
    const stringified = JSON.stringify(feature)
    regularWriter.add(stringified)
    oceanWriter.add(stringified)
  })
  oceanZoneBoundaries.forEach(boundary => {
    var feature = {
      type: 'Feature',
      properties: { tzid: boundary.tzid },
      geometry: boundary.geom
    }
    oceanWriter.add(JSON.stringify(feature))
  })
  asynclib.parallel([
    cb => regularWriter.end(cb),
    cb => oceanWriter.end(cb)
  ], callback)
}

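// Fetch the previous release from the GitHub API: look up the latest release,
// find its timezones-with-oceans.geojson asset, then download and unzip it
// into ./dist so it can be diffed against the current build (skipped if the
// file already exists locally).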
var downloadLastRelease = function (cb) {
  // download latest release info
  https.get(
    {
      headers: { 'user-agent': 'timezone-boundary-builder' },
      host: 'api.github.com',
      path: '/repos/evansiroky/timezone-boundary-builder/releases/latest'
    },
    function (res) {
      var data = ''
      res.on('data', function (chunk) {
        data += chunk
      })
      res.on('end', function () {
        data = JSON.parse(data)
        // determine last release version name and download link
        const lastReleaseName = data.name
        lastReleaseJSONfile = `./dist/${lastReleaseName}.json`
        let lastReleaseDownloadUrl
        for (var i = 0; i < data.assets.length; i++) {
          if (data.assets[i].browser_download_url.indexOf('timezones-with-oceans.geojson') > -1) {
            lastReleaseDownloadUrl = data.assets[i].browser_download_url
          }
        }
        if (!lastReleaseDownloadUrl) {
          return cb(new Error('geojson not found'))
        }

        // check for file that got downloaded
        fs.stat(lastReleaseJSONfile, function (err) {
          if (!err) {
            // file found, skip download steps
            return cb()
          }
          // file not found, download
          console.log(`Downloading latest release to ${lastReleaseJSONfile}.zip`)
          https.get({
            headers: { 'user-agent': 'timezone-boundary-builder' },
            host: 'github.com',
            path: lastReleaseDownloadUrl.replace('https://github.com', '')
          }, function (response) {
            var file = fs.createWriteStream(`${lastReleaseJSONfile}.zip`)
            response.pipe(file)
            file.on('finish', function () {
              file.close((err) => {
                if (err) return cb(err)
                // unzip file
                console.log('unzipping latest release')
                exec(
                  `unzip -o ${lastReleaseJSONfile} -d dist`,
                  err => {
                    if (err) { return cb(err) }
                    console.log('unzipped file')
                    console.log('moving unzipped file')
                    // might need to change this after changes to how files are
                    // zipped after 2020a
                    fs.copyFile(
                      path.join(
                        'dist',
                        'dist',
                        'combined-with-oceans.json'
                      ),
                      lastReleaseJSONfile,
                      cb
                    )
                  }
                )
              })
            })
          }).on('error', cb)
        })
      })
    }
  )
}

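// Compare the current build against the previous release and write the
// differences to <dist_dir>/additions.json and <dist_dir>/removals.json.
// Geometries are buffered slightly before diffing because some zones otherwise
// take a very long time to diff.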
var analyzeChangesFromLastRelease = function (cb) {
  // load last release data into memory
  console.log('loading previous release into memory')
  const lastReleaseData = require(lastReleaseJSONfile)

  // organize each feature of the last release by tzid (its geojson is converted to JSTS format later, when diffed)
  const lastReleaseZones = {}
  lastReleaseData.features.forEach(
    feature => {
      lastReleaseZones[feature.properties.tzid] = feature
    }
  )

  // generate set of keys from last release and current
  const zoneNames = new Set()
  Object.keys(distZones).forEach(zoneName => zoneNames.add(zoneName))
  Object.keys(lastReleaseZones).forEach(zoneName => zoneNames.add(zoneName))

  // create diff for each zone
  const analysisProgress = new ProgressStats(
    'Analyzing diffs',
    zoneNames.size
  )
  const additionsWriter = new FeatureWriterStream(distDir + '/additions.json')
  const removalsWriter = new FeatureWriterStream(distDir + '/removals.json')
  zoneNames.forEach(zoneName => {
    analysisProgress.beginTask(zoneName, true)
    if (distZones[zoneName] && lastReleaseZones[zoneName]) {
      // some zones take forever to diff unless they are buffered, so buffer by
      // just a small amount
      const lastReleaseGeom = geoJsonToGeom(
        lastReleaseZones[zoneName].geometry
      ).buffer(bufferDistance)
      const curDataGeom = getDistZoneGeom(zoneName).buffer(bufferDistance)

      // don't diff equal geometries
      if (curDataGeom.equals(lastReleaseGeom)) return

      // diff current - last = additions
      const addition = debugGeo(
        'diff',
        curDataGeom,
        lastReleaseGeom,
        false,
        true
      )
      if (addition.getArea() > 0.0001) {
        additionsWriter.add(JSON.stringify({
          type: 'Feature',
          properties: { tzid: zoneName },
          geometry: geomToGeoJson(addition)
        }))
      }

      // diff last - current = removals
      const removal = debugGeo(
        'diff',
        lastReleaseGeom,
        curDataGeom,
        false,
        true
      )
      if (removal.getArea() > 0.0001) {
        removalsWriter.add(JSON.stringify({
          type: 'Feature',
          properties: { tzid: zoneName },
          geometry: geomToGeoJson(removal)
        }))
      }
    } else if (distZones[zoneName]) {
      additionsWriter.add(JSON.stringify({
        type: 'Feature',
        properties: { tzid: zoneName },
        geometry: geomToGeoJson(getDistZoneGeom(zoneName))
      }))
    } else {
      removalsWriter.add(JSON.stringify(lastReleaseZones[zoneName]))
    }
  })

  // write files
  asynclib.parallel([
    wcb => additionsWriter.end(wcb),
    wcb => removalsWriter.end(wcb)
  ], cb)
}

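// The overall build pipeline, run via asynclib.auto below. Each key is a task
// and the array entries name the tasks it depends on, so independent steps can
// run as soon as their prerequisites finish.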
const autoScript = {
  makeDownloadsDir: function (cb) {
    overallProgress.beginTask('Creating downloads dir')
    safeMkdir(downloadsDir, cb)
  },
  makeDistDir: function (cb) {
    overallProgress.beginTask('Creating dist dir')
    safeMkdir(distDir, cb)
  },
  getOsmBoundaries: ['makeDownloadsDir', function (results, cb) {
    overallProgress.beginTask('Downloading osm boundaries')
    asynclib.eachSeries(Object.keys(osmBoundarySources), downloadOsmBoundary, cb)
  }],
  cleanDownloadFolder: ['makeDistDir', 'getOsmBoundaries', function (results, cb) {
    overallProgress.beginTask('cleanDownloadFolder')
    const downloadedFilenames = Object.keys(osmBoundarySources).map(name => `${name}.json`)
    fs.readdir(downloadsDir, (err, files) => {
      if (err) return cb(err)
      asynclib.each(
        files,
        (file, fileCb) => {
          if (downloadedFilenames.indexOf(file) === -1) {
            return fs.unlink(path.join(downloadsDir, file), fileCb)
          }
          fileCb()
        },
        cb
      )
    })
  }],
  zipInputData: ['cleanDownloadFolder', function (results, cb) {
    overallProgress.beginTask('Zipping up input data')
    exec('zip -j ' + distDir + '/input-data.zip ' + downloadsDir +
         '/* timezones.json osmBoundarySources.json expectedZoneOverlaps.json', cb)
  }],
  downloadLastRelease: ['makeDistDir', function (results, cb) {
    if (argv.skip_analyze_diffs) {
      overallProgress.beginTask('WARNING: Skipping download of last release for analysis!')
      cb()
    } else {
      overallProgress.beginTask('Downloading last release for analysis')
      downloadLastRelease(cb)
    }
  }],
  createZones: ['makeDistDir', 'getOsmBoundaries', function (results, cb) {
    overallProgress.beginTask('Creating timezone boundaries')
    asynclib.each(Object.keys(zoneCfg), makeTimezoneBoundary, cb)
  }],
  validateZones: ['createZones', function (results, cb) {
    overallProgress.beginTask('Validating timezone boundaries')
    loadDistZonesIntoMemory()
    if (argv.skip_validation) {
      console.warn('WARNING: Skipping validation!')
      cb()
    } else {
      cb(validateTimezoneBoundaries())
    }
  }],
  addOceans: ['validateZones', function (results, cb) {
    overallProgress.beginTask('Adding oceans')
    addOceans(cb)
  }],
  mergeZones: ['addOceans', function (results, cb) {
    overallProgress.beginTask('Merging zones')
    combineAndWriteZones(cb)
  }],
  zipGeoJson: ['mergeZones', function (results, cb) {
    if (argv.skip_zip) {
      overallProgress.beginTask('Skipping zip')
      return cb()
    }
    overallProgress.beginTask('Zipping geojson')
    const zipFile = distDir + '/timezones.geojson.zip'
    const jsonFile = distDir + '/combined.json'
    exec('zip -j ' + zipFile + ' ' + jsonFile, cb)
  }],
  zipGeoJsonWithOceans: ['mergeZones', function (results, cb) {
    if (argv.skip_zip) {
      overallProgress.beginTask('Skipping with oceans zip')
      return cb()
    }
    overallProgress.beginTask('Zipping geojson with oceans')
    const zipFile = distDir + '/timezones-with-oceans.geojson.zip'
    const jsonFile = distDir + '/combined-with-oceans.json'
    exec('zip -j ' + zipFile + ' ' + jsonFile, cb)
  }],
  makeShapefile: ['mergeZones', function (results, cb) {
    if (argv.skip_shapefile) {
      overallProgress.beginTask('Skipping shapefile creation')
      return cb()
    }
    overallProgress.beginTask('Converting from geojson to shapefile')
    const shapeFileGlob = distDir + '/combined-shapefile.*'
    rimraf.sync(shapeFileGlob)
    const shapeFile = distDir + '/combined-shapefile.shp'
    const jsonFile = distDir + '/combined.json'
    exec(
      'ogr2ogr -f "ESRI Shapefile" ' + shapeFile + ' ' + jsonFile,
      function (err, stdout, stderr) {
        if (err) { return cb(err) }
        const shapeFileZip = distDir + '/timezones.shapefile.zip'
        exec('zip -j ' + shapeFileZip + ' ' + shapeFileGlob, cb)
      }
    )
  }],
  makeShapefileWithOceans: ['mergeZones', function (results, cb) {
    if (argv.skip_shapefile) {
      overallProgress.beginTask('Skipping with oceans shapefile creation')
      return cb()
    }
    overallProgress.beginTask('Converting from geojson with oceans to shapefile')
    const shapeFileGlob = distDir + '/combined-shapefile-with-oceans.*'
    rimraf.sync(shapeFileGlob)
    const shapeFile = distDir + '/combined-shapefile-with-oceans.shp'
    const jsonFile = distDir + '/combined-with-oceans.json'
    exec(
      'ogr2ogr -f "ESRI Shapefile" ' + shapeFile + ' ' + jsonFile,
      function (err, stdout, stderr) {
        if (err) { return cb(err) }
        const shapeFileZip = distDir + '/timezones-with-oceans.shapefile.zip'
        exec('zip -j ' + shapeFileZip + ' ' + shapeFileGlob, cb)
      }
    )
  }],
  makeListOfTimeZoneNames: function (cb) {
    overallProgress.beginTask('Writing timezone names to file')
    let zoneNames = Object.keys(zoneCfg)
    oceanZones.forEach(oceanZone => {
      zoneNames.push(oceanZone.tzid)
    })
    if (includedZones.length > 0) {
      zoneNames = zoneNames.filter(zoneName => includedZones.indexOf(zoneName) > -1)
    }
    if (excludedZones.length > 0) {
      zoneNames = zoneNames.filter(zoneName => excludedZones.indexOf(zoneName) === -1)
    }
    fs.writeFile(
      distDir + '/timezone-names.json',
      JSON.stringify(zoneNames),
      cb
    )
  },
  analyzeChangesFromLastRelease: ['downloadLastRelease', 'mergeZones', function (results, cb) {
    if (argv.skip_analyze_diffs) {
      overallProgress.beginTask('WARNING: Skipping analysis of changes from last release!')
      cb()
    } else {
      overallProgress.beginTask('Analyzing changes from last release')
      analyzeChangesFromLastRelease(cb)
    }
  }]
}

const overallProgress = new ProgressStats('Overall', Object.keys(autoScript).length)

asynclib.auto(autoScript, function (err, results) {
  console.log('done')
  if (err) {
    console.log('error!', err)
  }
})