#!/bin/bash

PROGNAME=${0##*/}

set -e

# When preparing a camp GPX file, we use old holidays and Overpass Turbo to
# collect candidates. The problem with that is that there are bound to be
# duplicates in the GPX file. There are *two* types of duplicates:
#
# 1) the camp from Overpass Turbo is already in our spreadsheets but with a
#    whole different set of attributes (because we strip the attributes down
#    to only what we need) and possibly even with a very slightly different
#    coordinate.
# 2) the camp is in two or more of our spreadsheets.
#
# This script reports these two issues as, respectively, "already catalogued?"
# (meaning it's *probably* a duplicate) and "duplicate" (meaning it's
# *definitely* a duplicate).
#
# The fixes should be:
#
# 1) check OSM to see if it is the same camp site. If it is then delete the
#    uncatalogued entry from Overpass Turbo and if it isn't then retain it by
#    giving it a '-camp-000'-like name. (The rest of the details about the
#    camp will be collected later.)
# 2) remove one of them. If only one of them is not starred then remove that
#    one, else it doesn't matter which you remove.

# Configurable stuff
TOLERANCE=0.004        # Deliberately quite high; tune later
#TOLERANCE=0.0001035

# Other globals

# Includes
. $(miniade) || { echo "${0##*/}: ERROR: miniade failed (hint: run 'miniade' to see error)" >&2; exit 1; }

main()
{
	# Process arguments
	[ $# = 1 ] || usage
	MERGED_GPX=$1

	# Sanity checks and derivations
	[[ $MERGED_GPX =~ \.gpx$ ]] || error "not a GPX file?"
	CSV_FILE=/tmp/$(basename $MERGED_GPX .gpx).csv

	# Guts
	info "converting merged GPX to CSV ..."
	gpsbabel -i gpx -f $MERGED_GPX -o unicsv -F $CSV_FILE
	info "loading into sqlite ..."
	rm -f /tmp/$PROGNAME.sqlite
	sqlite3 /tmp/$PROGNAME.sqlite <
}

usage()
{
	echo "usage: $PROGNAME ] " >&2
	exit 1
}

main "$@"
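
# Hypothetical example only: one way the coordinate-proximity check described
# in the header ("already catalogued?") could be expressed in SQL over the
# CSV once it has been loaded into sqlite. The table name 'waypoints' and the
# columns 'No', 'Name', 'Latitude' and 'Longitude' are assumptions (roughly
# gpsbabel's unicsv fields), not names taken from this script, and 0.004 is
# just the current value of $TOLERANCE written out literally.
#
#   SELECT a.Name AS candidate, b.Name AS existing
#   FROM   waypoints a
#   JOIN   waypoints b ON a.No < b.No
#   WHERE  ABS(a.Latitude  - b.Latitude)  < 0.004
#     AND  ABS(a.Longitude - b.Longitude) < 0.004;
#
# Pairs returned by a query like this are *probable* duplicates (type 1);
# exact same-name pairs across spreadsheets would be the *definite*
# duplicates (type 2).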