diff --git a/main.go b/main.go
index 6de25d9..2c9201c 100644
--- a/main.go
+++ b/main.go
@@ -45,22 +45,17 @@ var cli struct {
 	} `cmd:"" help:"Fetch one tile from a local or remote archive and output on stdout."`
 
 	Extract struct {
-		Input           string  `arg:"" help:"Input local or remote archive."`
-		Output          string  `arg:"" help:"Output archive." type:"path"`
-		Bucket          string  `help:"Remote bucket of input archive."`
-		Region          string  `help:"local GeoJSON Polygon or MultiPolygon file for area of interest." type:"existingfile"`
-		Maxzoom         int8    `default:-1 help:"Maximum zoom level, inclusive."`
-		DownloadThreads int     `default:4 help:"Number of download threads."`
-		DryRun          bool    `help:"Calculate tiles to extract, but don't download them."`
-		Overfetch       float32 `default:0.05 help:"What ratio of extra data to download to minimize # requests; 0.2 is 20%"`
+		Input           string  `arg:"" help:"Input local or remote archive."`
+		Output          string  `arg:"" help:"Output archive." type:"path"`
+		Bucket          string  `help:"Remote bucket of input archive."`
+		Region          string  `help:"local GeoJSON Polygon or MultiPolygon file for area of interest." type:"existingfile"`
+		Bbox            string  `help:"bbox area of interest" type:"string"`
+		Maxzoom         int8    `default:-1 help:"Maximum zoom level, inclusive."`
+		DownloadThreads int     `default:4 help:"Number of download threads."`
+		DryRun          bool    `help:"Calculate tiles to extract, but don't download them."`
+		Overfetch       float32 `default:0.05 help:"What ratio of extra data to download to minimize # requests; 0.2 is 20%"`
 	} `cmd:"" help:"Create an archive from a larger archive for a subset of zoom levels or geographic region."`
 
-	Makesync struct {
-		Input        string `arg:"" type:"existingfile"`
-		BlockSize    int    `default:1000 help:"The block size, in # of tiles."`
-		HashFunction string `default:fnv1a help:"The hash function."`
-	} `cmd:"" help:"Generates an **experimental** sync control file (.pmtiles.sync) for a local archive."`
-
 	Stats struct {
 		Input string `arg:"" type:"existingfile"`
 	} `cmd:"" help:"Add a vector tile statistics file (.tilestats.tsv.gz) used for further analysis with DuckDB."`
@@ -131,7 +126,7 @@ func main() {
 		logger.Printf("Serving %s %s on port %d with Access-Control-Allow-Origin: %s\n", cli.Serve.Bucket, cli.Serve.Path, cli.Serve.Port, cli.Serve.Cors)
 		logger.Fatal(http.ListenAndServe(":"+strconv.Itoa(cli.Serve.Port), nil))
 	case "extract <input> <output>":
-		err := pmtiles.Extract(logger, cli.Extract.Bucket, cli.Extract.Input, cli.Extract.Maxzoom, cli.Extract.Region, cli.Extract.Output, cli.Extract.DownloadThreads, cli.Extract.Overfetch, cli.Extract.DryRun)
+		err := pmtiles.Extract(logger, cli.Extract.Bucket, cli.Extract.Input, cli.Extract.Maxzoom, cli.Extract.Region, cli.Extract.Bbox, cli.Extract.Output, cli.Extract.DownloadThreads, cli.Extract.Overfetch, cli.Extract.DryRun)
 		if err != nil {
 			logger.Fatalf("Failed to extract, %v", err)
 		}
diff --git a/pmtiles/extract.go b/pmtiles/extract.go
index 476b982..56bd8e7 100644
--- a/pmtiles/extract.go
+++ b/pmtiles/extract.go
@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"github.com/RoaringBitmap/roaring/roaring64"
 	"github.com/dustin/go-humanize"
+	"github.com/paulmach/orb"
 	"github.com/schollz/progressbar/v3"
 	"golang.org/x/sync/errgroup"
 	"io"
@@ -248,7 +249,7 @@ func MergeRanges(ranges []SrcDstRange, overfetch float32) (*list.List, uint64) {
 // 10. write the leaf directories (if any)
 // 11. Get all tiles, and write directly to the output.
 
-func Extract(logger *log.Logger, bucketURL string, key string, maxzoom int8, region_file string, output string, download_threads int, overfetch float32, dry_run bool) error {
+func Extract(logger *log.Logger, bucketURL string, key string, maxzoom int8, region_file string, bbox string, output string, download_threads int, overfetch float32, dry_run bool) error {
 	// 1. fetch the header
 
 	fmt.Println("WARNING: extract is an experimental feature and results may not be suitable for production use.")
@@ -297,16 +298,29 @@ func Extract(logger *log.Logger, bucketURL string, key string, maxzoom int8, reg
 	}
 
 	var relevant_set *roaring64.Bitmap
-	if region_file != "" {
+	if region_file != "" || bbox != "" {
+		if region_file != "" && bbox != "" {
+			return fmt.Errorf("only one of region and bbox can be specified")
+		}
 
-		// 2. construct a relevance bitmap
-		dat, _ := ioutil.ReadFile(region_file)
-		multipolygon, err := UnmarshalRegion(dat)
+		var multipolygon orb.MultiPolygon
 
-		if err != nil {
-			return err
+		if region_file != "" {
+			dat, _ := ioutil.ReadFile(region_file)
+			multipolygon, err = UnmarshalRegion(dat)
+
+			if err != nil {
+				return err
+			}
+		} else {
+			multipolygon, err = BboxRegion(bbox)
+			if err != nil {
+				return err
+			}
 		}
 
+		// 2. construct a relevance bitmap
+
 		bound := multipolygon.Bound()
 
 		boundary_set, interior_set := bitmapMultiPolygon(uint8(maxzoom), multipolygon)
diff --git a/pmtiles/region.go b/pmtiles/region.go
index b73ea69..2ecc8bd 100644
--- a/pmtiles/region.go
+++ b/pmtiles/region.go
@@ -4,8 +4,34 @@ import (
 	"fmt"
 	"github.com/paulmach/orb"
 	"github.com/paulmach/orb/geojson"
+	"strconv"
+	"strings"
 )
 
+func BboxRegion(bbox string) (orb.MultiPolygon, error) {
+	parts := strings.Split(bbox, ",")
+	if len(parts) != 4 {
+		return nil, fmt.Errorf("bbox must be 4 comma-separated values: min_lon,min_lat,max_lon,max_lat")
+	}
+	min_lon, err := strconv.ParseFloat(parts[0], 64)
+	if err != nil {
+		return nil, err
+	}
+	min_lat, err := strconv.ParseFloat(parts[1], 64)
+	if err != nil {
+		return nil, err
+	}
+	max_lon, err := strconv.ParseFloat(parts[2], 64)
+	if err != nil {
+		return nil, err
+	}
+	max_lat, err := strconv.ParseFloat(parts[3], 64)
+	if err != nil {
+		return nil, err
+	}
+	return orb.MultiPolygon{{{{min_lon, max_lat}, {max_lon, max_lat}, {max_lon, min_lat}, {min_lon, min_lat}, {min_lon, max_lat}}}}, nil
+}
+
 func UnmarshalRegion(data []byte) (orb.MultiPolygon, error) {
 	fc, err := geojson.UnmarshalFeatureCollection(data)
 
diff --git a/pmtiles/region_test.go b/pmtiles/region_test.go
index ad17466..d33ffa8 100644
--- a/pmtiles/region_test.go
+++ b/pmtiles/region_test.go
@@ -5,6 +5,15 @@ import (
 	"testing"
 )
 
+func TestBboxRegion(t *testing.T) {
+	result, err := BboxRegion("-1.906033,50.680367,1.097501,52.304934")
+	assert.Nil(t, err)
+	assert.Equal(t, -1.906033, result[0][0][0][0])
+	assert.Equal(t, 52.304934, result[0][0][0][1])
+	assert.Equal(t, 1.097501, result[0][0][2][0])
+	assert.Equal(t, 50.680367, result[0][0][2][1])
+}
+
 func TestRawPolygonRegion(t *testing.T) {
 	result, err := UnmarshalRegion([]byte(`{
 		"type": "Polygon",