diff --git a/crawler/5_routing.py b/crawler/5_routing.py
index 81b58df..1ce61df 100644
--- a/crawler/5_routing.py
+++ b/crawler/5_routing.py
@@ -1,14 +1,15 @@
+import pathlib
 from data_access import Listing
 from tqdm import tqdm
 from geopy.distance import geodesic
 import logging
 
 
-def calculate_route():
+def calculate_route(listing_paths: list[str]):
     log = logging.getLogger(__name__)
     log.setLevel(logging.INFO)
 
-    listings = Listing.get_all_listings()
+    listings = Listing.get_all_listings(listing_paths)
 
     BROCK_STREET_LAT_LONG = 51.52570434674584, -0.13956495005056113
     # reduce listings to everything within 7 miles
@@ -49,7 +50,8 @@ def calculate_route():
 
 
 def main():
-    calculate_route()
+    listing_paths = sorted(list(pathlib.Path("data/rs").glob("*/listing.json")))
+    calculate_route(listing_paths)
 
 
 if __name__ == "__main__":
diff --git a/crawler/main.py b/crawler/main.py
index 6fe413c..cc2e61d 100644
--- a/crawler/main.py
+++ b/crawler/main.py
@@ -84,9 +84,12 @@ def detect_floorplan(ctx: click.core.Context):
 
 
 @cli.command()
-def routing():
-    click.echo('Running routing')
-    routing_module.calculate_route()
+@click.pass_context
+def routing(ctx: click.core.Context):
+    data_dir = ctx.obj['data_dir']
+    click.echo(f'Running routing for listings in {data_dir}')
+    listing_paths = sorted(list(pathlib.Path(data_dir).glob("*/listing.json")))
+    routing_module.calculate_route(listing_paths)
 
 
 if __name__ == '__main__':