From 0801aaf20002a51b511b927594a682a8337d4275 Mon Sep 17 00:00:00 2001 From: Kadir Date: Sun, 14 Sep 2025 19:44:03 +0100 Subject: [PATCH] More ruff fixes (#2) * adding ruff auto check for pull requests as well as fixing all ruff errors * More ruff fixes: forgot half of the ruff checks Forgot to do a git add all :D --------- Co-authored-by: Kadir --- crawler/5_routing.py | 2 +- crawler/alembic/versions/6363b18a22ca_initial.py | 4 ++-- crawler/api/app.py | 2 +- crawler/data_access.py | 6 ------ crawler/frontend/src/components/Map.tsx | 8 ++++---- crawler/listing_processor.py | 6 +++--- crawler/main.py | 1 - crawler/models/listing.py | 2 +- crawler/proof_of_concept/listings.py | 1 - crawler/proof_of_concept/routing_routing.py | 2 +- crawler/pyproject.toml | 7 +++++++ crawler/rec/rightmove_parser.py | 8 -------- crawler/repositories/listing_repository.py | 4 ++-- crawler/ui_exporter.py | 2 +- 14 files changed, 23 insertions(+), 32 deletions(-) delete mode 100644 crawler/rec/rightmove_parser.py diff --git a/crawler/5_routing.py b/crawler/5_routing.py index 455ccd5..090316b 100644 --- a/crawler/5_routing.py +++ b/crawler/5_routing.py @@ -36,7 +36,7 @@ async def update_routing_info( routes_data = routing.transit_route( listing.latitude, - listing.longtitude, + listing.longitude, destination_mode.destination_address, destination_mode.travel_mode, ) diff --git a/crawler/alembic/versions/6363b18a22ca_initial.py b/crawler/alembic/versions/6363b18a22ca_initial.py index 22b52c9..92e8871 100644 --- a/crawler/alembic/versions/6363b18a22ca_initial.py +++ b/crawler/alembic/versions/6363b18a22ca_initial.py @@ -29,7 +29,7 @@ def upgrade() -> None: sa.Column('square_meters', sa.Float(), nullable=True), sa.Column('agency', sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column('council_tax_band', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('longtitude', sa.Float(), nullable=False), + sa.Column('longitude', sa.Float(), nullable=False), sa.Column('latitude', 
sa.Float(), nullable=False), sa.Column('price_history_json', sa.TEXT(), nullable=False), sa.Column('listing_site', sa.Enum('RIGHTMOVE', name='listingsite'), nullable=False), @@ -49,7 +49,7 @@ def upgrade() -> None: sa.Column('square_meters', sa.Float(), nullable=True), sa.Column('agency', sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column('council_tax_band', sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column('longtitude', sa.Float(), nullable=False), + sa.Column('longitude', sa.Float(), nullable=False), sa.Column('latitude', sa.Float(), nullable=False), sa.Column('price_history_json', sa.TEXT(), nullable=False), sa.Column('listing_site', sa.Enum('RIGHTMOVE', name='listingsite'), nullable=False), diff --git a/crawler/api/app.py b/crawler/api/app.py index 3be40c4..2100eff 100644 --- a/crawler/api/app.py +++ b/crawler/api/app.py @@ -101,7 +101,7 @@ async def get_task_status( task_result = listing_tasks.dump_listings_task.AsyncResult(task_id) try: result = json.dumps(task_result.result) - except: + except Exception: result = str(task_result.result) return { diff --git a/crawler/data_access.py b/crawler/data_access.py index db30b0c..adcbc51 100644 --- a/crawler/data_access.py +++ b/crawler/data_access.py @@ -399,13 +399,7 @@ class Listing: for item in data ] - @property - def longtitude(self) -> float: - return self.detailobject["property"]["longitude"] - @property - def latitude(self) -> float: - return self.detailobject["property"]["latitude"] @property def listing_site(self) -> ListingSite: diff --git a/crawler/frontend/src/components/Map.tsx b/crawler/frontend/src/components/Map.tsx index 7471f97..0b1337c 100644 --- a/crawler/frontend/src/components/Map.tsx +++ b/crawler/frontend/src/components/Map.tsx @@ -195,18 +195,18 @@ export function Map( .call(xAxis); } - function openListingsDialog(longtitude: number, latitude: number) { + function openListingsDialog(longitude: number, latitude: number) { const searchBuffer = 0.001 // ~100m const 
properties = heatmap._tree.search({ - minX: longtitude - searchBuffer, - maxX: longtitude + searchBuffer, + minX: longitude - searchBuffer, + maxX: longitude + searchBuffer, minY: latitude - searchBuffer, maxY: latitude + searchBuffer }) if (properties.length > 0) { const listingDialogPopup = getListingDialog(properties); new mapboxgl.Popup() - .setLngLat([longtitude, latitude]) + .setLngLat([longitude, latitude]) .setHTML(renderToString(listingDialogPopup)) .setMaxWidth("500px") .addTo(mapRef.current); diff --git a/crawler/listing_processor.py b/crawler/listing_processor.py index 0ae7f26..020016f 100644 --- a/crawler/listing_processor.py +++ b/crawler/listing_processor.py @@ -113,7 +113,7 @@ class FetchListingDetailsStep(Step): council_tax_band=listing_details["property"]["councilTaxInfo"]["content"][ 0 ]["value"], - longtitude=listing_details["property"]["longitude"], + longitude=listing_details["property"]["longitude"], latitude=listing_details["property"]["latitude"], price_history_json="{}", # TODO: should upsert from existing listing_site=ListingSite.RIGHTMOVE, @@ -153,8 +153,8 @@ class FetchImagesStep(Step): "floorplans", [] ) client_timeout = aiohttp.ClientTimeout(total=30) - for floorplan in all_floorplans: - url = floorplan["url"] + for floorplan_obj in all_floorplans: + url = floorplan_obj["url"] picname = url.split("/")[-1] floorplan_path = Path(base_path, str(listing.id), "floorplans", picname) if floorplan_path.exists(): diff --git a/crawler/main.py b/crawler/main.py index 6c05f4e..a7acbf9 100644 --- a/crawler/main.py +++ b/crawler/main.py @@ -185,7 +185,6 @@ def dump_images(ctx: click.core.Context): @cli.command() @click.pass_context def detect_floorplan(ctx: click.core.Context): - data_dir = ctx.obj["data_dir"] click.echo(f"Running detect_floorplan for listings stored in {engine.url}") repository = ListingRepository(engine=engine) asyncio.run(detect_floorplan_module.detect_floorplan(repository)) diff --git a/crawler/models/listing.py 
b/crawler/models/listing.py index 024bd8b..cd0584b 100644 --- a/crawler/models/listing.py +++ b/crawler/models/listing.py @@ -59,7 +59,7 @@ class Listing(SQLModel, table=False): square_meters: float | None = Field(default=None, nullable=True, index=True) agency: str | None = Field(default=None, nullable=True) council_tax_band: str | None = Field(default=None, nullable=True) - longtitude: float = Field(nullable=False) + longitude: float = Field(nullable=False) latitude: float = Field(nullable=False) # price_history: List[Dict[str, Any]] = Field(default_factory=list, sa_type=JSON) price_history_json: str = Field(sa_type=TEXT) diff --git a/crawler/proof_of_concept/listings.py b/crawler/proof_of_concept/listings.py index ab36150..eb5b807 100644 --- a/crawler/proof_of_concept/listings.py +++ b/crawler/proof_of_concept/listings.py @@ -33,7 +33,6 @@ response = requests.get( verify=False, ) -import requests headers = { "Host": "api.rightmove.co.uk", diff --git a/crawler/proof_of_concept/routing_routing.py b/crawler/proof_of_concept/routing_routing.py index 6f043c5..d0990d7 100644 --- a/crawler/proof_of_concept/routing_routing.py +++ b/crawler/proof_of_concept/routing_routing.py @@ -61,7 +61,7 @@ def extract_time(d): distance_per_transit[step["travelMode"]] += step.get("distanceMeters", 0) print( - f"dis {distance}, dur {duration}, duration per transit {dict(duration_per_transit)}, distance per transit {dict(distance_per_transit)}" + f"dis {distance}, dur {duration}, duration per transit {dict(duration_per_transit)}, distance per transit {dict(distance_per_transit)}, duration_static {duration_static}" ) diff --git a/crawler/pyproject.toml b/crawler/pyproject.toml index 79fd5a7..b5b3932 100644 --- a/crawler/pyproject.toml +++ b/crawler/pyproject.toml @@ -46,3 +46,10 @@ podman-compose = "^1.5.0" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.ruff] +# List of rules (error codes) to ignore +lint.ignore = [ + "E741", # 
Ambiguous name +] +exclude = ["*.ipynb"] \ No newline at end of file diff --git a/crawler/rec/rightmove_parser.py b/crawler/rec/rightmove_parser.py deleted file mode 100644 index 350af98..0000000 --- a/crawler/rec/rightmove_parser.py +++ /dev/null @@ -1,8 +0,0 @@ -def parse_listing_json_entry(d): - id = d["identifier"] - # address = d['address'] - propertyType = d["propertyType"] - price = d["price"] - latitude = d["latitude"] - longitude = d["longitude"] - updated_date = d["updateDate"] diff --git a/crawler/repositories/listing_repository.py b/crawler/repositories/listing_repository.py index fa66597..c2bcaf8 100644 --- a/crawler/repositories/listing_repository.py +++ b/crawler/repositories/listing_repository.py @@ -160,7 +160,7 @@ class ListingRepository: square_meters=await listing.sqm_ocr(), agency=listing.agency, council_tax_band=listing.councilTaxBand, - longtitude=listing.longtitude, + longitude=listing.longitude, latitude=listing.latitude, price_history_json=modelListing.serialize_price_history( listing.priceHistory ), @@ -180,7 +180,7 @@ class ListingRepository: square_meters=await listing.sqm_ocr(), agency=listing.agency, council_tax_band=listing.councilTaxBand, - longtitude=listing.longtitude, + longitude=listing.longitude, latitude=listing.latitude, price_history_json=modelListing.serialize_price_history( listing.priceHistory diff --git a/crawler/ui_exporter.py b/crawler/ui_exporter.py index de762cf..d85b5a7 100644 --- a/crawler/ui_exporter.py +++ b/crawler/ui_exporter.py @@ -46,7 +46,7 @@ async def export_immoweb( }, "geometry": { "coordinates": [ - listing.longtitude, + listing.longitude, listing.latitude, ], "type": "Point",