# wrongmove/crawler/.env.sample
# Copy me to .env and source me (e.g. `source .env`).
# All variables are exported so they are visible to child processes
# (the crawler, Celery workers, etc.).

# Google Maps routing API key.
# Obtain from https://console.cloud.google.com/google/maps-apis/; prices - https://developers.google.com/maps/billing-and-pricing/pricing
export ROUTING_API_KEY="<CHANGE ME>"

# Database connection string (SQLAlchemy-style URL).
# export DB_CONNECTION_STRING="mysql://wrongmove:wrongmove@localhost:3306/wrongmove" # example for mysql
export DB_CONNECTION_STRING="sqlite:///data/wrongmove.db" # by default use SQLite locally

# Celery: Redis broker for background tasks, separate DB for results.
export CELERY_BROKER_URL="redis://localhost:6379/0" # processing background tasks
export CELERY_RESULT_BACKEND="redis://localhost:6379/1"

# Periodic scraping schedules (JSON array).
# Each schedule has: name, enabled, hour, minute, day_of_week, listing_type, min/max_bedrooms, min/max_price, district_names, furnish_types
# Cron fields: minute (0-59), hour (0-23), day_of_week (0-6, 0=Sunday)
# Example:
# SCRAPE_SCHEDULES='[{"name":"Daily RENT","listing_type":"RENT","hour":"2","min_bedrooms":2,"max_bedrooms":3,"min_price":2000,"max_price":4000}]'
# Multiple schedules:
# SCRAPE_SCHEDULES='[{"name":"RENT 2am","listing_type":"RENT","hour":"2"},{"name":"BUY 4am","listing_type":"BUY","hour":"4"}]'
# NOTE: exported (unlike the original sample) so that child processes can
# actually see it when the file is sourced; empty means "no schedules".
export SCRAPE_SCHEDULES=