fix session-expired handling and add check-interval variance
parent 867c6f3152
commit b4e84e430e
6 changed files with 211 additions and 9 deletions
main.py (48 changed lines)
@@ -12,6 +12,7 @@ from pathlib import Path
 from autoclean_debug import autoclean_debug_material
 from datetime import datetime, timezone
 import time
+import random
 
 
 # --- Environment & Logging Setup ---
@@ -46,7 +47,15 @@ logger = logging.getLogger(__name__)  # Use named logger
 logger.info("Bot starting | Logs: data/monitor.log + console")
 
 # Interval (seconds) between checks for new listings
-CHECK_INTERVAL = int(os.getenv("CHECK_INTERVAL", 300))  # Default: 300 seconds
+CHECK_INTERVAL = int(os.getenv("CHECK_INTERVAL", 150))  # Default: 150 seconds (2.5 min)
+CHECK_INTERVAL_VARIANCE = int(os.getenv("CHECK_INTERVAL_VARIANCE", 30))  # Default: ±30 seconds randomization
+
+def get_randomized_interval() -> int:
+    """Get check interval with random variance to avoid bot detection patterns."""
+    variance = random.randint(-CHECK_INTERVAL_VARIANCE, CHECK_INTERVAL_VARIANCE)
+    interval = CHECK_INTERVAL + variance
+    # Ensure minimum of 60 seconds to avoid excessive load
+    return max(60, interval)
 
 
 def validate_config() -> bool:
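For reference, a standalone sanity check of the new interval logic (not part of the commit; it copies the function from the hunk above and assumes nothing beyond the 150/30 defaults shown there):

# Sanity check: with CHECK_INTERVAL=150 and CHECK_INTERVAL_VARIANCE=30,
# every computed sleep lands in [120, 180] seconds; the 60-second floor
# only kicks in for much smaller configured intervals.
import os
import random

CHECK_INTERVAL = int(os.getenv("CHECK_INTERVAL", 150))
CHECK_INTERVAL_VARIANCE = int(os.getenv("CHECK_INTERVAL_VARIANCE", 30))

def get_randomized_interval() -> int:
    """Get check interval with random variance to avoid bot detection patterns."""
    variance = random.randint(-CHECK_INTERVAL_VARIANCE, CHECK_INTERVAL_VARIANCE)
    interval = CHECK_INTERVAL + variance
    # Ensure minimum of 60 seconds to avoid excessive load
    return max(60, interval)

samples = [get_randomized_interval() for _ in range(1000)]
print(min(samples), max(samples))  # roughly 120 and 180 with the defaults above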
@@ -139,9 +148,11 @@ async def main() -> None:
 
 
     try:
-        logger.info(f"Bot is now running. Refreshing every {CHECK_INTERVAL} seconds...")
+        logger.info(f"Bot is now running. Refreshing every {CHECK_INTERVAL}±{CHECK_INTERVAL_VARIANCE}s (randomized)...")
         last_clean = 0
         CLEAN_INTERVAL = 48 * 3600  # 48 hours in seconds
+        zero_listings_count = 0  # Track consecutive zero-listing fetches
+        MAX_ZERO_LISTINGS = 3  # Re-login after 3 consecutive zero fetches
         while True:
             now = asyncio.get_event_loop().time()
             # Autoclean debug material every 48 hours
@@ -158,11 +169,30 @@ async def main() -> None:
             # Check if monitoring is enabled before fetching listings
             if not state_manager.is_monitoring_enabled():
                 logger.debug("Monitoring is paused, skipping listing check")
-                await asyncio.sleep(CHECK_INTERVAL)
+                sleep_interval = get_randomized_interval()
+                await asyncio.sleep(sleep_interval)
                 _flush_rotating_file_handlers()
                 continue
 
                 current_listings = await app_handler.fetch_listings()
+
+                # Session validation: detect when listings fetch returns empty repeatedly
+                if not current_listings or len(current_listings) == 0:
+                    zero_listings_count += 1
+                    logger.warning(f"Zero listings fetched ({zero_listings_count}/{MAX_ZERO_LISTINGS})")
+
+                    if zero_listings_count >= MAX_ZERO_LISTINGS:
+                        logger.warning("Session likely expired - forcing re-login")
+                        state_manager.set_logged_in(False)
+                        zero_listings_count = 0
+                        await asyncio.sleep(5)
+                        continue
+                else:
+                    # Reset counter on successful fetch
+                    if zero_listings_count > 0:
+                        logger.info(f"Session recovered - listings fetched successfully")
+                        zero_listings_count = 0
+
             except Exception as e:
                 logger.error(f"💥 Browser crash: {e}")
                 logger.info("Recovering...")
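The same expiry check, pulled out as a small helper for clarity. This is a hypothetical sketch, not what the commit does: fetch_with_session_check is an invented name, while app_handler.fetch_listings() and state_manager.set_logged_in() come from the diff, and the login/recovery path itself is outside this hunk.

MAX_ZERO_LISTINGS = 3  # re-login after 3 consecutive empty fetches

async def fetch_with_session_check(app_handler, state_manager, zero_listings_count: int):
    """Return (listings, updated_counter); flag the session as expired when needed."""
    listings = await app_handler.fetch_listings()
    if not listings:
        zero_listings_count += 1
        if zero_listings_count >= MAX_ZERO_LISTINGS:
            # Three empty fetches in a row: treat the session as expired so the
            # existing login path runs again on the next loop iteration.
            state_manager.set_logged_in(False)
            zero_listings_count = 0
    else:
        zero_listings_count = 0  # a healthy fetch resets the counter
    return listings, zero_listings_count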
@@ -186,7 +216,9 @@ async def main() -> None:
 
             if not current_listings:
                 logger.warning("No listings fetched")
-                await asyncio.sleep(CHECK_INTERVAL)
+                sleep_interval = get_randomized_interval()
+                logger.debug(f"Sleeping for {sleep_interval}s (randomized)")
+                await asyncio.sleep(sleep_interval)
                 _flush_rotating_file_handlers()
                 continue
             previous_listings = app_handler.load_previous_listings()
@@ -208,7 +240,9 @@ async def main() -> None:
                 }
                 app_handler.save_application(result)
                 app_handler.save_listings(current_listings)
-                await asyncio.sleep(CHECK_INTERVAL)
+                sleep_interval = get_randomized_interval()
+                logger.debug(f"Sleeping for {sleep_interval}s (randomized)")
+                await asyncio.sleep(sleep_interval)
                 _flush_rotating_file_handlers()
                 continue
             new_listings = app_handler.find_new_listings(current_listings, previous_listings)
@@ -221,7 +255,9 @@ async def main() -> None:
             application_results = await app_handler.apply_to_listings(new_listings)
             app_handler.notify_new_listings(new_listings, application_results)
             app_handler.save_listings(current_listings)
-            await asyncio.sleep(CHECK_INTERVAL)
+            sleep_interval = get_randomized_interval()
+            logger.debug(f"Next check in {sleep_interval}s")
+            await asyncio.sleep(sleep_interval)
             _flush_rotating_file_handlers()
     except (KeyboardInterrupt, SystemExit):
         logger.info("Shutting down...")