
Commit

add error reporting
mevljas committed Feb 2, 2025
1 parent bae3480 · commit a3abf20
Showing 3 changed files with 14 additions and 5 deletions.
4 changes: 3 additions & 1 deletion .idea/misc.xml

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion services/discord_service.py
@@ -48,7 +48,7 @@ async def my_background_task(self):
 )
 
 # Run the spider.
-channel_listings = await run_spider(database_manager=database_manager)
+channel_listings, error = await run_spider(database_manager=database_manager)
 
 for channel_id, listings in channel_listings.items():
     logging.debug("Sending listings to channel %s.", channel_id)
@@ -103,6 +103,9 @@ async def my_background_task(self):
 
 await channel.send(embed=embed)
 
+if error:
+    await channel.send("An error occurred while scanning the website.")
+
 logging.info("Scan finished.")
 
 @my_background_task.before_loop
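For context, here is a minimal sketch of the consumer-side pattern this change introduces: the background task unpacks the (channel_listings, error) tuple that run_spider now returns and posts a notice to the channel when the flag is set. This is not the repository's code; scan_and_report and build_embed are hypothetical names, the spider.spider import path is a guess at the module layout, and only discord.py's standard Client.get_channel, channel.send, and Embed APIs are used.

import logging

import discord

# The import path is an assumption based on the file layout shown in this diff.
from spider.spider import run_spider


def build_embed(listing: dict) -> discord.Embed:
    # Hypothetical embed builder; the real project formats richer fields.
    return discord.Embed(
        title=str(listing.get("title", "New listing")),
        description=str(listing.get("price", "")),
    )


async def scan_and_report(bot: discord.Client, database_manager) -> None:
    # The spider now returns listings grouped by channel plus an error flag.
    channel_listings, error = await run_spider(database_manager=database_manager)

    for channel_id, listings in channel_listings.items():
        channel = bot.get_channel(channel_id)
        if channel is None:
            logging.warning("Channel %s not found, skipping.", channel_id)
            continue

        for listing in listings:
            await channel.send(embed=build_embed(listing))

        # New in this commit: surface scraping failures in the channel
        # instead of failing silently.
        if error:
            await channel.send("An error occurred while scanning the website.")

    logging.info("Scan finished.")

Reporting the flag as a message rather than raising keeps the background loop alive even when a single page fails.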
10 changes: 7 additions & 3 deletions spider/spider.py
@@ -10,9 +10,10 @@
 from services.extract_service import parse_page
 
 
-async def run_spider(database_manager: DatabaseManager):
+async def run_spider(database_manager: DatabaseManager) -> tuple[dict, bool]:
     """
-    Setups the playwright library and starts the crawler.
+    Setups the playwright library and runs the crawler.
+    Returns a dictionary with listings and a boolean indicating if an error occurred.
     """
     logger.info("Spider started.")
 
@@ -47,6 +48,8 @@ async def run_spider(database_manager: DatabaseManager):
 
 more_pages = True
 
+error = False
+
 index = 1
 
 results = {}
@@ -68,6 +71,7 @@
 results.update(results_tmp)
 except Exception as e:  # pylint: disable=broad-except
     logger.error("Error parsing page: %s", e)
+    error = True
 index += 1
 
 for nepremicnine_id, new_data in results.items():
@@ -115,7 +119,7 @@
 await browser.close()
 logger.info("Spider finished. Found %d new listings.", len(discord_listings))
 
-return discord_listings
+return discord_listings, error
 
 
 async def read_config():
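To make the producer-side pattern explicit, below is a minimal, self-contained sketch of what this commit does inside run_spider: a per-page parsing failure is logged and recorded in a boolean instead of aborting the crawl, and the flag is returned alongside the results. crawl_pages, flaky_parse, and the example URLs are hypothetical stand-ins for the real Playwright navigation and services.extract_service.parse_page.

import asyncio
import logging
from collections.abc import Awaitable, Callable

logger = logging.getLogger(__name__)


async def crawl_pages(
    urls: list[str],
    parse: Callable[[str], Awaitable[dict]],
) -> tuple[dict, bool]:
    """Crawl the given URLs with `parse` and return (results, error_occurred)."""
    results: dict = {}
    error = False

    for index, url in enumerate(urls, start=1):
        try:
            results.update(await parse(url))
        except Exception as e:  # pylint: disable=broad-except
            # Keep going so one bad page does not abort the whole scan,
            # but remember that something failed.
            logger.error("Error parsing page %d (%s): %s", index, url, e)
            error = True

    return results, error


async def _demo() -> None:
    async def flaky_parse(url: str) -> dict:
        if "broken" in url:
            raise ValueError("unexpected markup")
        return {url: {"price": "100.000 €"}}

    results, error = await crawl_pages(
        ["https://example.com/ok", "https://example.com/broken"], flaky_parse
    )
    print(results, error)  # one parsed page, error is True


if __name__ == "__main__":
    asyncio.run(_demo())

The caller in discord_service.py then only has to unpack the tuple, exactly as the first hunk above does.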
