From e73752c0152b18ae9081bf591b78b7e596454e9f Mon Sep 17 00:00:00 2001 From: agres Date: Sun, 23 Mar 2025 21:54:44 +0100 Subject: [PATCH] Fixed counter issue where the accumulation would be calculated wrongly and the requests were not at full capacity --- src/scraper.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/scraper.py b/src/scraper.py index e488e35..489b72f 100644 --- a/src/scraper.py +++ b/src/scraper.py @@ -60,21 +60,25 @@ def _scrape_missing_info(bearer_token_simple: str, table_name: Table, id_field_n ids = [] processed_ids = set() - for i, id_value in enumerate(all_ids_missing): + counter = 0 + + for id_value in all_ids_missing: id_value_str = id_value[0] if id_value_str not in processed_ids: ids.append(id_value_str) processed_ids.add(id_value_str) + counter += 1 - if (i + 1) % limit == 0: + if counter % limit == 0 and len(ids) > 0: ids_tuple = tuple(ids) ids.clear() response = get_multiple_field_information(bearer_token_simple, endpoint_name, limit, *ids_tuple) _add_data_to_database(table_name, response) + counter = 0 - if ids: + if len(ids) > 0: ids_tuple = tuple(ids) ids.clear() response = get_multiple_field_information(bearer_token_simple, endpoint_name, limit, *ids_tuple)