diff --git a/padelclub_backend/urls.py b/padelclub_backend/urls.py
index bf418f8..b39b324 100644
--- a/padelclub_backend/urls.py
+++ b/padelclub_backend/urls.py
@@ -18,7 +18,7 @@ from django.urls import include, path
from django.conf import settings
from django.conf.urls.static import static
-from tournaments.admin_utils import download_french_padel_rankings, debug_tools_page, test_player_details_apis, explore_fft_api_endpoints, get_player_license_info, bulk_license_lookup, search_player_by_name, enrich_rankings_with_licenses
+from tournaments.admin_utils import download_french_padel_rankings, debug_tools_page, test_player_details_apis, explore_fft_api_endpoints, get_player_license_info, bulk_license_lookup, search_player_by_name, enrich_rankings_with_licenses, gather_monthly_tournaments_and_umpires
urlpatterns = [
@@ -38,6 +38,11 @@ urlpatterns = [
path('kingdom/', admin.site.urls),
path('api-auth/', include('rest_framework.urls')),
path('dj-auth/', include('django.contrib.auth.urls')),
+ path(
+ "kingdom/debug/gather-monthly-umpires/",
+ gather_monthly_tournaments_and_umpires,
+ name="gather_monthly_umpires",
+ ),
]
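
Note: the new route sits under the existing kingdom/debug/ prefix and, like the other admin utilities, is gated by staff_member_required. A minimal sketch of exercising it with Django's test client, assuming a staff account named "admin" already exists (only the URL name "gather_monthly_umpires" comes from the diff above; everything else is illustrative):

from django.contrib.auth import get_user_model
from django.test import Client
from django.urls import reverse

client = Client()
# The view is wrapped in @staff_member_required, so log in as a staff user first.
client.force_login(get_user_model().objects.get(username="admin"))
response = client.get(reverse("gather_monthly_umpires"))
assert response.status_code == 200  # GET is expected to render the date-range form
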
diff --git a/tournaments/admin_utils.py b/tournaments/admin_utils.py
index d3f521d..9ec3c98 100644
--- a/tournaments/admin_utils.py
+++ b/tournaments/admin_utils.py
@@ -14,11 +14,15 @@ from django.template import Template, Context
from django.middleware.csrf import get_token
import concurrent.futures
from functools import partial
+import csv
+import io
+from api.utils import scrape_fft_all_tournaments, get_umpire_data
-default_sexe = 'H'
+default_sexe = "H"
default_id_homologation = "82546485"
default_session_id = "JSESSIONID=E3DE6A54D5367D48B0CFA970E09EB422; AWSALB=UlkEmLYVxfS3RNwiNeNygqdqjroNzOZF3D9k6nR+NP6YPG3r6JLIzOqtw3nV1aVKsyNMldzeFOmVy/V1OPf7LNVW/sckdD1EprkGtgqjX8N8DpihxhTGtTm+0sX1; AWSALBCORS=UlkEmLYVxfS3RNwiNeNygqdqjroNzOZF3D9k6nR+NP6YPG3r6JLIzOqtw3nV1aVKsyNMldzeFOmVy/V1OPf7LNVW/sckdD1EprkGtgqjX8N8DpihxhTGtTm+0sX1; datadome=K3v~wZc~sLs5C7D4p0OoS3jOXGpeDfai9vk~TDPw2mSFbxqpfjUcR68wvPaYXHYqXgAHOrFnrBGpoyNepJ6bXfncdSmYOUfMNPbAtvBBo67zZTxxSeogLiLu1U1_5Txo; TCID=; tc_cj_v2=%5Ecl_%5Dny%5B%5D%5D_mmZZZZZZKQOKSRRSNRRNQZZZ%5D777_rn_lh%5BfyfcheZZZ%2F%20%290%2BH%2C0%200%20G%24%2FH%29%20%2FZZZKQOLJNPJONLMPZZZ%5D777%5Ecl_%5Dny%5B%5D%5D_mmZZZZZZKQOLJNPJSSQMNZZZ%5D777_rn_lh%5BfyfcheZZZ%2F%20%290%2BH%2C0%200%20G%24%2FH%29%20%2FZZZKQOLJNPRRONPSZZZ%5D777%5Ecl_%5Dny%5B%5D%5D_mmZZZZZZKQOLJNQLKOMSOZZZ%5D777_rn_lh%5BfyfcheZZZ%2F%20%290%2BH%2C0%200%20G%24%2FH%29%20%2FZZZKQOLJPMNSNOJKZZZ%5D777%5Ecl_%5Dny%5B%5D%5D_mmZZZZZZKQOLJPMRPKRKMZZZ%5D777_rn_lh%5BfyfcheZZZ%2F%20%290%2BH%2C0%200%20G%24%2FH%29%20%2FZZZKQOLJQSONNLNQZZZ%5D777%5Ecl_%5Dny%5B%5D%5D_mmZZZZZZKQOLKMOPOJKSLZZZ%5D777_rn_lh%5BfyfcheZZZ%2F%20%290%2BH%2C0%200%20G%24%2FH%29%20%2FZZZKQONMQSSNRPKQZZZ%5D; tc_cj_v2_cmp=; tc_cj_v2_med=; xtan=-; xtant=1; pa_vid=%22mckhos3iasswydjm%22; _pcid=%7B%22browserId%22%3A%22mckhos3iasswydjm%22%2C%22_t%22%3A%22ms8wm9hs%7Cmckhos5s%22%7D; _pctx=%7Bu%7DN4IgrgzgpgThIC4B2YA2qA05owMoBcBDfSREQpAeyRCwgEt8oBJAE0RXSwH18yBbCAA4A7vwCcACwgAffgGMA1pMoQArPAC%2BQA; TCPID=125629554310878226394; xtvrn=$548419$"
+
def calculate_age_from_birth_date(birth_date_str):
"""
Calculate age from birth date string (format: DD/MM/YYYY)
@@ -29,13 +33,18 @@ def calculate_age_from_birth_date(birth_date_str):
try:
# Parse French date format DD/MM/YYYY
- birth_date = datetime.strptime(birth_date_str, '%d/%m/%Y')
+ birth_date = datetime.strptime(birth_date_str, "%d/%m/%Y")
today = datetime.now()
- age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
+ age = (
+ today.year
+ - birth_date.year
+ - ((today.month, today.day) < (birth_date.month, birth_date.day))
+ )
return age
except (ValueError, TypeError):
return None
+
def find_best_license_match(license_results, player):
"""
Find the best matching license from multiple results using ageSportif comparison
@@ -50,11 +59,10 @@ def find_best_license_match(license_results, player):
"""
# Get player's age from ranking data for duplicate matching
- player_age_sportif = player.get('ageSportif')
- rank = player.get('classement')
- lastname = player.get('nom')
- firstname = player.get('prenom')
-
+ player_age_sportif = player.get("ageSportif")
+ rank = player.get("classement")
+ lastname = player.get("nom")
+ firstname = player.get("prenom")
if not license_results:
return None, {"reason": "no_results"}
@@ -62,40 +70,37 @@ def find_best_license_match(license_results, player):
# First, filter out players without valid classement data
def has_valid_classement(license_data, rank):
"""Check if a license has valid classement data"""
- classement = license_data.get('classement', {})
+ classement = license_data.get("classement", {})
if not classement:
return False
# Check if any of the key classement fields have meaningful data
- date_fr = classement.get('dateFr', '').strip()
- rang = classement.get('rang')
- points = classement.get('points')
- date = classement.get('date')
+ date_fr = classement.get("dateFr", "").strip()
+ rang = classement.get("rang")
+ points = classement.get("points")
+ date = classement.get("date")
# Consider it valid if at least one of these conditions is met:
# - dateFr is not empty
# - rang is not None
# - points is not None (and > 0)
# - date is not None
- return (
- rang is not None and rang == rank
- )
+ return rang is not None and rang == rank
# First, filter out players without valid classement data
def has_valid_name(license_data, firstname, lastname):
- lk_firstname = license_data.get('prenom', '')
- lk_lastname = license_data.get('nom', '')
+ lk_firstname = license_data.get("prenom", "")
+ lk_lastname = license_data.get("nom", "")
if not lk_firstname and not lk_lastname:
return False
- return (
- lk_firstname == firstname and lk_lastname == lastname
- )
+ return lk_firstname == firstname and lk_lastname == lastname
# Filter license results to only include those with valid classement
valid_license_results = [
- license_data for license_data in license_results
+ license_data
+ for license_data in license_results
if has_valid_name(license_data, firstname, lastname)
if has_valid_classement(license_data, rank)
]
@@ -105,7 +110,7 @@ def find_best_license_match(license_results, player):
return None, {
"reason": "no_valid_classement",
"original_count": len(license_results),
- "filtered_count": 0
+ "filtered_count": 0,
}
# If only one valid result, return it
@@ -114,7 +119,7 @@ def find_best_license_match(license_results, player):
"reason": "single_valid_result",
"original_count": len(license_results),
"filtered_count": 1,
- "age_match": "n/a"
+ "age_match": "n/a",
}
# If we don't have ageSportif from ranking, take the first valid match
@@ -123,16 +128,16 @@ def find_best_license_match(license_results, player):
"reason": "no_age_data_used_first_valid",
"original_count": len(license_results),
"filtered_count": len(valid_license_results),
- "used_first_result": True
+ "used_first_result": True,
}
best_match = None
- best_age_diff = float('inf')
+ best_age_diff = float("inf")
match_details = []
best_match_count = 0
for i, license_data in enumerate(valid_license_results):
- birth_date_fr = license_data.get('dateNaissanceFr')
+ birth_date_fr = license_data.get("dateNaissanceFr")
calculated_age = calculate_age_from_birth_date(birth_date_fr)
match_detail = {
@@ -141,8 +146,8 @@ def find_best_license_match(license_results, player):
"calculated_age": calculated_age,
"player_age_sportif": player_age_sportif,
"age_difference": None,
- "license": license_data.get('licence'),
- "classement": license_data.get('classement', {})
+ "license": license_data.get("licence"),
+ "classement": license_data.get("classement", {}),
}
if calculated_age is not None:
@@ -165,7 +170,7 @@ def find_best_license_match(license_results, player):
"original_count": len(license_results),
"filtered_count": len(valid_license_results),
"used_first_result": True,
- "match_details": match_details
+ "match_details": match_details,
}
return valid_license_results[0], match_info
else:
@@ -175,7 +180,7 @@ def find_best_license_match(license_results, player):
"best_age_difference": best_age_diff,
"total_candidates": len(license_results),
"valid_candidates": len(valid_license_results),
- "match_details": match_details
+ "match_details": match_details,
}
return best_match, match_info
else:
@@ -184,10 +189,11 @@ def find_best_license_match(license_results, player):
"best_age_difference": best_age_diff,
"total_candidates": len(license_results),
"valid_candidates": len(valid_license_results),
- "match_details": match_details
+ "match_details": match_details,
}
return None, match_info
+
@staff_member_required
def test_player_details_apis(request):
"""
@@ -196,94 +202,100 @@ def test_player_details_apis(request):
# Sample idCrm values from your data
test_ids = [5417299111, 9721526122]
- results = {
- "timestamp": datetime.now().isoformat(),
- "test_results": []
- }
+ results = {"timestamp": datetime.now().isoformat(), "test_results": []}
for idCrm in test_ids:
- player_results = {
- "idCrm": idCrm,
- "tests": []
- }
+ player_results = {"idCrm": idCrm, "tests": []}
# Test 1: Try player detail endpoint
try:
url = f"https://tenup.fft.fr/back/public/v1/joueurs/{idCrm}"
response = requests.get(url, timeout=10)
- player_results["tests"].append({
- "method": "GET",
- "url": url,
- "status_code": response.status_code,
- "response_preview": response.text[:500] if response.status_code == 200 else response.text
- })
+ player_results["tests"].append(
+ {
+ "method": "GET",
+ "url": url,
+ "status_code": response.status_code,
+ "response_preview": response.text[:500]
+ if response.status_code == 200
+ else response.text,
+ }
+ )
except Exception as e:
- player_results["tests"].append({
- "method": "GET",
- "url": url,
- "error": str(e)
- })
+ player_results["tests"].append(
+ {"method": "GET", "url": url, "error": str(e)}
+ )
# Test 2: Try with different endpoint structure
try:
url = f"https://tenup.fft.fr/back/public/v1/players/{idCrm}"
response = requests.get(url, timeout=10)
- player_results["tests"].append({
- "method": "GET",
- "url": url,
- "status_code": response.status_code,
- "response_preview": response.text[:500] if response.status_code == 200 else response.text
- })
+ player_results["tests"].append(
+ {
+ "method": "GET",
+ "url": url,
+ "status_code": response.status_code,
+ "response_preview": response.text[:500]
+ if response.status_code == 200
+ else response.text,
+ }
+ )
except Exception as e:
- player_results["tests"].append({
- "method": "GET",
- "url": url,
- "error": str(e)
- })
+ player_results["tests"].append(
+ {"method": "GET", "url": url, "error": str(e)}
+ )
# Test 3: Try POST with idCrm in body
try:
url = "https://tenup.fft.fr/back/public/v1/joueurs/detail"
payload = {"idCrm": idCrm}
- response = requests.post(url, json=payload, headers={'Content-Type': 'application/json'}, timeout=10)
- player_results["tests"].append({
- "method": "POST",
- "url": url,
- "payload": payload,
- "status_code": response.status_code,
- "response_preview": response.text[:500] if response.status_code == 200 else response.text
- })
+ response = requests.post(
+ url,
+ json=payload,
+ headers={"Content-Type": "application/json"},
+ timeout=10,
+ )
+ player_results["tests"].append(
+ {
+ "method": "POST",
+ "url": url,
+ "payload": payload,
+ "status_code": response.status_code,
+ "response_preview": response.text[:500]
+ if response.status_code == 200
+ else response.text,
+ }
+ )
except Exception as e:
- player_results["tests"].append({
- "method": "POST",
- "url": url,
- "payload": payload,
- "error": str(e)
- })
+ player_results["tests"].append(
+ {"method": "POST", "url": url, "payload": payload, "error": str(e)}
+ )
# Test 4: Try the classements endpoint with more parameters
try:
url = "https://tenup.fft.fr/back/public/v1/classements/recherche"
- payload = {
- "pratique": "padel",
- "sexe": default_sexe,
- "idCrm": idCrm
- }
- response = requests.post(url, json=payload, headers={'Content-Type': 'application/json'}, timeout=10)
- player_results["tests"].append({
- "method": "POST",
- "url": url,
- "payload": payload,
- "status_code": response.status_code,
- "response_preview": response.text[:500] if response.status_code == 200 else response.text
- })
+ payload = {"pratique": "padel", "sexe": default_sexe, "idCrm": idCrm}
+ response = requests.post(
+ url,
+ json=payload,
+ headers={"Content-Type": "application/json"},
+ timeout=10,
+ )
+ player_results["tests"].append(
+ {
+ "method": "POST",
+ "url": url,
+ "payload": payload,
+ "status_code": response.status_code,
+ "response_preview": response.text[:500]
+ if response.status_code == 200
+ else response.text,
+ }
+ )
except Exception as e:
- player_results["tests"].append({
- "method": "POST",
- "url": url,
- "payload": payload,
- "error": str(e)
- })
+ player_results["tests"].append(
+ {"method": "POST", "url": url, "payload": payload, "error": str(e)}
+ )
results["test_results"].append(player_results)
time.sleep(0.5) # Small delay between tests
@@ -295,11 +307,12 @@ def test_player_details_apis(request):
# Return results as downloadable JSON
http_response = HttpResponse(
json.dumps(results, indent=2, ensure_ascii=False),
- content_type='application/json; charset=utf-8'
+ content_type="application/json; charset=utf-8",
)
- http_response['Content-Disposition'] = f'attachment; filename="{filename}"'
+ http_response["Content-Disposition"] = f'attachment; filename="{filename}"'
return http_response
+
@staff_member_required
def explore_fft_api_endpoints(request):
"""
@@ -320,32 +333,34 @@ def explore_fft_api_endpoints(request):
"/search",
"/recherche",
"/tournaments",
- "/tournois"
+ "/tournois",
]
results = {
"base_url": base_url,
"timestamp": datetime.now().isoformat(),
- "endpoint_tests": []
+ "endpoint_tests": [],
}
for endpoint in endpoints_to_test:
try:
url = base_url + endpoint
response = requests.get(url, timeout=10)
- results["endpoint_tests"].append({
- "endpoint": endpoint,
- "url": url,
- "status_code": response.status_code,
- "content_type": response.headers.get('content-type', ''),
- "response_preview": response.text[:300] if len(response.text) < 1000 else response.text[:300] + "..."
- })
+ results["endpoint_tests"].append(
+ {
+ "endpoint": endpoint,
+ "url": url,
+ "status_code": response.status_code,
+ "content_type": response.headers.get("content-type", ""),
+ "response_preview": response.text[:300]
+ if len(response.text) < 1000
+ else response.text[:300] + "...",
+ }
+ )
except Exception as e:
- results["endpoint_tests"].append({
- "endpoint": endpoint,
- "url": url,
- "error": str(e)
- })
+ results["endpoint_tests"].append(
+ {"endpoint": endpoint, "url": url, "error": str(e)}
+ )
time.sleep(0.2) # Small delay
@@ -356,28 +371,27 @@ def explore_fft_api_endpoints(request):
# Return results as downloadable JSON
http_response = HttpResponse(
json.dumps(results, indent=2, ensure_ascii=False),
- content_type='application/json; charset=utf-8'
+ content_type="application/json; charset=utf-8",
)
- http_response['Content-Disposition'] = f'attachment; filename="{filename}"'
+ http_response["Content-Disposition"] = f'attachment; filename="{filename}"'
return http_response
+
@staff_member_required
def download_french_padel_rankings(request):
"""
Download French padel rankings from FFT API and save locally for later enrichment
"""
- if request.method == 'POST':
- start_tranche = int(request.POST.get('start_tranche', 1001))
- end_tranche = int(request.POST.get('end_tranche', 1222))
- save_locally = request.POST.get('save_locally', 'true') == 'true'
- sexe = request.POST.get('sexe', default_sexe)
+ if request.method == "POST":
+ start_tranche = int(request.POST.get("start_tranche", 1001))
+ end_tranche = int(request.POST.get("end_tranche", 1222))
+ save_locally = request.POST.get("save_locally", "true") == "true"
+ sexe = request.POST.get("sexe", default_sexe)
try:
# API endpoint and parameters
url = "https://tenup.fft.fr/back/public/v1/classements/recherche"
- headers = {
- 'Content-Type': 'application/json'
- }
+ headers = {"Content-Type": "application/json"}
all_players = []
total_tranches = end_tranche - start_tranche + 1
@@ -386,65 +400,89 @@ def download_french_padel_rankings(request):
failed_tranches_list = []
retry_stats = {} # Track retry attempts per tranche
- print(f"Starting to fetch tranches {start_tranche} to {end_tranche} ({total_tranches} total)...")
+ print(
+ f"Starting to fetch tranches {start_tranche} to {end_tranche} ({total_tranches} total)..."
+ )
def fetch_tranche_with_retry(tranche, max_retries=10):
"""
Fetch a single tranche with retry logic
Returns: (success, players_data, retry_count)
"""
- payload = {
- "pratique": "padel",
- "sexe": sexe,
- "tranche": tranche
- }
+ payload = {"pratique": "padel", "sexe": sexe, "tranche": tranche}
for attempt in range(max_retries + 1): # +1 for initial attempt
try:
- response = requests.post(url, json=payload, headers=headers, timeout=30)
+ response = requests.post(
+ url, json=payload, headers=headers, timeout=30
+ )
if response.status_code == 200:
json_data = response.json()
- if 'joueurs' in json_data and json_data['joueurs']:
+ if "joueurs" in json_data and json_data["joueurs"]:
# Add metadata to each player for enrichment tracking
- for player in json_data['joueurs']:
- player['source_tranche'] = tranche
- player['license_lookup_status'] = 'not_attempted'
- player['license_data'] = None
+ for player in json_data["joueurs"]:
+ player["source_tranche"] = tranche
+ player["license_lookup_status"] = "not_attempted"
+ player["license_data"] = None
if attempt > 0:
- print(f"Tranche {tranche}: SUCCESS after {attempt} retries - Found {len(json_data['joueurs'])} players")
+ print(
+ f"Tranche {tranche}: SUCCESS after {attempt} retries - Found {len(json_data['joueurs'])} players"
+ )
else:
- print(f"Tranche {tranche}: Found {len(json_data['joueurs'])} players")
+ print(
+ f"Tranche {tranche}: Found {len(json_data['joueurs'])} players"
+ )
- return True, json_data['joueurs'], attempt
+ return True, json_data["joueurs"], attempt
else:
if attempt > 0:
- print(f"Tranche {tranche}: SUCCESS after {attempt} retries - No players found")
+ print(
+ f"Tranche {tranche}: SUCCESS after {attempt} retries - No players found"
+ )
else:
print(f"Tranche {tranche}: No players found")
return True, [], attempt
else:
if attempt < max_retries:
- print(f"Tranche {tranche}: HTTP {response.status_code} - Retry {attempt + 1}/{max_retries}")
- time.sleep(min(2 ** attempt, 10)) # Exponential backoff, max 10 seconds
+ print(
+ f"Tranche {tranche}: HTTP {response.status_code} - Retry {attempt + 1}/{max_retries}"
+ )
+ time.sleep(
+ min(2**attempt, 10)
+ ) # Exponential backoff, max 10 seconds
else:
- print(f"Tranche {tranche}: FAILED after {max_retries} retries - HTTP {response.status_code}")
+ print(
+ f"Tranche {tranche}: FAILED after {max_retries} retries - HTTP {response.status_code}"
+ )
except requests.exceptions.RequestException as e:
if attempt < max_retries:
- print(f"Tranche {tranche}: Network error - {str(e)} - Retry {attempt + 1}/{max_retries}")
- time.sleep(min(2 ** attempt, 10)) # Exponential backoff, max 10 seconds
+ print(
+ f"Tranche {tranche}: Network error - {str(e)} - Retry {attempt + 1}/{max_retries}"
+ )
+ time.sleep(
+ min(2**attempt, 10)
+ ) # Exponential backoff, max 10 seconds
else:
- print(f"Tranche {tranche}: FAILED after {max_retries} retries - Network error: {str(e)}")
+ print(
+ f"Tranche {tranche}: FAILED after {max_retries} retries - Network error: {str(e)}"
+ )
except Exception as e:
if attempt < max_retries:
- print(f"Tranche {tranche}: Unexpected error - {str(e)} - Retry {attempt + 1}/{max_retries}")
- time.sleep(min(2 ** attempt, 10)) # Exponential backoff, max 10 seconds
+ print(
+ f"Tranche {tranche}: Unexpected error - {str(e)} - Retry {attempt + 1}/{max_retries}"
+ )
+ time.sleep(
+ min(2**attempt, 10)
+ ) # Exponential backoff, max 10 seconds
else:
- print(f"Tranche {tranche}: FAILED after {max_retries} retries - Unexpected error: {str(e)}")
+ print(
+ f"Tranche {tranche}: FAILED after {max_retries} retries - Unexpected error: {str(e)}"
+ )
return False, [], max_retries
@@ -466,14 +504,20 @@ def download_french_padel_rankings(request):
if tranche % 10 == 0:
time.sleep(0.1)
current_progress = tranche - start_tranche + 1
- print(f"Progress: {current_progress}/{total_tranches} tranches processed...")
+ print(
+ f"Progress: {current_progress}/{total_tranches} tranches processed..."
+ )
print(f"Completed! Total players found: {len(all_players)}")
print(f"Successful calls: {successful_calls}, Failed calls: {failed_calls}")
# Enhanced retry statistics logging
retry_summary = {}
- tranches_with_retries = [t for t, c in retry_stats.items() if c > 0 and t not in failed_tranches_list]
+ tranches_with_retries = [
+ t
+ for t, c in retry_stats.items()
+ if c > 0 and t not in failed_tranches_list
+ ]
if tranches_with_retries:
print(f"Tranches that required retries: {len(tranches_with_retries)}")
for tranche in sorted(tranches_with_retries):
@@ -485,13 +529,17 @@ def download_french_padel_rankings(request):
print("Retry distribution:")
for retry_count in sorted(retry_summary.keys()):
- print(f" {retry_summary[retry_count]} tranches needed {retry_count} retries")
+ print(
+ f" {retry_summary[retry_count]} tranches needed {retry_count} retries"
+ )
else:
print("No retries were needed!")
if failed_tranches_list:
print(f"Failed tranches: {failed_tranches_list}")
- failed_retry_counts = [retry_stats.get(t, 0) for t in failed_tranches_list]
+ failed_retry_counts = [
+ retry_stats.get(t, 0) for t in failed_tranches_list
+ ]
print(f"All failed tranches attempted maximum retries (10)")
else:
print("No failed tranches - all requests successful!")
@@ -511,55 +559,62 @@ def download_french_padel_rankings(request):
"tranches_with_retries": len(tranches_with_retries),
"retry_stats_per_tranche": retry_stats,
"retry_distribution": retry_summary,
- "max_retries_attempted": 10
+ "max_retries_attempted": 10,
},
"last_enrichment_update": None,
"enrichment_progress": {
"players_with_licenses": 0,
"players_without_licenses": len(all_players),
- "last_processed_index": -1
+ "last_processed_index": -1,
},
"parameters": {
"pratique": "padel",
"sexe": sexe,
"tranche_start": start_tranche,
- "tranche_end": end_tranche
- }
+ "tranche_end": end_tranche,
+ },
},
- "joueurs": all_players
+ "joueurs": all_players,
}
# Save locally if requested
if save_locally:
- rankings_dir = os.path.join(settings.BASE_DIR, 'data', 'rankings')
+ rankings_dir = os.path.join(settings.BASE_DIR, "data", "rankings")
os.makedirs(rankings_dir, exist_ok=True)
filename = f"french_padel_rankings_{start_tranche}-{end_tranche}_{timestamp}.json"
local_file_path = os.path.join(rankings_dir, filename)
- with open(local_file_path, 'w', encoding='utf-8') as f:
+ with open(local_file_path, "w", encoding="utf-8") as f:
json.dump(final_data, f, indent=2, ensure_ascii=False)
print(f"Rankings saved locally to: {local_file_path}")
- messages.success(request, f"Rankings saved locally to: {local_file_path}")
+ messages.success(
+ request, f"Rankings saved locally to: {local_file_path}"
+ )
# Create download response
download_filename = f"french_padel_rankings_{start_tranche}-{end_tranche}_{timestamp}.json"
http_response = HttpResponse(
json.dumps(final_data, indent=2, ensure_ascii=False),
- content_type='application/json; charset=utf-8'
+ content_type="application/json; charset=utf-8",
+ )
+ http_response["Content-Disposition"] = (
+ f'attachment; filename="{download_filename}"'
)
- http_response['Content-Disposition'] = f'attachment; filename="{download_filename}"'
return http_response
else:
- messages.error(request, f"No players found in tranches {start_tranche}-{end_tranche}.")
+ messages.error(
+ request,
+ f"No players found in tranches {start_tranche}-{end_tranche}.",
+ )
except Exception as e:
messages.error(request, f"Unexpected error: {str(e)}")
csrf_token = get_token(request)
- html_template = Template('''
+ html_template = Template("""
@@ -679,20 +734,18 @@ def download_french_padel_rankings(request):
- ''')
+ """)
- context = Context({
- 'csrf_token': csrf_token,
- 'default_sexe': default_sexe
- })
+ context = Context({"csrf_token": csrf_token, "default_sexe": default_sexe})
return HttpResponse(html_template.render(context))
+
@staff_member_required
def debug_tools_page(request):
"""
Simple debug tools page with download button
"""
- html_content = '''
+ html_content = """
@@ -728,6 +781,8 @@ def debug_tools_page(request):
.button.success:hover { background-color: #218838; }
.button.info { background-color: #17a2b8; }
.button.info:hover { background-color: #138496; }
+ .button.warning { background-color: #ffc107; color: #212529; }
+ .button.warning:hover { background-color: #e0a800; }
.info {
background-color: #d1ecf1;
color: #0c5460;
@@ -766,6 +821,20 @@ def debug_tools_page(request):
                     Download Rankings (1001-1222)
+
+
            Tournament & Umpire Management
+
Tools for gathering tournament data and umpire contact information from FFT.
+
+
Monthly Umpire Export
+
Gather all tournaments within a specified month and export umpire contact information to CSV format.
+
            Export Monthly Umpires
+
+
+            Export Format: The CSV will contain: CLUB_NAME;LAST_NAME;FIRST_NAME;EMAIL;PHONE_NUMBER
+            Processing Time: This may take several minutes for large date ranges as it processes tournaments in batches of 100.
+
+
+
            Enhanced Rankings Tools
Advanced tools that combine multiple APIs for enriched data.
@@ -809,21 +878,24 @@ def debug_tools_page(request):
- '''
+ """
return HttpResponse(html_content)
+
@staff_member_required
def get_player_license_info(request):
"""
Get player license information using sessionId and idHomologation
"""
- if request.method == 'POST':
- session_id = request.POST.get('sessionId', '').strip()
- id_homologation = request.POST.get('idHomologation', '').strip()
- license_id = request.POST.get('licenseId', '').strip()
+ if request.method == "POST":
+ session_id = request.POST.get("sessionId", "").strip()
+ id_homologation = request.POST.get("idHomologation", "").strip()
+ license_id = request.POST.get("licenseId", "").strip()
if not session_id or not id_homologation or not license_id:
- messages.error(request, "sessionId, idHomologation, and licenseId are all required.")
+ messages.error(
+ request, "sessionId, idHomologation, and licenseId are all required."
+ )
return redirect(request.path)
try:
@@ -842,7 +914,7 @@ def get_player_license_info(request):
"Connection": "keep-alive",
"Referer": f"https://beach-padel.app.fft.fr/beachja/competitionFiche/inscrireEquipe?identifiantHomologation={id_homologation}",
"X-Requested-With": "XMLHttpRequest",
- "Cookie": session_id
+ "Cookie": session_id,
}
print(f"Making request to: {url}")
@@ -864,14 +936,14 @@ def get_player_license_info(request):
"url": url,
"license_id": license_id,
"id_homologation": id_homologation,
- "timestamp": datetime.now().isoformat()
+ "timestamp": datetime.now().isoformat(),
},
"response_info": {
"status_code": response.status_code,
"headers": dict(response.headers),
- "raw_response": response.text
+ "raw_response": response.text,
},
- "parsed_data": json_data
+ "parsed_data": json_data,
}
# Create filename with timestamp
@@ -881,16 +953,21 @@ def get_player_license_info(request):
# Return as downloadable JSON
http_response = HttpResponse(
json.dumps(result, indent=2, ensure_ascii=False),
- content_type='application/json; charset=utf-8'
+ content_type="application/json; charset=utf-8",
+ )
+ http_response["Content-Disposition"] = (
+ f'attachment; filename="{filename}"'
)
- http_response['Content-Disposition'] = f'attachment; filename="{filename}"'
return http_response
except json.JSONDecodeError as e:
messages.error(request, f"Failed to parse JSON response: {str(e)}")
else:
- messages.error(request, f"Request failed with status {response.status_code}: {response.text}")
+ messages.error(
+ request,
+ f"Request failed with status {response.status_code}: {response.text}",
+ )
except requests.exceptions.RequestException as e:
messages.error(request, f"Network error: {str(e)}")
@@ -902,7 +979,7 @@ def get_player_license_info(request):
# Default values
default_license_id = "5186803"
- html_template = Template('''
+ html_template = Template("""
@@ -1014,26 +1091,29 @@ def get_player_license_info(request):
- ''')
-
- context = Context({
- 'csrf_token': csrf_token,
- 'default_session_id': default_session_id,
- 'default_id_homologation': default_id_homologation,
- 'default_license_id': default_license_id,
- 'default_sexe': default_sexe
- })
+ """)
+
+ context = Context(
+ {
+ "csrf_token": csrf_token,
+ "default_session_id": default_session_id,
+ "default_id_homologation": default_id_homologation,
+ "default_license_id": default_license_id,
+ "default_sexe": default_sexe,
+ }
+ )
return HttpResponse(html_template.render(context))
+
@staff_member_required
def bulk_license_lookup(request):
"""
Lookup multiple license IDs at once
"""
- if request.method == 'POST':
- session_id = request.POST.get('sessionId', '').strip()
- id_homologation = request.POST.get('idHomologation', '').strip()
- license_ids_text = request.POST.get('licenseIds', '').strip()
+ if request.method == "POST":
+ session_id = request.POST.get("sessionId", "").strip()
+ id_homologation = request.POST.get("idHomologation", "").strip()
+ license_ids_text = request.POST.get("licenseIds", "").strip()
if not session_id or not id_homologation or not license_ids_text:
messages.error(request, "All fields are required.")
@@ -1041,7 +1121,7 @@ def bulk_license_lookup(request):
# Parse license IDs (one per line or comma-separated)
license_ids = []
- for line in license_ids_text.replace(',', '\n').split('\n'):
+ for line in license_ids_text.replace(",", "\n").split("\n"):
license_id = line.strip()
if license_id:
license_ids.append(license_id)
@@ -1054,9 +1134,9 @@ def bulk_license_lookup(request):
"bulk_lookup_info": {
"total_licenses": len(license_ids),
"id_homologation": id_homologation,
- "timestamp": datetime.now().isoformat()
+ "timestamp": datetime.now().isoformat(),
},
- "results": []
+ "results": [],
}
# Headers setup (same as single lookup)
@@ -1071,21 +1151,21 @@ def bulk_license_lookup(request):
"Connection": "keep-alive",
"Referer": f"https://beach-padel.app.fft.fr/beachja/competitionFiche/inscrireEquipe?identifiantHomologation={id_homologation}",
"X-Requested-With": "XMLHttpRequest",
- "Cookie": session_id
+ "Cookie": session_id,
}
for i, license_id in enumerate(license_ids):
try:
url = f"https://beach-padel.app.fft.fr/beachja/rechercheJoueur/licencies?idHomologation={id_homologation}&numeroLicence={license_id}"
- print(f"Looking up license {i+1}/{len(license_ids)}: {license_id}")
+ print(f"Looking up license {i + 1}/{len(license_ids)}: {license_id}")
response = requests.get(url, headers=headers, timeout=30)
result_item = {
"license_id": license_id,
"status_code": response.status_code,
- "success": response.status_code == 200
+ "success": response.status_code == 200,
}
if response.status_code == 200:
@@ -1106,11 +1186,9 @@ def bulk_license_lookup(request):
time.sleep(0.5)
except Exception as e:
- results["results"].append({
- "license_id": license_id,
- "success": False,
- "error": str(e)
- })
+ results["results"].append(
+ {"license_id": license_id, "success": False, "error": str(e)}
+ )
# Create filename with timestamp
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
@@ -1119,9 +1197,9 @@ def bulk_license_lookup(request):
# Return as downloadable JSON
http_response = HttpResponse(
json.dumps(results, indent=2, ensure_ascii=False),
- content_type='application/json; charset=utf-8'
+ content_type="application/json; charset=utf-8",
)
- http_response['Content-Disposition'] = f'attachment; filename="{filename}"'
+ http_response["Content-Disposition"] = f'attachment; filename="{filename}"'
return http_response
csrf_token = get_token(request)
@@ -1129,7 +1207,7 @@ def bulk_license_lookup(request):
# Default values
default_license_ids = "5186803\n1234567\n2345678"
- html_template = Template('''
+ html_template = Template("""
@@ -1241,28 +1319,31 @@ def bulk_license_lookup(request):
- ''')
-
- context = Context({
- 'csrf_token': csrf_token,
- 'default_session_id': default_session_id,
- 'default_id_homologation': default_id_homologation,
- 'default_license_ids': default_license_ids,
- 'default_sexe': default_sexe
- })
+ """)
+
+ context = Context(
+ {
+ "csrf_token": csrf_token,
+ "default_session_id": default_session_id,
+ "default_id_homologation": default_id_homologation,
+ "default_license_ids": default_license_ids,
+ "default_sexe": default_sexe,
+ }
+ )
return HttpResponse(html_template.render(context))
+
@staff_member_required
def search_player_by_name(request):
"""
Search for players by nom and prenom
"""
- if request.method == 'POST':
- session_id = request.POST.get('sessionId', '').strip()
- id_homologation = request.POST.get('idHomologation', '').strip()
- nom = request.POST.get('nom', '')
- prenom = request.POST.get('prenom', '')
- sexe = request.POST.get('sexe', default_sexe)
+ if request.method == "POST":
+ session_id = request.POST.get("sessionId", "").strip()
+ id_homologation = request.POST.get("idHomologation", "").strip()
+ nom = request.POST.get("nom", "")
+ prenom = request.POST.get("prenom", "")
+ sexe = request.POST.get("sexe", default_sexe)
if not session_id or not id_homologation:
messages.error(request, "sessionId and idHomologation are required.")
@@ -1274,10 +1355,10 @@ def search_player_by_name(request):
try:
# Construct the URL for name search
- base_url = "https://beach-padel.app.fft.fr/beachja/rechercheJoueur/licencies"
- params = {
- "idHomologation": id_homologation
- }
+ base_url = (
+ "https://beach-padel.app.fft.fr/beachja/rechercheJoueur/licencies"
+ )
+ params = {"idHomologation": id_homologation}
# Add name parameters if provided
if nom:
@@ -1301,7 +1382,7 @@ def search_player_by_name(request):
"Connection": "keep-alive",
"Referer": f"https://beach-padel.app.fft.fr/beachja/competitionFiche/inscrireEquipe?identifiantHomologation={id_homologation}",
"X-Requested-With": "XMLHttpRequest",
- "Cookie": session_id
+ "Cookie": session_id,
}
print(f"Making request to: {url}")
@@ -1324,14 +1405,14 @@ def search_player_by_name(request):
"nom": nom,
"prenom": prenom,
"id_homologation": id_homologation,
- "timestamp": datetime.now().isoformat()
+ "timestamp": datetime.now().isoformat(),
},
"response_info": {
"status_code": response.status_code,
"headers": dict(response.headers),
- "raw_response": response.text
+ "raw_response": response.text,
},
- "parsed_data": json_data
+ "parsed_data": json_data,
}
# Create filename with timestamp
@@ -1342,16 +1423,21 @@ def search_player_by_name(request):
# Return as downloadable JSON
http_response = HttpResponse(
json.dumps(result, indent=2, ensure_ascii=False),
- content_type='application/json; charset=utf-8'
+ content_type="application/json; charset=utf-8",
+ )
+ http_response["Content-Disposition"] = (
+ f'attachment; filename="{filename}"'
)
- http_response['Content-Disposition'] = f'attachment; filename="{filename}"'
return http_response
except json.JSONDecodeError as e:
messages.error(request, f"Failed to parse JSON response: {str(e)}")
else:
- messages.error(request, f"Request failed with status {response.status_code}: {response.text}")
+ messages.error(
+ request,
+ f"Request failed with status {response.status_code}: {response.text}",
+ )
except requests.exceptions.RequestException as e:
messages.error(request, f"Network error: {str(e)}")
@@ -1362,7 +1448,7 @@ def search_player_by_name(request):
# Default values
- html_template = Template('''
+ html_template = Template("""
@@ -1510,31 +1596,42 @@ def search_player_by_name(request):
- ''')
-
- context = Context({
- 'csrf_token': csrf_token,
- 'default_session_id': default_session_id,
- 'default_id_homologation': default_id_homologation,
- 'default_sexe': default_sexe
- })
+ """)
+
+ context = Context(
+ {
+ "csrf_token": csrf_token,
+ "default_session_id": default_session_id,
+ "default_id_homologation": default_id_homologation,
+ "default_sexe": default_sexe,
+ }
+ )
return HttpResponse(html_template.render(context))
+
@staff_member_required
def enrich_rankings_with_licenses(request):
"""
Load a local rankings file and enrich players with license data (resumable)
Uses concurrent processing to speed up lookups for large files.
"""
- if request.method == 'POST':
- file_path = request.POST.get('file_path', '').strip()
- session_id = request.POST.get('sessionId', default_session_id).strip()
- id_homologation = request.POST.get('idHomologation', default_id_homologation).strip()
- save_batch_size = int(request.POST.get('batch_size', 1000)) # How often to save progress
- max_workers = int(request.POST.get('max_workers', 10)) # New parameter for controlling concurrency
+ if request.method == "POST":
+ file_path = request.POST.get("file_path", "").strip()
+ session_id = request.POST.get("sessionId", default_session_id).strip()
+ id_homologation = request.POST.get(
+ "idHomologation", default_id_homologation
+ ).strip()
+ save_batch_size = int(
+ request.POST.get("batch_size", 1000)
+ ) # How often to save progress
+ max_workers = int(
+ request.POST.get("max_workers", 10)
+ ) # New parameter for controlling concurrency
if not file_path or not session_id or not id_homologation:
- messages.error(request, "File path, session ID, and ID homologation are required.")
+ messages.error(
+ request, "File path, session ID, and ID homologation are required."
+ )
return redirect(request.path)
try:
@@ -1544,13 +1641,13 @@ def enrich_rankings_with_licenses(request):
return redirect(request.path)
print(f"Loading rankings from: {file_path}")
- with open(file_path, 'r', encoding='utf-8') as f:
+ with open(file_path, "r", encoding="utf-8") as f:
data = json.load(f)
- players = data.get('joueurs', [])
- metadata = data.get('metadata', {})
+ players = data.get("joueurs", [])
+ metadata = data.get("metadata", {})
# Extract sex from metadata (it's the same for all players in the ranking)
- sexe = metadata.get('parameters', {}).get('sexe', default_sexe)
+ sexe = metadata.get("parameters", {}).get("sexe", default_sexe)
print(f"Using sex from metadata: {sexe}")
if not players:
@@ -1564,11 +1661,11 @@ def enrich_rankings_with_licenses(request):
original_index, player = player_tuple
# Extract and normalize names
- raw_nom = player.get('nom')
- raw_prenom = player.get('prenom')
+ raw_nom = player.get("nom")
+ raw_prenom = player.get("prenom")
if not raw_nom or not raw_prenom:
- player['license_lookup_status'] = 'missing_name_data'
+ player["license_lookup_status"] = "missing_name_data"
return player, False, None
# Keep original case and accents, just clean up any extra whitespace
@@ -1587,12 +1684,14 @@ def enrich_rankings_with_licenses(request):
"Connection": "keep-alive",
"Referer": f"https://beach-padel.app.fft.fr/beachja/competitionFiche/inscrireEquipe?identifiantHomologation={id_homologation}",
"X-Requested-With": "XMLHttpRequest",
- "Cookie": session_id
+ "Cookie": session_id,
}
def sanitize_for_latin1(text):
# Replace specific problematic characters
- text = text.replace('\u2019', "'") # Replace right single quotation mark with regular apostrophe
+ text = text.replace(
+ "\u2019", "'"
+ ) # Replace right single quotation mark with regular apostrophe
# Add more replacements as needed
return text
@@ -1601,75 +1700,127 @@ def enrich_rankings_with_licenses(request):
license_url = f"https://beach-padel.app.fft.fr/beachja/rechercheJoueur/licencies?idHomologation={id_homologation}&nom={urllib.parse.quote(sanitize_for_latin1(nom), encoding='latin-1')}&prenom={urllib.parse.quote(sanitize_for_latin1(prenom), encoding='latin-1')}&sexe={sexe}"
# Make license lookup request
- license_response = requests.get(license_url, headers=license_headers, timeout=30)
- license_response.encoding = 'utf-8' # Ensure consistent encoding
+ license_response = requests.get(
+ license_url, headers=license_headers, timeout=30
+ )
+ license_response.encoding = "utf-8" # Ensure consistent encoding
if license_response.status_code == 200:
try:
license_data = license_response.json()
- if license_data and 'object' in license_data and 'listeJoueurs' in license_data['object']:
- liste_joueurs = license_data['object']['listeJoueurs']
- presence_doublon = license_data['object'].get('presenceDoublon', False)
+ if (
+ license_data
+ and "object" in license_data
+ and "listeJoueurs" in license_data["object"]
+ ):
+ liste_joueurs = license_data["object"]["listeJoueurs"]
+ presence_doublon = license_data["object"].get(
+ "presenceDoublon", False
+ )
if liste_joueurs:
# Find the best matching license using age comparison
- license_info, match_info = find_best_license_match(liste_joueurs, player)
+ license_info, match_info = find_best_license_match(
+ liste_joueurs, player
+ )
if license_info is None:
- player['license_lookup_status'] = 'too_many_results'
- player['presenceDoublon'] = presence_doublon
- return player, False, f"Failed {nom} {prenom} {player['idCrm']} -> Too many results"
+ player["license_lookup_status"] = (
+ "too_many_results"
+ )
+ player["presenceDoublon"] = presence_doublon
+ return (
+ player,
+ False,
+ f"Failed {nom} {prenom} {player['idCrm']} -> Too many results",
+ )
# Add all license data to player
- player['licence'] = license_info.get('licence')
- player['codeClub'] = license_info.get('codeClub')
- player['nomClub'] = license_info.get('nomClub')
+ player["licence"] = license_info.get("licence")
+ player["codeClub"] = license_info.get("codeClub")
+ player["nomClub"] = license_info.get("nomClub")
# Track duplicates and matching info
- player['presenceDoublon'] = presence_doublon
- player['nombreResultatsLicence'] = len(liste_joueurs)
- player['license_match_info'] = match_info
+ player["presenceDoublon"] = presence_doublon
+ player["nombreResultatsLicence"] = len(
+ liste_joueurs
+ )
+ player["license_match_info"] = match_info
if len(liste_joueurs) > 1:
- player['tousResultatsLicence'] = liste_joueurs
+ player["tousResultatsLicence"] = liste_joueurs
- player['license_lookup_status'] = 'success'
+ player["license_lookup_status"] = "success"
# Enhanced logging with age matching info
doublon_status = ""
if presence_doublon:
if match_info["reason"] == "age_matched":
- age_diff = match_info.get("best_age_difference", "?")
+ age_diff = match_info.get(
+ "best_age_difference", "?"
+ )
doublon_status = f" (DOUBLON - matched by age, diff: {age_diff})"
elif match_info["reason"] == "no_age_data":
- doublon_status = " (DOUBLON - no age data, used first)"
+ doublon_status = (
+ " (DOUBLON - no age data, used first)"
+ )
else:
- doublon_status = f" (DOUBLON - {match_info['reason']})"
-
-                                    return player, True, f"{nom} {prenom} -> {license_info.get('licence')}{doublon_status}"
+ doublon_status = (
+ f" (DOUBLON - {match_info['reason']})"
+ )
+
+ return (
+ player,
+ True,
+                                        f"{nom} {prenom} -> {license_info.get('licence')}{doublon_status}",
+ )
else:
- player['license_lookup_status'] = 'no_results'
- player['presenceDoublon'] = presence_doublon
- return player, False, f"Failed {nom} {prenom} {player['idCrm']} {presence_doublon} -> No results"
+ player["license_lookup_status"] = "no_results"
+ player["presenceDoublon"] = presence_doublon
+ return (
+ player,
+ False,
+ f"Failed {nom} {prenom} {player['idCrm']} {presence_doublon} -> No results",
+ )
else:
- player['license_lookup_status'] = 'no_data'
- return player, False, f"Failed {nom} {prenom} {player['idCrm']} -> No data"
+ player["license_lookup_status"] = "no_data"
+ return (
+ player,
+ False,
+ f"Failed {nom} {prenom} {player['idCrm']} -> No data",
+ )
except json.JSONDecodeError as json_err:
- player['license_lookup_status'] = 'json_decode_error'
- player['license_lookup_error'] = str(json_err)
- player['raw_response'] = license_response.text
+ player["license_lookup_status"] = "json_decode_error"
+ player["license_lookup_error"] = str(json_err)
+ player["raw_response"] = license_response.text
# Debug info for JSON decode errors
error_position = json_err.pos
- return player, False, f"Failed {nom} {prenom} {player['idCrm']} -> JSON decode error: {str(json_err)} at pos {error_position}"
+ return (
+ player,
+ False,
+ f"Failed {nom} {prenom} {player['idCrm']} -> JSON decode error: {str(json_err)} at pos {error_position}",
+ )
else:
- player['license_lookup_status'] = f'http_error_{license_response.status_code}'
- return player, False, f"Failed {nom} {prenom} {player['idCrm']} -> HTTP {license_response.status_code}"
+ player["license_lookup_status"] = (
+ f"http_error_{license_response.status_code}"
+ )
+ return (
+ player,
+ False,
+ f"Failed {nom} {prenom} {player['idCrm']} -> HTTP {license_response.status_code}",
+ )
except requests.exceptions.RequestException as e:
- player['license_lookup_status'] = f'network_error: {type(e).__name__}'
- player['license_lookup_error'] = str(e)
- return player, False, f"Failed {nom} {prenom} {player['idCrm']} -> Network error: {type(e).__name__}: {str(e)}"
+ player["license_lookup_status"] = (
+ f"network_error: {type(e).__name__}"
+ )
+ player["license_lookup_error"] = str(e)
+ return (
+ player,
+ False,
+ f"Failed {nom} {prenom} {player['idCrm']} -> Network error: {type(e).__name__}: {str(e)}",
+ )
# Find players that need license enrichment
players_needing_licenses = []
@@ -1678,7 +1829,7 @@ def enrich_rankings_with_licenses(request):
for i, player in enumerate(players):
if player is None:
continue
- if player.get('license_lookup_status') not in ['success']:
+ if player.get("license_lookup_status") not in ["success"]:
players_needing_licenses.append((i, player))
else:
players_with_licenses += 1
@@ -1695,22 +1846,38 @@ def enrich_rankings_with_licenses(request):
successful_lookups = 0
failed_lookups = 0
- print(f"Starting concurrent enrichment of {len(players_needing_licenses)} players with {max_workers} workers")
+ print(
+ f"Starting concurrent enrichment of {len(players_needing_licenses)} players with {max_workers} workers"
+ )
# Prepare the partial function with fixed parameters
- worker_fn = partial(lookup_player_license, session_id=session_id, id_homologation=id_homologation, sexe=sexe)
+ worker_fn = partial(
+ lookup_player_license,
+ session_id=session_id,
+ id_homologation=id_homologation,
+ sexe=sexe,
+ )
# Process in batches for saving progress, using the value from the form
for batch_start in range(0, len(players_needing_licenses), save_batch_size):
- batch_end = min(batch_start + save_batch_size, len(players_needing_licenses))
+ batch_end = min(
+ batch_start + save_batch_size, len(players_needing_licenses)
+ )
current_batch = players_needing_licenses[batch_start:batch_end]
- print(f"\n=== Processing batch {batch_start//save_batch_size + 1}: players {batch_start+1} to {batch_end} ===")
+ print(
+ f"\n=== Processing batch {batch_start // save_batch_size + 1}: players {batch_start + 1} to {batch_end} ==="
+ )
batch_results = []
# Process the current batch with concurrent workers
- with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
- future_to_player = {executor.submit(worker_fn, player_tuple): player_tuple for player_tuple in current_batch}
+ with concurrent.futures.ThreadPoolExecutor(
+ max_workers=max_workers
+ ) as executor:
+ future_to_player = {
+ executor.submit(worker_fn, player_tuple): player_tuple
+ for player_tuple in current_batch
+ }
for future in concurrent.futures.as_completed(future_to_player):
player_tuple = future_to_player[future]
@@ -1718,7 +1885,9 @@ def enrich_rankings_with_licenses(request):
try:
updated_player, success, message = future.result()
- players[original_index] = updated_player # Update the player in the main list
+ players[original_index] = (
+ updated_player # Update the player in the main list
+ )
if success:
successful_lookups += 1
@@ -1728,14 +1897,20 @@ def enrich_rankings_with_licenses(request):
total_processed += 1
# Print progress every 10 players
- if total_processed % 10 == 0 or total_processed == len(players_needing_licenses):
- print(f" Progress: {total_processed}/{len(players_needing_licenses)} ({(total_processed/len(players_needing_licenses)*100):.1f}%)")
+ if total_processed % 10 == 0 or total_processed == len(
+ players_needing_licenses
+ ):
+ print(
+ f" Progress: {total_processed}/{len(players_needing_licenses)} ({(total_processed / len(players_needing_licenses) * 100):.1f}%)"
+ )
if message:
batch_results.append(message)
except Exception as e:
- print(f" ERROR processing player {original_index} ({player_tuple}'): {str(e)}")
+ print(
+ f" ERROR processing player {original_index} ({player_tuple}'): {str(e)}"
+ )
failed_lookups += 1
total_processed += 1
@@ -1744,36 +1919,47 @@ def enrich_rankings_with_licenses(request):
print(f" {msg}")
# Save progress after each batch
- metadata['last_enrichment_update'] = datetime.now().isoformat()
- metadata['enrichment_progress'] = {
- 'players_with_licenses': successful_lookups + players_with_licenses,
- 'players_without_licenses': len(players) - (successful_lookups + players_with_licenses),
- 'last_processed_index': batch_end - 1,
- 'total_processed_this_run': total_processed,
- 'successful_this_run': successful_lookups,
- 'failed_this_run': failed_lookups
+ metadata["last_enrichment_update"] = datetime.now().isoformat()
+ metadata["enrichment_progress"] = {
+ "players_with_licenses": successful_lookups + players_with_licenses,
+ "players_without_licenses": len(players)
+ - (successful_lookups + players_with_licenses),
+ "last_processed_index": batch_end - 1,
+ "total_processed_this_run": total_processed,
+ "successful_this_run": successful_lookups,
+ "failed_this_run": failed_lookups,
}
# Save the updated file
- with open(file_path, 'w', encoding='utf-8') as f:
+ with open(file_path, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2, ensure_ascii=False)
- print(f"Progress saved: {successful_lookups + players_with_licenses}/{len(players)} players have licenses")
+ print(
+ f"Progress saved: {successful_lookups + players_with_licenses}/{len(players)} players have licenses"
+ )
print(f"\n=== ENRICHMENT COMPLETE ===")
print(f"Total processed this run: {total_processed}")
print(f"Successful lookups this run: {successful_lookups}")
print(f"Failed lookups this run: {failed_lookups}")
- print(f"Total players with licenses: {successful_lookups + players_with_licenses}/{len(players)}")
+ print(
+ f"Total players with licenses: {successful_lookups + players_with_licenses}/{len(players)}"
+ )
- messages.success(request, f"Enrichment complete! Processed {total_processed} players. {successful_lookups + players_with_licenses}/{len(players)} players now have license data.")
+ messages.success(
+ request,
+ f"Enrichment complete! Processed {total_processed} players. {successful_lookups + players_with_licenses}/{len(players)} players now have license data.",
+ )
except Exception as e:
import traceback
+
error_traceback = traceback.format_exc()
print(f"CRITICAL ERROR: {type(e).__name__}: {str(e)}")
print(f"Traceback: {error_traceback}")
- messages.error(request, f"Error during enrichment: {type(e).__name__}: {str(e)}")
+ messages.error(
+ request, f"Error during enrichment: {type(e).__name__}: {str(e)}"
+ )
return redirect(request.path)
# Show the form
@@ -1783,22 +1969,26 @@ def enrich_rankings_with_licenses(request):
csrf_token = get_token(request)
# Try to find existing ranking files
- rankings_dir = os.path.join(settings.BASE_DIR, 'data', 'rankings')
+ rankings_dir = os.path.join(settings.BASE_DIR, "data", "rankings")
existing_files = []
if os.path.exists(rankings_dir):
for filename in os.listdir(rankings_dir):
- if filename.endswith('.json') and 'french_padel_rankings' in filename:
+ if filename.endswith(".json") and "french_padel_rankings" in filename:
file_path = os.path.join(rankings_dir, filename)
- existing_files.append({
- 'filename': filename,
- 'path': file_path,
- 'size': os.path.getsize(file_path),
- 'modified': datetime.fromtimestamp(os.path.getmtime(file_path)).strftime('%Y-%m-%d %H:%M:%S')
- })
+ existing_files.append(
+ {
+ "filename": filename,
+ "path": file_path,
+ "size": os.path.getsize(file_path),
+ "modified": datetime.fromtimestamp(
+ os.path.getmtime(file_path)
+ ).strftime("%Y-%m-%d %H:%M:%S"),
+ }
+ )
- existing_files.sort(key=lambda x: x['modified'], reverse=True)
+ existing_files.sort(key=lambda x: x["modified"], reverse=True)
- html_template = Template('''
+ html_template = Template("""
@@ -1900,15 +2090,429 @@ def enrich_rankings_with_licenses(request):
- ''')
+ """)
context = {
- 'csrf_token': csrf_token,
- 'existing_files': existing_files,
- 'default_session_id': default_session_id,
- 'default_id_homologation': default_id_homologation,
- 'default_sexe': default_sexe
+ "csrf_token": csrf_token,
+ "existing_files": existing_files,
+ "default_session_id": default_session_id,
+ "default_id_homologation": default_id_homologation,
+ "default_sexe": default_sexe,
}
rendered_html = html_template.render(Context(context))
return HttpResponse(rendered_html)
+
+
+@staff_member_required
+def gather_monthly_tournaments_and_umpires(request):
+ """
+ Gather tournaments from current month and export umpire data to CSV
+ """
+ if request.method == "GET":
+ # Display the form
+ html_content = """
+
+
+
+ Monthly Tournament & Umpire Export - Padel Club Admin
+
+
+
+
+
+
+
+ """
+
+ from django.middleware.csrf import get_token
+ from django.template import Template, Context
+
+ csrf_token = get_token(request)
+ template = Template(html_content)
+ context = Context({"csrf_token": csrf_token})
+
+ return HttpResponse(template.render(context))
+
+ elif request.method == "POST":
+ start_date = request.POST.get("start_date")
+ end_date = request.POST.get("end_date")
+ city = request.POST.get("city", "Paris")
+ distance = int(request.POST.get("distance", 3000))
+
+ if not start_date or not end_date:
+ return HttpResponse("Missing start_date or end_date", status=400)
+
+ try:
+ # Convert to datetime objects
+ start_datetime = datetime.strptime(start_date, "%Y-%m-%d")
+ end_datetime = datetime.strptime(end_date, "%Y-%m-%d")
+
+ # Format for FFT API (DD/MM/YY format like iOS)
+ start_date_formatted = start_datetime.strftime("%d/%m/%y")
+ end_date_formatted = end_datetime.strftime("%d/%m/%y")
+
+ # Step 1: Gather all tournaments using the same API endpoint as iOS
+ print(
+                f"Gathering tournaments from {start_date_formatted} to {end_date_formatted}"
+ )
+
+ tournaments = []
+ page = 0 # Start from page 0
+
+ # Default coordinates for Paris (same as iOS request)
+ lat = 48.856788
+ lng = 2.351077
+
+ base_url = f"http://127.0.0.1:8000/roads/fft/all-tournaments"
+
+ while True:
+ # Build URL exactly like iOS request
+ params = {
+ "sort": "dateDebut+asc",
+ "page": page,
+ "start_date": start_date_formatted,
+ "end_date": end_date_formatted,
+ "city": city,
+ "distance": distance,
+ "national_cup": "false",
+ "lat": lat,
+ "lng": lng,
+ }
+
+ # Make API request (internal call)
+ try:
+ import urllib.parse
+
+ query_string = urllib.parse.urlencode(params)
+ full_url = f"{base_url}?{query_string}"
+
+                    print(f"Requesting page {page}: {full_url}")
+
+ # Make internal API call using requests
+ response = requests.get(full_url)
+
+ if response.status_code != 200:
+ print(
+                            f"API request failed with status {response.status_code}"
+ )
+ break
+
+ result = response.json()
+
+ if not result.get("success") or not result.get("tournaments"):
+                        print(f"No more tournaments found on page {page}")
+ break
+
+ page_tournaments = result["tournaments"]
+ tournaments.extend(page_tournaments)
+
+ current_count = len(page_tournaments)
+ print(
+                        f"Page {page}: found {current_count} tournaments (total: {len(tournaments)})"
+ )
+
+ # Check if we have more pages based on total results
+ total_results = result.get("total_results", 0)
+ if len(tournaments) >= total_results:
+                        print(f"Reached all {total_results} tournaments")
+ break
+
+ page += 1
+
+ # Safety limit
+ if page > 50:
+                        print("Reached page limit, stopping")
+ break
+
+ except Exception as e:
+                    print(f"Error making API request for page {page}: {str(e)}")
+ break
+
+            print(f"Found {len(tournaments)} tournaments total")
+
+ if not tournaments:
+ return HttpResponse(
+ f"No tournaments found for the period {start_date_formatted} to {end_date_formatted} "
+                    f"Back to form ",
+ content_type="text/html",
+ )
+
+ # Step 2: Gather umpire data for each tournament (with batching)
+ umpire_data = []
+ batch_size = 100
+ total_tournaments = len(tournaments)
+
+ print(
+                f"Starting umpire data collection for {total_tournaments} tournaments"
+ )
+
+ # Process tournaments in batches
+ for i in range(0, len(tournaments), batch_size):
+ batch = tournaments[i : i + batch_size]
+ batch_number = (i // batch_size) + 1
+ total_batches = (len(tournaments) + batch_size - 1) // batch_size
+
+ print(
+                f"\nProcessing batch {batch_number}/{total_batches} ({len(batch)} tournaments)"
+ )
+
+ batch_processed = 0
+
+ # Use ThreadPoolExecutor for concurrent requests within each batch
+ with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
+ # Submit all tasks for this batch
+ future_to_tournament = {
+ executor.submit(
+ get_tournament_umpire_data_api, tournament
+ ): tournament
+ for tournament in batch
+ }
+
+ # Process results as they complete
+ for future in concurrent.futures.as_completed(future_to_tournament):
+ tournament = future_to_tournament[future]
+ batch_processed += 1
+ processed_total = i + batch_processed
+
+ try:
+ umpire_info = future.result()
+ if umpire_info:
+ umpire_data.append(umpire_info)
+
+ # Progress update every 10 tournaments
+ if batch_processed % 10 == 0 or batch_processed == len(
+ batch
+ ):
+ batch_progress = int(
+ (batch_processed / len(batch)) * 100
+ )
+ total_progress = int(
+ (processed_total / total_tournaments) * 100
+ )
+ remaining = total_tournaments - processed_total
+ print(
+                                    f"   Batch {batch_number}: {batch_processed}/{len(batch)} ({batch_progress}%) | Total: {processed_total}/{total_tournaments} ({total_progress}%) | {remaining} left"
+ )
+
+ except Exception as e:
+ print(
+                            f"Error processing tournament {tournament.get('id', 'unknown')}: {str(e)}"
+ )
+ continue
+
+            print(f"Completed batch {batch_number}/{total_batches}")
+
+ print(
+            f"\nUmpire data collection complete! Found {len(umpire_data)} umpires with contact info"
+ )
+
+ # Step 3: Generate CSV
+ if not umpire_data:
+ return HttpResponse(
+ "No umpire contact information found for the tournaments in this period "
+                    "Back to form ",
+ content_type="text/html",
+ )
+
+ # Create CSV content
+ output = io.StringIO()
+
+ for umpire in umpire_data:
+ # Format: CLUB_NAME;LAST_NAME;FIRST_NAME;EMAIL;PHONE
+ club_name = umpire.get("club_name", "").replace(
+ ";", ","
+                )  # Replace semicolons with commas so they do not break the ';'-delimited CSV
+
+ # Try to split name into first and last name
+ full_name = umpire.get("name", "")
+ name_parts = full_name.split(" ", 1)
+
+ if len(name_parts) >= 2:
+ first_name = name_parts[0]
+ last_name = " ".join(name_parts[1:])
+ else:
+ first_name = full_name
+ last_name = ""
+
+ email = umpire.get("email", "")
+ phone = umpire.get("phone", "")
+
+ # Write line in the specified format
+ output.write(f"{club_name};{last_name};{first_name};{email};{phone}\n")
+
+ csv_content = output.getvalue()
+ output.close()
+
+ # Generate filename with date range
+ filename = f"umpires_{start_date}_{end_date}.csv"
+
+ # Return CSV as download
+ response = HttpResponse(csv_content, content_type="text/csv; charset=utf-8")
+ response["Content-Disposition"] = f'attachment; filename="{filename}"'
+
+ return response
+
+ except Exception as e:
+ import traceback
+
+ error_details = traceback.format_exc()
+
+ return HttpResponse(
+ f"Error processing request "
+                f"Error: {str(e)} "
+                f"{error_details} "
+                f"Back to form ",
+ content_type="text/html",
+ )
+
+
+def get_tournament_umpire_data_api(tournament):
+ """
+ Helper function to get umpire data for a single tournament using the same API endpoint as iOS
+ Returns dict with umpire info or None if not found/error
+ """
+ try:
+ tournament_id = tournament.get("id")
+ if not tournament_id:
+ return None
+
+ # Use the same API endpoint as iOS: /roads/fft/umpire/{tournament_id}/
+ api_url = f"http://127.0.0.1:8000/roads/fft/umpire/{tournament_id}/"
+
+ response = requests.get(api_url, timeout=30)
+
+ if response.status_code != 200:
+ print(
+                f"Umpire API request failed for tournament {tournament_id}: {response.status_code}"
+ )
+ return None
+
+ data = response.json()
+
+ name = data.get("name", "")
+ email = data.get("email", "")
+ phone = data.get("phone", "")
+
+ # Skip if no contact info
+ if not name and not email and not phone:
+ return None
+
+ # Extract club name from tournament data
+ club_name = (
+ tournament.get("organisateur", {}).get("nom", "")
+ if tournament.get("organisateur")
+ else ""
+ )
+
+ return {
+ "tournament_id": tournament_id,
+ "tournament_name": tournament.get("intitule", ""),
+ "club_name": club_name,
+ "name": name or "",
+ "email": email or "",
+ "phone": phone or "",
+ }
+
+ except Exception as e:
+ print(
+ f"Error getting umpire data for tournament {tournament.get('id', 'unknown')}: {str(e)}"
+ )
+ return None
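
Note: the monthly export writes one ';'-separated line per umpire in the order CLUB_NAME;LAST_NAME;FIRST_NAME;EMAIL;PHONE_NUMBER, with no header row, and semicolons in the club name are replaced beforehand. A minimal sketch of reading such a file back; the filename below is only an example of the umpires_<start>_<end>.csv pattern used by the view, and skipping rows without an email is an assumption about how the export would be consumed:

import csv

# Hypothetical downloaded file; the export writes raw ';'-separated lines with no header.
with open("umpires_2025-06-01_2025-06-30.csv", newline="", encoding="utf-8") as f:
    reader = csv.reader(f, delimiter=";")
    for club_name, last_name, first_name, email, phone in reader:
        if email:  # rows may have a name or phone but no email
            print(f"{first_name} {last_name} <{email}> ({club_name})")
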
diff --git a/tournaments/models/tournament.py b/tournaments/models/tournament.py
index 903d8ef..d4423ff 100644
--- a/tournaments/models/tournament.py
+++ b/tournaments/models/tournament.py
@@ -96,8 +96,8 @@ class Tournament(BaseModel):
club_member_fee_deduction = models.FloatField(null=True, blank=True)
unregister_delta_in_hours = models.IntegerField(default=24)
currency_code = models.CharField(null=True, blank=True, max_length=3, default='EUR')
- parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL, related_name='children')
- loser_index = models.IntegerField(default=0)
+ # parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL, related_name='children')
+ # loser_index = models.IntegerField(default=0)
def delete_dependencies(self):
for team_registration in self.team_registrations.all():