improvements

sync3
laurent, 5 months ago
parent fc21dc2b93, commit 90e7f4216e

Changed files (changed lines per file):
  1. sync/model_manager.py (0)
  2. sync/models/base.py (85)
  3. sync/models/data_access.py (3)
  4. sync/registry.py (180)
  5. sync/signals.py (115)
  6. sync/utils.py (1)
  7. sync/views.py (81)
  8. tournaments/models/match.py (12)
  9. tournaments/models/team_score.py (18)

sync/models/base.py

@@ -1,9 +1,14 @@
 from django.db import models
 from django.utils.timezone import now
 from django.conf import settings
+from django.apps import apps
 from typing import List, Set
-from django.apps import apps
+from collections import defaultdict
+import logging
+
+logger = logging.getLogger(__name__)
 
 
 class BaseModel(models.Model):
     creation_date = models.DateTimeField(default=now, editable=False)
@@ -28,20 +33,20 @@ class BaseModel(models.Model):
         else:
             return None
 
+    def data_identifier_dict(self):
+        return {
+            'model_id': self.id,
+            'store_id': None
+        }
+
     def update_data_access_list(self):
-        related_instances = self.related_instances()
-        related_ids = [ri.id for ri in related_instances]
-        related_ids.append(self.id)
-        DataAccess = apps.get_model('sync', 'DataAccess')
-        data_accesses = DataAccess.objects.filter(model_id__in=related_ids)
-        for data_access in data_accesses:
-            self.add_data_access_relation(data_access)
-        # add data_access to children who might not had the relationship
-        # if data_accesses:
-        #     for child in self.get_children_by_model():
-        #         if len(child.data_access_ids) == 0:
+        related_instances = self.sharing_related_instances()
+        data_access_ids = {instance.data_access_ids for instance in related_instances}
+        data_access_ids.update(self.data_access_ids)
+        self.data_access_ids = data_access_ids
+        # DataAccess = apps.get_model('sync', 'DataAccess')
+        # data_accesses = DataAccess.objects.filter(model_id__in=related_ids)
         #             for data_access in data_accesses:
         #                 self.add_data_access_relation(data_access)
@@ -187,8 +192,60 @@ class BaseModel(models.Model):
         return None
 
+    def sharing_related_instances(self):
+        """
+        Get all related instances (both children and parents) recursively
+        """
+        instances = []
+        processed_objects = set()
+        instances.extend(self.get_shared_children(processed_objects))
+        processed_objects = set()
+        instances.extend(self.get_recursive_parents(processed_objects))
+        return instances
+
+    def get_shared_children(self, processed_objects):
+        sync_models = getattr(settings, 'SYNC_MODEL_CHILDREN_SHARING', {})
+        relationships = sync_models[self.__class__.__name__]
+        if relationships:
+            return self.get_shared_children_from_relationships(relationships, processed_objects)
+        else:
+            return self.get_recursive_children(processed_objects)
+
+    def get_shared_children_from_relationships(self, relationships, processed_objects):
+        # print(f'relationships = {relationships}')
+        current = [self]
+        for relationship in relationships:
+            # print(f'> relationship = {relationship}')
+            values = []
+            for item in current:
+                value = getattr(item, relationship)
+                if hasattr(value, 'all') and callable(value.all):
+                    # This is a queryset from a reverse relationship
+                    for related_obj in value.all():
+                        processed_objects.add(related_obj)
+                    values.extend(value.all())
+                else:
+                    processed_objects.add(value)
+                    values.append(value)
+            current = values
+        logger.info(f'+++ shared children = {processed_objects}')
+        return processed_objects
+
 
 class SideStoreModel(BaseModel):
     store_id = models.CharField(max_length=100, default="")  # a value matching LeStorage directory sub-stores. Matches the name of the directory.
 
     class Meta:
         abstract = True
 
+    def data_identifier_dict(self):
+        return {
+            'model_id': self.id,
+            'store_id': self.store_id
+        }
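For context on the new traversal: SYNC_MODEL_CHILDREN_SHARING is read as a mapping from a model class name to the relationship names to follow from it (the comment in sync/views.py shows the shape, e.g. 'Match' -> 'team_scores', 'team_registration', 'player_registrations'), and get_shared_children_from_relationships applies those names in sequence, so each name is resolved against the objects produced by the previous step. Below is a minimal, framework-free sketch of the same walk under that assumption; FakeManager and walk are illustrative names, not project code.

from types import SimpleNamespace

class FakeManager:
    """Stands in for a reverse-relationship manager exposing .all()."""
    def __init__(self, items):
        self._items = list(items)

    def all(self):
        return list(self._items)

def walk(root, relationships):
    """Same traversal as get_shared_children_from_relationships, on plain objects."""
    processed, current = set(), [root]
    for relationship in relationships:
        values = []
        for item in current:
            value = getattr(item, relationship)
            if hasattr(value, 'all') and callable(value.all):
                related = value.all()      # reverse relation: many objects
                processed.update(related)
                values.extend(related)
            else:
                processed.add(value)       # forward relation: one object
                values.append(value)
        current = values
    return processed

# two fake "team scores" hanging off a fake "match"
score_a, score_b = SimpleNamespace(), SimpleNamespace()
match = SimpleNamespace(team_scores=FakeManager([score_a, score_b]))
assert walk(match, ['team_scores']) == {score_a, score_b}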

sync/models/data_access.py

@@ -69,7 +69,8 @@ class DataAccess(BaseModel):
         if model_class:
             try:
                 obj = model_class.objects.get(id=self.model_id)
-                related_instance = obj.related_instances()
+                related_instance = obj.sharing_related_instances()
                 related_instance.append(obj)
                 with transaction.atomic():

sync/registry.py

@@ -1,8 +1,16 @@
 from django.conf import settings
 from django.apps import apps
+from .models import BaseModel
 from django.contrib.auth import get_user_model
-from .models import BaseModel
 import threading
+import logging
+from typing import List, Optional, Dict
+
+logger = logging.getLogger(__name__)
 
 User = get_user_model()
@@ -105,3 +113,173 @@ class RelatedUsersRegistry:
 
 # Global instance
 related_users_registry = RelatedUsersRegistry()
+
+
+class SyncModelChildrenManager:
+    """
+    Manager class for handling model children sharing configuration.
+    Reads the SYNC_MODEL_CHILDREN_SHARING setting once and builds a bidirectional
+    relationship graph for efficient lookup.
+    """
+
+    def __init__(self):
+        """Initialize the manager by reading the Django setting and building the relationship graph."""
+        self._model_relationships = getattr(
+            settings,
+            'SYNC_MODEL_CHILDREN_SHARING',
+            {}
+        )
+        self._relationship_graph = self._build_relationship_graph()
+        logger.info(f'self._relationship_graph = {self._relationship_graph}')
+
+    def _build_relationship_graph(self) -> Dict[str, List[List[str]]]:
+        """
+        Build a bidirectional relationship graph.
+
+        Returns:
+            Dict[str, List[List[str]]]: Dictionary where keys are model names and values
+            are lists of relationship paths (arrays of relationship names).
+        """
+        graph = {}
+
+        # Add direct relationships (original models to their children)
+        for model_name, relationships in self._model_relationships.items():
+            if model_name not in graph:
+                graph[model_name] = []
+            # Add direct relationships as single-item arrays
+            for relationship in relationships:
+                graph[model_name].append([relationship])
+
+        # Build reverse relationships (children back to original models)
+        for original_model_name, relationships in self._model_relationships.items():
+            try:
+                original_model = model_registry.get_model(original_model_name)
+                if original_model is None:
+                    continue
+
+                for relationship_name in relationships:
+                    # Get the related model through _meta
+                    try:
+                        field = None
+                        # Try to find the field in the model's _meta
+                        for f in original_model._meta.get_fields():
+                            if hasattr(f, 'related_name') and f.related_name == relationship_name:
+                                field = f
+                                break
+                            elif hasattr(f, 'name') and f.name == relationship_name:
+                                field = f
+                                break
+
+                        if field is None:
+                            continue
+
+                        # Get the related model
+                        if hasattr(field, 'related_model'):
+                            related_model = field.related_model
+                        elif hasattr(field, 'model'):
+                            related_model = field.model
+                        else:
+                            continue
+
+                        related_model_name = related_model.__name__
+
+                        # Find the reverse relationship name
+                        reverse_relationship_name = self._find_reverse_relationship(
+                            related_model, original_model, relationship_name
+                        )
+
+                        if reverse_relationship_name:
+                            # Add the reverse path
+                            if related_model_name not in graph:
+                                graph[related_model_name] = []
+                            # The path back is just the reverse relationship name
+                            graph[related_model_name].append([reverse_relationship_name])
+
+                    except Exception as e:
+                        # Skip problematic relationships
+                        continue
+            except Exception as e:
+                # Skip problematic models
+                continue
+
+        return graph
+
+    def _find_reverse_relationship(self, from_model, to_model, original_relationship_name):
+        """
+        Find the reverse relationship name from from_model to to_model.
+
+        Args:
+            from_model: The model to search relationships from
+            to_model: The target model to find relationship to
+            original_relationship_name: The original relationship name for context
+
+        Returns:
+            str or None: The reverse relationship name if found
+        """
+        try:
+            for field in from_model._meta.get_fields():
+                # Check ForeignKey, OneToOneField fields
+                if hasattr(field, 'related_model') and field.related_model == to_model:
+                    # Check if this field has a related_name that matches our original relationship
+                    if hasattr(field, 'related_name') and field.related_name == original_relationship_name:
+                        # This is the reverse of our original relationship
+                        return field.name
+                    elif not hasattr(field, 'related_name') or field.related_name is None:
+                        # Default reverse relationship name
+                        default_name = f"{to_model._meta.model_name}"
+                        if default_name == original_relationship_name.rstrip('s'):  # Simple heuristic
+                            return field.name
+
+                # Check reverse relationships
+                if hasattr(field, 'field') and hasattr(field.field, 'model'):
+                    if field.field.model == to_model:
+                        if field.get_accessor_name() == original_relationship_name:
+                            return field.field.name
+        except Exception:
+            pass
+
+        return None
+
+    def get_relationships(self, model_name: str) -> List[str]:
+        """
+        Get the list of direct relationships for a given model name.
+
+        Args:
+            model_name (str): The name of the model to look up
+
+        Returns:
+            List[str]: List of relationship names for the model.
+            Returns empty list if model is not found.
+        """
+        return self._model_relationships.get(model_name, [])
+
+    def get_relationship_paths(self, model_name: str) -> List[List[str]]:
+        """
+        Get all relationship paths for a given model name.
+        This includes both direct relationships and reverse paths.
+
+        Args:
+            model_name (str): The name of the model to look up
+
+        Returns:
+            List[List[str]]: List of relationship paths (each path is a list of relationship names).
+            Returns empty list if model is not found.
+        """
+        return self._relationship_graph.get(model_name, [])
+
+    def get_relationship_graph(self) -> Dict[str, List[List[str]]]:
+        """
+        Get the complete relationship graph.
+
+        Returns:
+            Dict[str, List[List[str]]]: The complete relationship graph
+        """
+        return self._relationship_graph.copy()
+
+
+# Create a singleton instance to use throughout the application
+sync_model_manager = SyncModelChildrenManager()
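As a rough usage sketch of the manager above: the direct half of the graph only reshapes the setting (each relationship name becomes a single-step path), while the reverse half depends on Django model introspection through model_registry and is omitted here. build_direct_graph is an illustrative helper, not part of the codebase; the setting shape follows the comment in sync/views.py.

from typing import Dict, List

def build_direct_graph(model_relationships: Dict[str, List[str]]) -> Dict[str, List[List[str]]]:
    """Mirror of the first loop in _build_relationship_graph()."""
    graph: Dict[str, List[List[str]]] = {}
    for model_name, relationships in model_relationships.items():
        # every direct relationship becomes a single-item path
        graph.setdefault(model_name, []).extend([rel] for rel in relationships)
    return graph

print(build_direct_graph({'Match': ['team_scores', 'team_registration', 'player_registrations']}))
# {'Match': [['team_scores'], ['team_registration'], ['player_registrations']]}

With working reverse resolution, sync_model_manager.get_relationship_paths('TeamScore') would additionally include a one-step path back to 'Match', assuming the reverse accessor name can be resolved from the tournaments models.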

sync/signals.py

@@ -11,6 +11,8 @@ from .ws_sender import websocket_sender
 from .registry import device_registry, related_users_registry
 import logging
+import sys
+import traceback
 
 logger = logging.getLogger(__name__)
@@ -21,8 +23,9 @@ User = get_user_model()
 
 @receiver([pre_save, pre_delete])
 def presave_handler(sender, instance, **kwargs):
+    try:
         # some other classes are excluded in settings_app.py: SYNC_APPS
-    if not isinstance(instance, (BaseModel, User)):
+        if not isinstance(instance, (BaseModel, User)) or isinstance(instance, DataAccess):
            return
 
        signal = kwargs.get('signal')
@@ -33,20 +36,29 @@ def presave_handler(sender, instance, **kwargs):
        users = related_users(instance)
        related_users_registry.register(instance.id, users)
-    # user_ids = [user.id for user in users]
 
        if signal == pre_save:
            detect_foreign_key_changes_for_shared_instances(sender, instance)
            sig_type = 'pre_save'
        elif signal == pre_delete:
-        if hasattr(instance, 'id'):
-            data_access_list = DataAccess.objects.filter(model_id=instance.id)
-            logger.info(f'>>> delete {data_access_list.count()} DataAccess')
-            data_access_list.delete()
+            # if hasattr(instance, 'id'):
+            #     try:
+            #         data_access_list = DataAccess.objects.filter(model_id=instance.id)
+            #         if data_access_list:
+            #             logger.info(f'>>> {instance.__class__.__name__} {instance.id} : delete {data_access_list.count()} DataAccess')
+            #             data_access_list.delete()
+            #     except Exception as e:
+            #         logger.info(f'*** ERRRRRRR: {e}')
+            #         logger.info(traceback.format_exc())
+            #         raise
            sig_type = 'pre_delete'
 
        # logger.info(f'* {sig_type} : {instance.__class__.__name__} > impacted users = {users}')
+    except Exception as e:
+        logger.info(f'*** ERROR: {e}')
+        raise
 
 
 @receiver([post_save, post_delete])
 def synchronization_notifications(sender, instance, created=False, **kwargs):
     """
@@ -59,47 +71,31 @@ def synchronization_notifications(sender, instance, created=False, **kwargs):
     if not isinstance(instance, BaseModel) and not isinstance(instance, User):
         return
 
+    try:
        process_foreign_key_changes(sender, instance, **kwargs)
 
        signal = kwargs.get('signal')
        save_model_log_if_possible(instance, signal, created)
        notify_impacted_users(instance)
        related_users_registry.unregister(instance.id)
+    except Exception as e:
+        logger.info(f'*** ERROR2: {e}')
+        logger.info(traceback.format_exc())
+        raise
 
 
 def notify_impacted_users(instance):
-    # print(f'*** notify_impacted_users for instance: {instance}')
-    # user_ids = set()
-    # # add impacted users
-    # if isinstance(instance, User):
-    #     user_ids.add(instance.id)
-    # elif isinstance(instance, BaseModel):
-    #     owner = instance.last_updated_by
-    #     if owner:
-    #         user_ids.add(owner.id)
-    # if isinstance(instance, BaseModel):
-    #     if hasattr(instance, '_users_to_notify'):
-    #         user_ids.update(instance._users_to_notify)
-    #     else:
-    #         print('no users to notify')
     device_id = device_registry.get_device_id(instance.id)
     users = related_users_registry.get_users(instance.id)
     if users:
         user_ids = [user.id for user in users]
         websocket_sender.send_message(user_ids, device_id)
-        # print(f'notify device: {device_id}, users = {user_ids}')
-        # for user_id in user_ids:
-        #     websocket_sender.send_user_message(user_id, device_id)
     device_registry.unregister(instance.id)
 
 
 def save_model_log_if_possible(instance, signal, created):
     users = related_users_registry.get_users(instance.id)
-    logger.debug(f'*** save_model_log_if_possible >>> users from registry = {users}, instance = {instance}')
+    # logger.info(f'*** save_model_log_if_possible >>> users from registry = {users}, instance = {instance}')
     if not users:
         logger.warning(f'!!! Registry returned empty users for instance {instance.id} ({instance.__class__.__name__})')
@@ -121,8 +117,8 @@ def save_model_log_if_possible(instance, signal, created):
     if isinstance(instance, SideStoreModel):
         store_id = instance.store_id
 
-    if operation == ModelOperation.DELETE:  # delete now unnecessary logs
-        ModelLog.objects.filter(model_id=instance.id).delete()
+    # if operation == ModelOperation.DELETE:  # delete now unnecessary logs
+    #     ModelLog.objects.filter(model_id=instance.id).delete()
 
     # user_ids = [user.id for user in users]
     # # print(f'users to notify: {user_ids}')
@@ -133,16 +129,14 @@ def save_model_log_if_possible(instance, signal, created):
         logger.info(f'!!! Model Log could not be created because no linked user could be found: {instance.__class__.__name__} {instance}, {signal}')
 
 
 def save_model_log(users, model_operation, model_name, model_id, store_id):
     device_id = device_registry.get_device_id(model_id)
     logger.info(f'*** creating ModelLogs for: {model_operation} {model_name} : {users}')
     try:
         with transaction.atomic():
             created_logs = []
             for user in users:
-                # logger.info(f'Creating ModelLog for user {user.id} ({user.username})')
+                # logger.info(f'Creating ModelLog for user {user.id} - user exists: {User.objects.filter(id=user.id).exists()}')
                 model_log = ModelLog(
                     user=user,
                     operation=model_operation,
@@ -152,20 +146,19 @@ def save_model_log(users, model_operation, model_name, model_id, store_id):
                     device_id=device_id
                 )
                 model_log.save()
-                # logger.info(f'ModelLog saved with ID: {model_log.id}')
                 created_logs.append(model_log.id)
-                # logger.info(f'Successfully created ModelLog {model_log.id}')
 
-            # logger.info(f'*** Successfully created {len(created_logs)} ModelLogs: {created_logs}')
+            # Immediate verification within transaction
+            immediate_count = ModelLog.objects.filter(id__in=created_logs).count()
+            # logger.info(f'*** Within transaction: Created {len(created_logs)}, found {immediate_count}')
 
-        # Verify ModelLogs were actually persisted
+        # Verification after transaction commits
         persisted_count = ModelLog.objects.filter(id__in=created_logs).count()
-        if persisted_count != len(created_logs):
-            logger.error(f'*** PERSISTENCE VERIFICATION FAILED! Created {len(created_logs)} ModelLogs but only {persisted_count} were persisted to database')
-        else:
-            logger.info(f'*** PERSISTENCE VERIFIED: All {persisted_count} ModelLogs successfully persisted')
+        # logger.info(f'*** After transaction: Created {len(created_logs)}, persisted {persisted_count}')
     except Exception as e:
-        logger.error(f'*** FAILED to create ModelLogs for: {model_operation} {model_name}, users: {[u.id for u in users]}, error: {e}', exc_info=True)
+        logger.error(f'*** Exception during ModelLog creation: {e}', exc_info=True)
         raise
 
     # with transaction.atomic():
@@ -209,7 +202,6 @@ def detect_foreign_key_changes_for_shared_instances(sender, instance):
         return
 
     data_access_list = related_data_access(instance)
-    # print(f'FK change > DA count = {len(data_access_list)}')
     if data_access_list:
         try:
             old_instance = sender.objects.get(pk=instance.pk)
@@ -278,12 +270,12 @@ def process_foreign_key_changes(sender, instance, **kwargs):
 
 ### Data Access
 
-# @receiver(post_delete)
-# def delete_data_access_if_necessary(sender, instance, **kwargs):
-#     if not isinstance(instance, BaseModel):
-#         return
-#     if hasattr(instance, 'id'):
-#         DataAccess.objects.filter(model_id=instance.id).delete()
+@receiver(post_delete)
+def delete_data_access_if_necessary(sender, instance, **kwargs):
+    if not isinstance(instance, BaseModel):
+        return
+    if hasattr(instance, 'id'):
+        DataAccess.objects.filter(model_id=instance.id).delete()
 
 
 @receiver(m2m_changed, sender=DataAccess.shared_with.through)
 def handle_shared_with_changes(sender, instance, action, pk_set, **kwargs):
@@ -309,26 +301,41 @@ def handle_shared_with_changes(sender, instance, action, pk_set, **kwargs):
 
 @receiver(post_save, sender=DataAccess)
 def data_access_post_save(sender, instance, **kwargs):
+    try:
        instance.add_references()  # create DataAccess references on hierarchy
        if instance.related_user:
            evaluate_if_user_should_sync(instance.related_user)
+    except Exception as e:
+        logger.info(f'*** ERROR3: {e}')
+        logger.info(traceback.format_exc())
+        raise
 
 
 @receiver(pre_delete, sender=DataAccess)
 def revoke_access_after_delete(sender, instance, **kwargs):
+    try:
        instance.cleanup_references()
        instance.create_revoke_access_log()
        related_users_registry.register(instance.id, instance.shared_with.all())
        instance._user = instance.related_user
+    except Exception as e:
+        logger.info(f'*** ERROR4: {e}')
+        logger.info(traceback.format_exc())
+        raise
 
 
 @receiver(post_delete, sender=DataAccess)
 def data_access_post_delete(sender, instance, **kwargs):
+    try:
        notify_impacted_users(instance)
        if not hasattr(instance, '_user') or not instance._user:
            return
        evaluate_if_user_should_sync(instance._user)
+    except Exception as e:
+        logger.info(f'*** ERROR5: {e}')
+        logger.info(traceback.format_exc())
+        raise
 
 
 def related_users(instance):
     users = set()
@@ -373,9 +380,14 @@ def evaluate_if_user_should_sync(user):
 
 @receiver(post_save, sender=Device)
 def device_created(sender, instance, **kwargs):
+    try:
        if not instance.user:
            return
        evaluate_if_user_should_sync(instance.user)
+    except Exception as e:
+        logger.info(f'*** ERROR6: {e}')
+        logger.info(traceback.format_exc())
+        raise
 
 
 @receiver(pre_delete, sender=Device)
 def device_pre_delete(sender, instance, **kwargs):
@@ -383,6 +395,11 @@ def device_pre_delete(sender, instance, **kwargs):
 
 @receiver(post_delete, sender=Device)
 def device_post_delete(sender, instance, **kwargs):
+    try:
        if not hasattr(instance, '_user') or not instance._user:
            return
        evaluate_if_user_should_sync(instance._user)
+    except Exception as e:
+        logger.info(f'*** ERROR7: {e}')
+        logger.info(traceback.format_exc())
+        raise
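The same try/log/re-raise wrapper now appears in each receiver above. As an aside, it could be factored into a small decorator; a minimal sketch under that assumption (log_and_reraise is illustrative and not part of this commit):

import functools
import logging
import traceback

logger = logging.getLogger(__name__)

def log_and_reraise(label):
    """Log the exception and traceback under the given label, then re-raise."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                logger.info(f'*** {label}: {e}')
                logger.info(traceback.format_exc())
                raise
        return wrapper
    return decorator

# usage: decorating a receiver with @log_and_reraise('ERROR3') keeps the
# behaviour of the inline version while centralizing the logging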

sync/utils.py

@@ -50,6 +50,7 @@ class HierarchyOrganizer:
     def __init__(self):
         self.levels = []  # List of dictionaries, each representing a level
         self.item_levels = {}  # Keep track of items and their levels: (model_name, id) -> level
+        self.sharing_related_instances = {}  # Keep track of items and their levels: (model_name, id) -> level
 
     def add_item(self, model_name, item_data, level):
         """

sync/views.py

@@ -24,6 +24,10 @@ from .models import ModelLog, BaseModel, SideStoreModel, DataAccess
 from .registry import model_registry, device_registry
 from .ws_sender import websocket_sender
+import logging
+
+logger = logging.getLogger(__name__)
 
 
 # class HierarchyApiView(APIView):
 
 def add_children_hierarchy(instance, models_dict):
@@ -358,21 +362,32 @@ class LogProcessingResult:
         revocations = defaultdict(list)
         revocations_parents_organizer = HierarchyOrganizer()
-        # print(f'*** process_revocations: {len(self.revoke_info)}')
+        # logger.info(f'$$$ process_revocations: {len(self.revoke_info)}')
+        sync_models = getattr(settings, 'SYNC_MODEL_CHILDREN_SHARING', {})
 
         # First, collect all revocations
         for model_name, items in self.revoke_info.items():
             revocations[model_name].extend(items)
-            # print(f'*** process_revocations for {model_name}')
+            logger.info(f'$$$ process_revocations for {model_name}, items = {len(items)}')
 
             # Process parent hierarchies for each revoked item
             model = model_registry.get_model(model_name)
             for item in items:
+                logger.info(f'$$$ item revoked = {item}')
                 try:
                     instance = model.objects.get(id=item['model_id'])
-                    # print(f'*** process revoked item parents of {model_name} : {item['model_id']}')
+                    logger.info(f'$$$ process revoked item parents of {model_name} : {item['model_id']}')
                     add_parents_with_hierarchy_organizer(instance, revocations_parents_organizer)
+                    # if instance.__class__.__name__ in sync_models:
+                    #     sharing_related_instances = sharing_related_instances(instance, True)
+                    #     logger.info(f'$$$ get shared instances: {len(sharing_related_instances)}')
+                    #     revocations = merge_dicts_dedup(revocations, sharing_related_instances)
+                    #     # revocations_parents_organizer.sharing_related_instances = instance.sharing_related_instances()
                 except model.DoesNotExist:
                     pass
@@ -387,6 +402,8 @@ class LogProcessingResult:
         # print(f'self.shared_relationship_sets = {self.shared_relationship_sets}')
         # print(f'self.shared_relationship_removals = {self.shared_relationship_removals}')
 
+        logger.info('--------------------- SYNC')
+
         return {
             "updates": dict(self.updates),
             "deletions": dict(self.deletions),
@@ -451,3 +468,61 @@ class DataAccessViewSet(viewsets.ModelViewSet):
         if self.request.user:
             return self.queryset.filter(Q(related_user=self.request.user) | Q(shared_with__in=[self.request.user]))
         return []
+
+
+def merge_dicts_dedup(dict1, dict2):
+    """Merge two dictionaries, combining arrays and removing duplicates"""
+    all_keys = set(dict1.keys()) | set(dict2.keys())
+    merged = {}
+    for key in all_keys:
+        arr1 = dict1.get(key, [])
+        arr2 = dict2.get(key, [])
+        # Convert to sets, union them, then back to list to remove duplicates
+        merged[key] = list(set(arr1) | set(arr2))
+    return merged
+
+
+def sharing_related_instances(instance, identifiers_only):
+    sync_models = getattr(settings, 'SYNC_MODEL_CHILDREN_SHARING', {})
+    # if self.__class__.__name__ in sync_models:
+    relationships = sync_models[instance.__class__.__name__]
+    # 'Match': {'team_scores', 'team_registration', 'player_registrations'}
+    models_dict = defaultdict(dict)
+    print(f'relationships = {relationships}')
+    current = [instance]
+    for relationship in relationships:
+        print(f'> relationship = {relationship}')
+        values = []
+        for item in current:
+            value = getattr(item, relationship)
+            if hasattr(value, 'all') and callable(value.all):
+                # This is a queryset from a reverse relationship
+                for related_obj in value.all():
+                    child_model_name = related_obj.__class__.__name__
+                    if identifiers_only:
+                        models_dict[child_model_name].append(related_obj.data_identifier_dict())
+                    else:
+                        serializer = get_serializer(related_obj, child_model_name)
+                        models_dict[child_model_name].append(serializer.data)
+                    # print(f'>>> 1 Added child for {relationship}: {child_model_name}')
+                values.extend(value.all())
+            else:
+                # This is a single object
+                child_model_name = value.__class__.__name__
+                if identifiers_only:
+                    models_dict[child_model_name].append(value.data_identifier_dict())
+                else:
+                    serializer = get_serializer(value, child_model_name)
+                    models_dict[child_model_name].append(serializer.data)
+                # serializer = get_serializer(value, child_model_name)
+                # models_dict[child_model_name][value.id] = serializer.data
+                # print(f'>>> 2 Added child for {relationship}: {child_model_name}')
+                values.append(value)
+        current = values
+    return models_dict
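A quick note on merge_dicts_dedup() above: values are deduplicated through set(), so the per-key lists must hold hashable items, and the order of the merged list is not preserved. For example (values here are made-up identifier strings):

a = {'Match': ['id-1', 'id-2']}
b = {'Match': ['id-2', 'id-3'], 'TeamScore': ['id-9']}
print(merge_dicts_dedup(a, b))
# e.g. {'TeamScore': ['id-9'], 'Match': ['id-3', 'id-1', 'id-2']} -- list order may vary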

tournaments/models/match.py

@@ -1,7 +1,9 @@
 from django.db import models
-# from tournaments.models import group_stage
-from . import TournamentSubModel, Round, GroupStage, FederalMatchCategory
 from django.utils import timezone, formats
+from django.core.exceptions import ObjectDoesNotExist
+from . import TournamentSubModel, Round, GroupStage, FederalMatchCategory
 from datetime import datetime, timedelta
 import uuid
@@ -90,14 +92,16 @@ class Match(TournamentSubModel):
         return '--'
 
     def stage_name(self):
+        try:
            if self.name:
                return self.name
            elif self.round:
                return self.round.name()
            elif self.group_stage:
                return self.group_stage.display_name()
-        else:
-            return '--'
+        except ObjectDoesNotExist:
+            pass
+        return "--"
 
     def get_previous_round(self):
         # Calculate the next index

tournaments/models/team_score.py

@@ -1,4 +1,6 @@
 from django.db import models
+from django.core.exceptions import ObjectDoesNotExist
 from . import TournamentSubModel, Match, TeamRegistration, FederalMatchCategory
 import uuid
 from .match import Team  # Import Team only when needed
@@ -15,10 +17,14 @@ class TeamScore(TournamentSubModel):
         pass
 
     def __str__(self):
-        if self.match:
+        try:
+            if self.match and self.team_registration:
                return f"{self.match.stage_name()} #{self.match.index}: {self.player_names()}"
-        else:
-            return "Empty"
+            if self.match:
+                return f"{self.match.stage_name()} #{self.match.index}"
+        except ObjectDoesNotExist:
+            pass
+        return "--"
 
     def get_tournament(self):  # mandatory method for TournamentSubModel
         if self.team_registration:
@@ -48,13 +54,15 @@ class TeamScore(TournamentSubModel):
     #     return None
 
     def player_names(self):
-        if self.team_registration:
+        try:
+            if self.team_registration:  # this can cause an exception when deleted
                if self.team_registration.name:
                    return self.team_registration.name
                else:
                    names = self.team_registration.team_names()
                    return " - ".join(names)
-        else:
+        except TeamRegistration.DoesNotExist:
+            pass
         return "--"
 
     def shortened_team_names(self, forced=False):
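For context on the Match and TeamScore guards above: when the related row has already been deleted (for instance mid-cascade), even a truthiness check on a forward foreign key goes through the descriptor and raises the related model's DoesNotExist, which subclasses django.core.exceptions.ObjectDoesNotExist, so testing "if self.team_registration:" alone is not enough. A minimal sketch of the pattern; describe() is an illustrative name, not project code:

from django.core.exceptions import ObjectDoesNotExist

def describe(team_score):
    try:
        # the attribute access itself can raise TeamRegistration.DoesNotExist
        # if the related row is already gone
        if team_score.team_registration:
            return team_score.team_registration.name or "unnamed team"
    except ObjectDoesNotExist:
        pass
    return "--"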
