From f055b560dd41dcf3c26376e9f1919d5091b2a034 Mon Sep 17 00:00:00 2001
From: autodeploy <benoit@startinblox.com>
Date: Mon, 10 Jun 2024 10:51:37 +0200
Subject: [PATCH] fix: ids update

---
 .../commands/generate_static_content.py       | 56 ++++++++++---------
 1 file changed, 31 insertions(+), 25 deletions(-)

diff --git a/djangoldp/management/commands/generate_static_content.py b/djangoldp/management/commands/generate_static_content.py
index 1f032fb9..3b9151b6 100644
--- a/djangoldp/management/commands/generate_static_content.py
+++ b/djangoldp/management/commands/generate_static_content.py
@@ -15,7 +15,7 @@ class Command(BaseCommand):
           os.makedirs(output_dir, exist_ok=True)
 
         base_uri = getattr(settings, 'BASE_URL', '')
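+        # MAX_RECURSION_DEPTH limits how deep update_ids_and_fetch_associated recurses into associated resources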
-        max_depth = getattr(settings, 'MAX_RECURSION_DEPTH', 3)  # Ad
+        max_depth = getattr(settings, 'MAX_RECURSION_DEPTH', 5)  # Ad
 
         for model in apps.get_models():
             if hasattr(model._meta, 'static_version'):
@@ -57,8 +57,10 @@ class Command(BaseCommand):
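+            # Walk the decoded payload: lists are processed element by element,
+            # anything else is treated as a single resource whose ids get rewritten.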
             if isinstance(data, list):
                 for item in data:
                     self.update_and_fetch_id(item, base_uri, output_dir, depth, max_depth)
-            elif isinstance(data, dict):
+            else:
                 self.update_and_fetch_id(data, base_uri, output_dir, depth, max_depth)
+
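+            # Re-serialise so callers receive the payload with its ids rewritten to /ssr paths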
+            print(f"Content: {data}")
             return json.dumps(data)
         except json.JSONDecodeError as e:
             self.stdout.write(self.style.ERROR(f'Failed to decode JSON: {e}'))
@@ -70,30 +72,34 @@ class Command(BaseCommand):
             path = f'/ssr{parsed_url.path}'
             item['@id'] = urlunparse((parsed_url.scheme, parsed_url.netloc, path, parsed_url.params, parsed_url.query, parsed_url.fragment))
             associated_url = urlunparse((parsed_url.scheme, parsed_url.netloc, parsed_url.path, parsed_url.params, parsed_url.query, parsed_url.fragment))
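+            # Map the rewritten /ssr URL path to a relative file path: drop the leading
+            # and trailing '/' (resource URLs are assumed to end with a slash) and append '.json'.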
+            associated_file_path = path[1:-1] + '.json'
+            associated_file_dir = os.path.dirname(associated_file_path)
+            os.makedirs(associated_file_dir, exist_ok=True)
 
-            # Fetch associated content
-            try:
-                response = requests.get(associated_url, timeout=settings.SSR_REQUEST_TIMEOUT)
-                if response.status_code == 200:
-                    associated_content = response.text
-                    associated_content = self.update_ids_and_fetch_associated(associated_content, base_uri, output_dir, depth + 1, max_depth)
-                    # associated_content = self.update_ids_and_fetch_associated(response.text, base_uri, output_dir)
-                    # associated_content = self.update_and_fetch_id(response.text, base_uri,  output_dir)
-                    associated_file_path = path[1:-1] + '.json'
-                    print(f"associated_file_path: {associated_file_path}")
-                    associated_file_dir = os.path.dirname(associated_file_path)
-                    print(f"associated_file_path: {associated_file_dir}")
-                    if not os.path.exists(associated_file_dir):
-                        os.makedirs(associated_file_dir)
-                    with open(associated_file_path, 'w') as f:
-                        f.write(associated_content)
-                    self.stdout.write(self.style.SUCCESS(f'Successfully fetched and saved associated content for {associated_url}'))
-                else:
-                    self.stdout.write(self.style.ERROR(f'Failed to fetch associated content from {associated_url}: {response.status_code}'))
-            except requests.exceptions.Timeout:
-                self.stdout.write(self.style.ERROR(f'Request to {associated_url} timed out'))
-            except requests.exceptions.RequestException as e:
-                self.stdout.write(self.style.ERROR(f'An error occurred: {e}'))
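+            # Only fetch and render a resource once: if its file is already on disk,
+            # reuse it instead of requesting the URL again.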
+            if not os.path.exists(associated_file_path):
+                # Fetch associated content
+                try:
+                    response = requests.get(associated_url, timeout=settings.SSR_REQUEST_TIMEOUT)
+                    if response.status_code == 200:
+                        associated_content = self.update_ids_and_fetch_associated(response.text, base_uri, output_dir, depth + 1, max_depth)
+                        with open(associated_file_path, 'w') as f:
+                            f.write(associated_content)
+                        self.stdout.write(self.style.SUCCESS(f'Successfully fetched and saved associated content for {associated_url}'))
+                    else:
+                        self.stdout.write(self.style.ERROR(f'Failed to fetch associated content from {associated_url}: {response.status_code}'))
+                except requests.exceptions.Timeout:
+                    self.stdout.write(self.style.ERROR(f'Request to {associated_url} timed out'))
+                except requests.exceptions.RequestException as e:
+                    self.stdout.write(self.style.ERROR(f'An error occurred: {e}'))
+            else:
+                self.stdout.write(self.style.SUCCESS(f'Associated file already exists for {associated_url}, skipping fetch'))
 
         for key, value in item.items():
             if isinstance(value, dict):
-- 
GitLab