@@ -238,6 +238,9 @@ def bulk_add_tag_mapping(
238238
239239     all_tag_names = list(tag_to_instances.keys())
240240
241+     def _key(name: str) -> str:
242+         return name if case_sensitive else name.lower()
243+
241244 # --- Query 1: fetch existing tag objects ---
242245     if case_sensitive:
243246         existing_tags: dict[str, object] = {
@@ -255,28 +258,16 @@ def bulk_add_tag_mapping(
255258 }
256259     missing_names = [n for n in all_tag_names if n.lower() not in existing_tags]
257260
258-     # --- Query 2: create missing tag objects then re-fetch to get their PKs ---
261+     # --- Query 2: create missing tag objects ---
262+     # Use get_or_create to call model.save(), which lets tagulous generate the slug field.
263+     # bulk_create bypasses save() so slug is never set, causing unique constraint failures.
259264     if missing_names:
260-         tag_model.objects.bulk_create(
261-             [tag_model(name=n, protected=False) for n in missing_names],
262-             ignore_conflicts=True,
263-         )
264-         if case_sensitive:
265-             existing_tags.update(
266-                 {t.name: t for t in tag_model.objects.filter(name__in=missing_names)},
267-             )
268-         else:
269-             existing_tags.update(
270-                 {
271-                     t.name_lower: t
272-                     for t in tag_model.objects.annotate(name_lower=Lower("name")).filter(
273-                         name_lower__in=[n.lower() for n in missing_names],
274-                     )
275-                 },
276-             )
277-
278-     def _key(name: str) -> str:
279-         return name if case_sensitive else name.lower()
265+         for n in missing_names:
266+             if case_sensitive:
267+                 tag, _ = tag_model.objects.get_or_create(name=n, defaults={"protected": False})
268+             else:
269+                 tag, _ = tag_model.objects.get_or_create(name__iexact=n, defaults={"name": n, "protected": False})
270+             existing_tags[_key(n)] = tag
280271
281272 # --- Query 3: fetch all pre-existing (instance, tag) through-model rows ---
282273     all_instance_ids = {inst.pk for inst in all_instances}
@@ -307,14 +298,13 @@ def _key(name: str) -> str:
307298 return 0
308299
309300 # --- Query 4: bulk-create all new relationships (batched for memory) ---
310-     total_created = 0
301+     # Use len(new_relationships) for the count: existing pairs were already filtered out above,
302+     # so every entry here is new. bulk_create return value is unreliable with ignore_conflicts.
303+     total_created = len(new_relationships)
311304     with transaction.atomic():
312305         for i in range(0, len(new_relationships), batch_size):
313306             batch = new_relationships[i : i + batch_size]
314-             actually_created = through_model.objects.bulk_create(batch, ignore_conflicts=True)
315-             total_created += (
316-                 len(actually_created) if hasattr(actually_created, "__len__") else len(batch)
317-             )
307+             through_model.objects.bulk_create(batch, ignore_conflicts=True)
318308
319309 # --- Query 5: update all tag counts in one UPDATE … CASE WHEN … ---
320310     tag_model.objects.filter(pk__in=list(created_per_tag.keys())).update(
0 commit comments