Fix TXT record generation: split each logical TXT string into 255-byte chunks, and pass a single-segment payload to Record_TXT as a bare bytes object (flattened) instead of a one-element list, working around a TypeError in record construction.

This commit is contained in:
Kalzu Rekku 2025-05-03 12:29:02 +03:00
parent 8ee0203de8
commit cb2beade61

View File

@ -1094,122 +1094,138 @@ class MiniDiscoveryResolver(common.ResolverBase):
def _handle_txt_query( def _handle_txt_query(
self, name: bytes, base_name: str, cls: int self, name: bytes, base_name: str, cls: int
) -> Tuple[List, List, List]: ) -> Tuple[List, List, List]:
"""Handles TXT record lookups, returning service metadata.""" """Handles TXT record lookups, returning service metadata, flattening single-segment payloads."""
answers = [] answers = []
instances = self._get_instances_for_query(base_name, is_srv_query=False) instances = self._get_instances_for_query(base_name, is_srv_query=False)
MAX_TXT_STRING_LEN = 255
for instance in instances: for instance in instances:
# --- Initialize list for this instance --- # --- Initialize list for the final payload segments ---
txt_data = [] final_txt_payload_segments = []
instance_id_str = str(instance.id) # Use consistently instance_id_str = str(instance.id)
try: try:
print(f"DNS TXT: Processing instance {instance_id_str}") # Log start print(f"DNS TXT: Processing instance {instance_id_str}")
# --- Process Tags --- # --- 1. Gather all logical strings first ---
logical_strings_to_encode = []
if isinstance(instance.tags, list): if isinstance(instance.tags, list):
for tag in instance.tags: logical_strings_to_encode.extend(
try: [str(tag) for tag in instance.tags]
# Ensure tag is string before encoding )
txt_data.append(str(tag).encode("utf-8"))
except Exception as tag_enc_err:
print(
f"ERROR encoding tag '{repr(tag)}' (type: {type(tag)}) for instance {instance_id_str}: {tag_enc_err}"
)
else: else:
print( print(
f"WARNING: Instance {instance_id_str} tags are not a list: {type(instance.tags)}" f"WARNING: Instance {instance_id_str} tags are not a list: {type(instance.tags)}"
) )
# --- Process Metadata ---
if isinstance(instance.metadata, dict): if isinstance(instance.metadata, dict):
for k, v in instance.metadata.items(): logical_strings_to_encode.extend(
try: [f"{str(k)}={str(v)}" for k, v in instance.metadata.items()]
# Ensure key/value are strings before formatting/encoding )
key_str = str(k)
val_str = str(v)
txt_data.append(f"{key_str}={val_str}".encode("utf-8"))
except Exception as meta_enc_err:
print(
f"ERROR encoding metadata item '{repr(k)}':'{repr(v)}' (types: {type(k)}/{type(v)}) for instance {instance_id_str}: {meta_enc_err}"
)
else: else:
print( print(
f"WARNING: Instance {instance_id_str} metadata is not a dict: {type(instance.metadata)}" f"WARNING: Instance {instance_id_str} metadata is not a dict: {type(instance.metadata)}"
) )
# --- Process Instance ID --- logical_strings_to_encode.append(f"instance_id={instance_id_str}")
try:
txt_data.append(f"instance_id={instance_id_str}".encode("utf-8")) # --- 2. Encode each logical string and split if > 255 bytes ---
except Exception as id_enc_err: for logical_string in logical_strings_to_encode:
try:
encoded_bytes = logical_string.encode("utf-8")
# Split the encoded bytes into chunks of MAX_TXT_STRING_LEN
for i in range(0, len(encoded_bytes), MAX_TXT_STRING_LEN):
chunk = encoded_bytes[i : i + MAX_TXT_STRING_LEN]
# Append each chunk as a separate item for the TXT payload
final_txt_payload_segments.append(chunk)
except Exception as enc_err:
# Handle potential errors during encoding or processing a specific string
print(
f"ERROR encoding/splitting item '{logical_string}' for {instance_id_str}: {enc_err}. Skipping this item."
)
# --- 3. Debugging the final list of segments ---
# (Optional: Keep the debugging print statements from previous versions if needed)
print(
f"DNS TXT DEBUG: FINAL payload segments count for {instance_id_str}: {len(final_txt_payload_segments)}"
)
# ... add detailed segment logging back here if required ...
valid_payload_structure = True # Assume valid unless checks fail below
# Basic check if it's a list and contains bytes
if not isinstance(final_txt_payload_segments, list):
print(f" ERROR: final_txt_payload_segments is not a list!")
valid_payload_structure = False
elif final_txt_payload_segments and not all(
isinstance(s, bytes) for s in final_txt_payload_segments
):
print( print(
f"ERROR encoding instance ID for {instance_id_str}: {id_enc_err}" f" ERROR: Not all items in final_txt_payload_segments are bytes!"
)
valid_payload_structure = False
# --- 4. Create Record_TXT, FLATTENING if only one segment ---
if valid_payload_structure and final_txt_payload_segments:
num_segments = len(final_txt_payload_segments)
print(
f"DNS TXT: Attempting to create Record_TXT for instance {instance_id_str} with {num_segments} segments..."
) )
# --- **** THE CRITICAL DEBUGGING STEP **** --- # **** THE KEY WORKAROUND ****
print( if num_segments == 1:
f"DNS TXT DEBUG: Data for instance {instance_id_str} BEFORE Record_TXT:" # If only one segment, pass the bytes object directly
) payload_data = final_txt_payload_segments[0]
valid_types = True print(" (Payload is single segment, passing bytes directly)")
if not isinstance(txt_data, list): else:
print(f" FATAL: txt_data is NOT a list! Type: {type(txt_data)}") # If multiple segments, pass the list (MUST use list/tuple here)
valid_types = False payload_data = final_txt_payload_segments # Pass the list
else: print(
for i, item in enumerate(txt_data): f" (Payload has {num_segments} segments, passing sequence)"
item_type = type(item) )
print(f" Item {i}: Type={item_type}, Value={repr(item)}") # **** END WORKAROUND ****
if item_type is not bytes:
print(f" ^^^^^ ERROR: Item {i} is NOT bytes!")
valid_types = False
# --- **** END DEBUGGING STEP **** ---
if not txt_data: # Instantiate Record_TXT with the correctly structured data
payload = dns.Record_TXT(payload_data, ttl=DNS_DEFAULT_TTL)
print( print(
f"DNS TXT: No valid TXT data generated for instance {instance_id_str}, skipping." f"DNS TXT: Record_TXT created successfully for {instance_id_str}."
) )
continue
# Only proceed if all items were bytes rr = dns.RRHeader(
if not valid_types: name=name,
type=dns.TXT,
cls=cls,
ttl=DNS_DEFAULT_TTL,
payload=payload,
)
answers.append(rr)
print( print(
f"DNS TXT ERROR: txt_data for {instance_id_str} contained non-bytes elements. Skipping record creation." f"DNS TXT: RRHeader created and added for instance {instance_id_str}."
) )
continue # Skip this instance if data is bad
# --- Create Payload and RR Header --- elif not final_txt_payload_segments:
# This is where the error occurs if txt_data contains non-bytes print(
print( f"DNS TXT: Skipping record creation for {instance_id_str} due to empty payload."
f"DNS TXT: Attempting to create Record_TXT for instance {instance_id_str}..." )
) else: # valid_payload_structure must be False
payload = dns.Record_TXT(txt_data, ttl=DNS_DEFAULT_TTL) print(
print( f"DNS TXT ERROR: Skipping record creation for {instance_id_str} due to invalid payload structure."
f"DNS TXT: Record_TXT created successfully for {instance_id_str}." )
)
rr = dns.RRHeader( # --- Error Handling (Catch errors during the DNS object creation itself) ---
name=name,
type=dns.TXT,
cls=cls,
ttl=DNS_DEFAULT_TTL,
payload=payload,
)
answers.append(rr)
print(
f"DNS TXT: RRHeader created and added for instance {instance_id_str}."
)
# Catch errors specifically during the DNS object creation phase
except TypeError as te_dns: except TypeError as te_dns:
# This might still catch errors if the multi-segment pathway also fails
print( print(
f"FATAL DNS TypeError creating TXT record for {instance_id_str}: {te_dns}" f"FATAL DNS TypeError creating TXT record objects for {instance_id_str}: {te_dns}"
) )
print( print(
" This likely means the list passed to Record_TXT contained non-bytes elements." " This could indicate an issue even with multi-segment lists, or the flattened single segment."
) )
traceback.print_exc() # Crucial to see where in Twisted it fails traceback.print_exc()
except Exception as e_dns: except Exception as e_dns:
print( print(
f"ERROR creating TXT DNS objects for instance {instance_id_str}: {e_dns.__class__.__name__}: {e_dns}" f"ERROR creating TXT DNS objects for {instance_id_str}: {e_dns.__class__.__name__}: {e_dns}"
) )
traceback.print_exc() traceback.print_exc()