TXT record generation: split long strings into 255-byte chunks and pass a single-segment payload as bare bytes (flattening workaround for Record_TXT).

This commit is contained in:
Kalzu Rekku 2025-05-03 12:29:02 +03:00
parent 8ee0203de8
commit cb2beade61

View File

@ -1094,122 +1094,138 @@ class MiniDiscoveryResolver(common.ResolverBase):
def _handle_txt_query(
    self, name: bytes, base_name: str, cls: int
) -> Tuple[List, List, List]:
    """Handle a TXT query, returning service metadata for matching instances.

    Builds one TXT record per discovered instance. Each logical string
    (tags, ``key=value`` metadata pairs, and the ``instance_id``) is UTF-8
    encoded and split into chunks of at most 255 bytes, since a single DNS
    TXT character-string cannot exceed 255 octets (RFC 1035). When the
    payload ends up as exactly one chunk, the bytes object is passed to
    ``dns.Record_TXT`` directly instead of a one-element list — workaround
    for a TypeError observed with single-segment list payloads.

    Args:
        name: The raw query name (bytes), echoed back in the RR header.
        base_name: Normalized service name used to look up instances.
        cls: DNS class of the query (normally IN).

    Returns:
        ``(answers, authority, additional)`` — authority and additional are
        always empty lists here.
    """
    answers = []
    instances = self._get_instances_for_query(base_name, is_srv_query=False)
    MAX_TXT_STRING_LEN = 255  # RFC 1035 limit for one TXT character-string

    for instance in instances:
        # Final list of <=255-byte chunks that make up this instance's payload.
        final_txt_payload_segments = []
        instance_id_str = str(instance.id)
        try:
            print(f"DNS TXT: Processing instance {instance_id_str}")

            # --- 1. Gather all logical strings first ---
            logical_strings_to_encode = []
            if isinstance(instance.tags, list):
                logical_strings_to_encode.extend(
                    [str(tag) for tag in instance.tags]
                )
            else:
                print(
                    f"WARNING: Instance {instance_id_str} tags are not a list: {type(instance.tags)}"
                )
            if isinstance(instance.metadata, dict):
                logical_strings_to_encode.extend(
                    [f"{str(k)}={str(v)}" for k, v in instance.metadata.items()]
                )
            else:
                print(
                    f"WARNING: Instance {instance_id_str} metadata is not a dict: {type(instance.metadata)}"
                )
            logical_strings_to_encode.append(f"instance_id={instance_id_str}")

            # --- 2. Encode each logical string and split if > 255 bytes ---
            for logical_string in logical_strings_to_encode:
                try:
                    encoded_bytes = logical_string.encode("utf-8")
                    # Split the encoded bytes into chunks of MAX_TXT_STRING_LEN;
                    # each chunk becomes a separate TXT character-string.
                    for i in range(0, len(encoded_bytes), MAX_TXT_STRING_LEN):
                        chunk = encoded_bytes[i : i + MAX_TXT_STRING_LEN]
                        final_txt_payload_segments.append(chunk)
                except Exception as enc_err:
                    # A bad item is skipped rather than failing the whole record.
                    print(
                        f"ERROR encoding/splitting item '{logical_string}' for {instance_id_str}: {enc_err}. Skipping this item."
                    )

            # --- 3. Sanity-check the final list of segments ---
            print(
                f"DNS TXT DEBUG: FINAL payload segments count for {instance_id_str}: {len(final_txt_payload_segments)}"
            )
            valid_payload_structure = True  # assume valid unless checks fail below
            if not isinstance(final_txt_payload_segments, list):
                print(f"  ERROR: final_txt_payload_segments is not a list!")
                valid_payload_structure = False
            elif final_txt_payload_segments and not all(
                isinstance(s, bytes) for s in final_txt_payload_segments
            ):
                print(
                    f"  ERROR: Not all items in final_txt_payload_segments are bytes!"
                )
                valid_payload_structure = False

            # --- 4. Create Record_TXT, FLATTENING if only one segment ---
            if valid_payload_structure and final_txt_payload_segments:
                num_segments = len(final_txt_payload_segments)
                print(
                    f"DNS TXT: Attempting to create Record_TXT for instance {instance_id_str} with {num_segments} segments..."
                )
                # **** THE KEY WORKAROUND ****
                if num_segments == 1:
                    # Single segment: pass the bytes object directly.
                    payload_data = final_txt_payload_segments[0]
                    print("  (Payload is single segment, passing bytes directly)")
                else:
                    # Multiple segments: pass the list (sequence required here).
                    payload_data = final_txt_payload_segments
                    print(
                        f"  (Payload has {num_segments} segments, passing sequence)"
                    )
                # **** END WORKAROUND ****
                payload = dns.Record_TXT(payload_data, ttl=DNS_DEFAULT_TTL)
                print(
                    f"DNS TXT: Record_TXT created successfully for {instance_id_str}."
                )
                rr = dns.RRHeader(
                    name=name,
                    type=dns.TXT,
                    cls=cls,
                    ttl=DNS_DEFAULT_TTL,
                    payload=payload,
                )
                answers.append(rr)
                print(
                    f"DNS TXT: RRHeader created and added for instance {instance_id_str}."
                )
            elif not final_txt_payload_segments:
                print(
                    f"DNS TXT: Skipping record creation for {instance_id_str} due to empty payload."
                )
            else:  # valid_payload_structure must be False
                print(
                    f"DNS TXT ERROR: Skipping record creation for {instance_id_str} due to invalid payload structure."
                )

        # --- Error Handling (errors during the DNS object creation itself) ---
        except TypeError as te_dns:
            print(
                f"FATAL DNS TypeError creating TXT record objects for {instance_id_str}: {te_dns}"
            )
            print(
                "  This could indicate an issue even with multi-segment lists, or the flattened single segment."
            )
            traceback.print_exc()  # show where in Twisted it fails
        except Exception as e_dns:
            print(
                f"ERROR creating TXT DNS objects for {instance_id_str}: {e_dns.__class__.__name__}: {e_dns}"
            )
            traceback.print_exc()

    # NOTE(review): the closing return is outside the visible diff hunk;
    # reconstructed from the annotated return type and the empty
    # authority/additional convention of ResolverBase handlers — confirm.
    return answers, [], []