How can I get an external host’s IP address in Python External Transforms?

I am working in a Python transforms repository in Foundry. I need to resolve external hostnames (e.g., www.google.com) within my transform code using Python’s socket library. Currently, only internal hostnames (e.g., kubernetes.default.svc.cluster.local) are resolvable, and external DNS lookups return null.

Does anyone know how to get an external host’s IP address in Python External Transforms?

The example code is shown below:

from transforms.api import transform, Output
import socket
from transforms.external.systems import external_systems, Source
from pyspark.sql.types import StructType, StructField, StringType

def get_resolv_conf():
    """Return the contents of /etc/resolv.conf for DNS debugging.

    Returns the file text on success, or a diagnostic string of the form
    "UNAVAILABLE: <ExceptionType>: <message>" if the file cannot be read
    (e.g. missing file or insufficient permissions inside the container).
    """
    try:
        # NOTE: smart quotes from the forum paste replaced with plain ASCII
        # quotes; as posted this line was a SyntaxError.
        with open("/etc/resolv.conf", "r") as f:
            return f.read()
    except Exception as e:
        # Bug fix: the exception class name is ``__name__``, not ``name`` —
        # the original raised AttributeError inside the except block.
        return f"UNAVAILABLE: {type(e).__name__}: {str(e)}"

def resolve_ips_driver(hostname: str):
    """Resolve ``hostname`` to a list of IP addresses on the Spark driver.

    Returns a tuple ``(ips, status)`` where ``ips`` is a de-duplicated list
    of IP address strings (possibly empty) and ``status`` is one of
    "RESOLVED", "EMPTY_HOSTNAME", "NO_RECORDS_RETURNED", or
    "ERROR: <ExceptionType>: <message>". Never raises.
    """
    if not hostname:
        # Bug fix: the forum paste dropped the ``[]`` literal, leaving the
        # syntactically invalid ``return , "EMPTY_HOSTNAME"``.
        return [], "EMPTY_HOSTNAME"
    try:
        # Cap the DNS lookup at 5 seconds so an unreachable resolver does
        # not hang the transform. NOTE(review): this mutates a process-wide
        # default for all sockets, not just this lookup — acceptable for a
        # diagnostic transform, but worth confirming for shared code.
        socket.setdefaulttimeout(5)
        results = socket.getaddrinfo(hostname, None)

        # getaddrinfo returns 5-tuples; index 4 is the sockaddr whose first
        # element is the IP address. De-duplicate while preserving order.
        ips = []
        for r in results:
            ip = r[4][0]
            if ip and ip not in ips:
                ips.append(ip)

        if ips:
            return ips, "RESOLVED"
        return [], "NO_RECORDS_RETURNED"
    except Exception as e:
        # Catch-all is deliberate: one unresolvable host must not fail the
        # whole transform; the error is surfaced in the output dataset.
        return [], f"ERROR: {type(e).__name__}: {str(e)}"

# Bug fix: the decorator name was pasted four times over
# ("external_systemsexternal_systems..."); it must be ``external_systems``
# as imported from ``transforms.external.systems``. Smart quotes in the
# Source/Output identifiers were also replaced with plain ASCII quotes.
@external_systems(
    akamai_api_external_transform_prod_source=Source("xxx73")
)
@transform(
    output=Output("xx2")
)
def resolve_hostnames(ctx, akamai_api_external_transform_prod_source, output):
    """Resolve a fixed list of hostnames on the driver and write the results.

    Emits one row per (hostname, ip) pair — or a single row with a null
    ``ip_address`` when resolution fails — plus the resolution status and
    the driver's /etc/resolv.conf contents for DNS debugging.

    NOTE(review): external DNS (e.g. www.google.com) only resolves if the
    registered Source's egress policy permits it; inside Foundry, lookups
    run against the cluster resolver shown in ``resolv_conf``.
    """
    spark = ctx.spark_session

    hostnames = ["kubernetes.default.svc.cluster.local", "www.google.com"]

    resolv_conf = get_resolv_conf()

    rows = []
    for hostname in hostnames:
        ips, status = resolve_ips_driver(hostname)

        # Always write at least one row per hostname so failures are visible
        # in the output dataset rather than silently dropped.
        if ips:
            for ip in ips:
                rows.append((hostname, ip, status, resolv_conf))
        else:
            rows.append((hostname, None, status, resolv_conf))

    schema = StructType([
        StructField("hostname", StringType(), True),
        StructField("ip_address", StringType(), True),
        StructField("status", StringType(), True),
        StructField("resolv_conf", StringType(), True),
    ])

    df = spark.createDataFrame(rows, schema=schema)
    output.write_dataframe(df)