{% extends "layout.html" %} {% block body %}

Replace <code>df</code> below with the name of the DataFrame variable you are using.

# Generated PySpark snippet: remap the values of one column via a lookup dict.
# Replace `df` with your own DataFrame variable.
from itertools import chain

from pyspark.sql.functions import coalesce, lit, col, create_map

# Server-rendered dict of original -> replacement values.
map_values = {{final}}

# Flatten the dict into [key1, val1, key2, val2, ...] and build a Spark
# MAP literal so it can be used for per-row lookups.
mapping_expr = create_map([lit(x) for x in chain(*map_values.items())])

# Look up each row's '{{col1}}' value in the map and write the result into
# '{{col2}}'; coalesce falls back to the existing '{{col2}}' value when the
# '{{col1}}' value has no entry in the mapping.
# NOTE: the column-name placeholders must be quoted — unquoted they would
# render as undefined Python identifiers in the generated code.
df = df.withColumn('{{col2}}', coalesce(mapping_expr[df['{{col1}}']], df['{{col2}}']))

{% endblock %}