{% extends "layout.html" %} {% block body %}
from itertools import chain
from pyspark.sql.functions import coalesce, create_map, lit
# Partial mapping: replace values in the target column using the lookup dict
# below; rows whose key is missing from the map keep their original value.
map_values = {{final_part}}
# Build a literal MapType column from alternating key/value literals.
mapping_expr = create_map([lit(x) for x in chain(*map_values.items())])
df = df.withColumn({{col2}}, coalesce(mapping_expr[df[{{col1}}]], df[{{col2}}]))
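# A minimal sketch of how the lines above might render, assuming hypothetical
# inputs final_part = {'US': 'United States'}, col1 = 'country',
# col2 = 'country_name' (illustrative names only, not taken from the callers):
#   map_values = {'US': 'United States'}
#   mapping_expr = create_map([lit(x) for x in chain(*map_values.items())])
#   df = df.withColumn('country_name',
#                      coalesce(mapping_expr[df['country']], df['country_name']))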
# Full mapping: replace values using the complete lookup dict; rows whose key
# is missing from the map are set to -1 instead of keeping the original value.
map_values = {{final_all}}
mapping_expr = create_map([lit(x) for x in chain(*map_values.items())])
df = df.withColumn({{col2}}, coalesce(mapping_expr[df[{{col1}}]], lit(-1)))
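# Under the same hypothetical inputs, but with final_all as the complete
# lookup dict, this block would render the fallback as a constant:
#   df = df.withColumn('country_name',
#                      coalesce(mapping_expr[df['country']], lit(-1)))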
{% endblock %}