Skip to content

has_column

laktory.spark.dataframe.has_column

Functions

has_column

has_column(df, col)

Check if column `col` exists in `df`.

Parameters:

df : DataFrame
    Input DataFrame

col : str
    Column name

Returns:

bool
    Result

Examples:

import laktory  # noqa: F401
import pyspark.sql.types as T

schema = T.StructType(
    [
        T.StructField("indexx", T.IntegerType()),
        T.StructField(
            "stock",
            T.StructType(
                [
                    T.StructField("symbol", T.StringType()),
                    T.StructField("name", T.StringType()),
                ]
            ),
        ),
        T.StructField(
            "prices",
            T.ArrayType(
                T.StructType(
                    [
                        T.StructField("open", T.IntegerType()),
                        T.StructField("close", T.IntegerType()),
                    ]
                )
            ),
        ),
    ]
)

data = [
    (
        1,
        {"symbol": "AAPL", "name": "Apple"},
        [{"open": 1, "close": 2}, {"open": 1, "close": 2}],
    ),
    (
        2,
        {"symbol": "MSFT", "name": "Microsoft"},
        [{"open": 1, "close": 2}, {"open": 1, "close": 2}],
    ),
    (
        3,
        {"symbol": "GOOGL", "name": "Google"},
        [{"open": 1, "close": 2}, {"open": 1, "close": 2}],
    ),
]

df = spark.createDataFrame(data, schema=schema)
print(df.laktory.has_column("symbol"))
#> False
print(df.laktory.has_column("`stock`.`symbol`"))
#> True
print(df.laktory.has_column("`prices[2]`.`close`"))
#> True
Source code in laktory/spark/dataframe/has_column.py, lines 5–83:
def has_column(df: DataFrame, col: str) -> bool:
    """
    Check if column `col` exists in `df`

    Nested struct fields are referenced with backtick quoting
    (`` `stock`.`symbol` ``) and array elements with an index suffix
    (`` `prices[2]`.`close` ``). Explicit indices are normalized to the
    wildcard form `[*]` before the lookup, so the check is purely
    schema-based: an out-of-range index still returns True as long as
    the field itself exists in the schema.

    Parameters
    ----------
    df:
        Input DataFrame
    col:
        Column name

    Returns
    -------
    :
        Result

    Examples
    --------

    ```py
    import laktory  # noqa: F401
    import pyspark.sql.types as T

    schema = T.StructType(
        [
            T.StructField("indexx", T.IntegerType()),
            T.StructField(
                "stock",
                T.StructType(
                    [
                        T.StructField("symbol", T.StringType()),
                        T.StructField("name", T.StringType()),
                    ]
                ),
            ),
            T.StructField(
                "prices",
                T.ArrayType(
                    T.StructType(
                        [
                            T.StructField("open", T.IntegerType()),
                            T.StructField("close", T.IntegerType()),
                        ]
                    )
                ),
            ),
        ]
    )

    data = [
        (
            1,
            {"symbol": "AAPL", "name": "Apple"},
            [{"open": 1, "close": 2}, {"open": 1, "close": 2}],
        ),
        (
            2,
            {"symbol": "MSFT", "name": "Microsoft"},
            [{"open": 1, "close": 2}, {"open": 1, "close": 2}],
        ),
        (
            3,
            {"symbol": "GOOGL", "name": "Google"},
            [{"open": 1, "close": 2}, {"open": 1, "close": 2}],
        ),
    ]

    df = spark.createDataFrame(data, schema=schema)
    print(df.laktory.has_column("symbol"))
    #> False
    print(df.laktory.has_column("`stock`.`symbol`"))
    #> True
    print(df.laktory.has_column("`prices[2]`.`close`"))
    #> True
    ```
    """
    # Normalize explicit array indices ("[2]") to the wildcard form ("[*]")
    # used by the flattened schema representation.
    _col = re.sub(r"\[\d+\]", "[*]", col)
    # Backticks are only quoting; the flattened schema names carry none.
    _col = _col.replace("`", "")
    return _col in df.laktory.schema_flat()