This is a special version of make_interval that performs the same operation, but returns NULL instead of raising an error if the interval cannot be created.
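For instance, where make_interval would fail on a value that overflows the interval type, try_make_interval yields NULL. A minimal sketch, assuming a running Spark session bound to spark:

```python
from pyspark.databricks.sql import functions as dbf

# 2147483647 years cannot be represented as an interval; make_interval
# would raise an error here, but try_make_interval returns NULL instead.
spark.range(1).select(dbf.try_make_interval(dbf.lit(2147483647))).show(truncate=False)
```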
Syntax
```python
from pyspark.databricks.sql import functions as dbf

dbf.try_make_interval(years=<years>, months=<months>, weeks=<weeks>, days=<days>, hours=<hours>, mins=<mins>, secs=<secs>)
```
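All parameters are optional keyword arguments, so only the units you need have to be supplied. A short usage sketch, assuming a running Spark session bound to spark:

```python
from pyspark.databricks.sql import functions as dbf

# Build a 2-week, 3-day interval from literals, leaving the other units unset.
spark.range(1).select(
    dbf.try_make_interval(weeks=dbf.lit(2), days=dbf.lit(3))
).show(truncate=False)
```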
Parameters
| Parameter | Type | Description |
|---|---|---|
| years | pyspark.sql.Column or str, optional | The number of years, positive or negative. |
| months | pyspark.sql.Column or str, optional | The number of months, positive or negative. |
| weeks | pyspark.sql.Column or str, optional | The number of weeks, positive or negative. |
| days | pyspark.sql.Column or str, optional | The number of days, positive or negative. |
| hours | pyspark.sql.Column or str, optional | The number of hours, positive or negative. |
| mins | pyspark.sql.Column or str, optional | The number of minutes, positive or negative. |
| secs | pyspark.sql.Column or str, optional | The number of seconds with the fractional part in microsecond precision. |
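Each parameter accepts either a pyspark.sql.Column or a column name as a str, and the two forms can be mixed in a single call, as the examples below show. A small sketch, using a hypothetical column named n:

```python
from pyspark.databricks.sql import functions as dbf

df = spark.createDataFrame([[5]], ['n'])
# Equivalent calls: a Column object and a column-name string both work.
df.select(dbf.try_make_interval(days=df.n)).show(truncate=False)
df.select(dbf.try_make_interval(days='n')).show(truncate=False)
```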
Returns
pyspark.sql.Column: A new column that contains an interval.
Examples
```python
from pyspark.databricks.sql import functions as dbf

df = spark.createDataFrame(
    [[100, 11, 1, 1, 12, 30, 1.001001]],
    ['year', 'month', 'week', 'day', 'hour', 'min', 'sec'],
)

# Units can be passed as Column objects or as column-name strings, and
# trailing units can be dropped one at a time.
df.select(
    dbf.try_make_interval(df.year, df.month, 'week', df.day, 'hour', df.min, df.sec)
).show(truncate=False)

df.select(
    dbf.try_make_interval(df.year, df.month, 'week', df.day, df.hour, df.min)
).show(truncate=False)

df.select(
    dbf.try_make_interval(df.year, df.month, 'week', df.day, df.hour)
).show(truncate=False)

df.select(dbf.try_make_interval(df.year, 'month', df.week, df.day)).show(truncate=False)

df.select(dbf.try_make_interval(df.year, 'month', df.week)).show(truncate=False)

df.select(dbf.try_make_interval(df.year, 'month')).show(truncate=False)

df.select(dbf.try_make_interval(df.year)).show(truncate=False)

# With no arguments, every unit is omitted.
spark.range(1).select(dbf.try_make_interval()).show(truncate=False)

# 2147483647 years overflows the interval type, so the result is NULL
# instead of an error.
spark.range(1).select(dbf.try_make_interval(dbf.lit(2147483647))).show(truncate=False)
```
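Because failures surface as NULL values rather than errors, bad rows can be detected or filtered downstream. A sketch, using a hypothetical alias iv:

```python
from pyspark.sql import functions as F
from pyspark.databricks.sql import functions as dbf

df = spark.range(1).select(
    dbf.try_make_interval(dbf.lit(2147483647)).alias('iv')
)
# Rows where the interval could not be created carry NULL.
df.filter(F.col('iv').isNull()).show(truncate=False)
```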