Makes an interval from years, months, weeks, days, hours, mins, and secs.
For the corresponding Databricks SQL function, see the make_interval function.
Syntax
```python
from pyspark.databricks.sql import functions as dbf

dbf.make_interval(years=<years>, months=<months>, weeks=<weeks>, days=<days>, hours=<hours>, mins=<mins>, secs=<secs>)
```
Parameters
| Parameter | Type | Description |
|---|---|---|
| `years` | `pyspark.sql.Column` or `str`, optional | The number of years, positive or negative. |
| `months` | `pyspark.sql.Column` or `str`, optional | The number of months, positive or negative. |
| `weeks` | `pyspark.sql.Column` or `str`, optional | The number of weeks, positive or negative. |
| `days` | `pyspark.sql.Column` or `str`, optional | The number of days, positive or negative. |
| `hours` | `pyspark.sql.Column` or `str`, optional | The number of hours, positive or negative. |
| `mins` | `pyspark.sql.Column` or `str`, optional | The number of minutes, positive or negative. |
| `secs` | `pyspark.sql.Column` or `str`, optional | The number of seconds with the fractional part in microsecond precision. |
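Because every component accepts negative values, the same function can build intervals that subtract time. A minimal sketch, assuming an active Spark session (the column names `y` and `m` are illustrative, not part of this page's examples):

```python
from pyspark.databricks.sql import functions as dbf

# Go back 1 year and 6 months: negative components are allowed.
df = spark.createDataFrame([[-1, -6]], ['y', 'm'])
df.select(dbf.make_interval(df.y, df.m).alias('back')).show(truncate=False)
```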
Returns
pyspark.sql.Column: A new column that contains an interval.
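Because the returned column holds an interval value, it can participate in timestamp arithmetic. A minimal sketch, assuming an active Spark session (the `ts` column and literal values are illustrative assumptions):

```python
from pyspark.databricks.sql import functions as dbf
from pyspark.sql import functions as F

# Shift a timestamp forward by 1 week and 2 days using the interval column.
df = spark.createDataFrame([['2024-01-01 00:00:00']], ['ts'])
df.select(
    (F.col('ts').cast('timestamp')
     + dbf.make_interval(weeks=F.lit(1), days=F.lit(2))).alias('shifted')
).show(truncate=False)
```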
Examples
```python
from pyspark.databricks.sql import functions as dbf

df = spark.createDataFrame(
    [[100, 11, 1, 1, 12, 30, 1.001001]],
    ['year', 'month', 'week', 'day', 'hour', 'min', 'sec'])

# All seven components; 'week' is passed as a column name string.
df.select(
    dbf.make_interval(df.year, df.month, 'week', df.day, df.hour, df.min, df.sec)
).show(truncate=False)

# Trailing components may be omitted; they default to zero.
df.select(
    dbf.make_interval(df.year, df.month, 'week', df.day, df.hour, df.min)
).show(truncate=False)

df.select(
    dbf.make_interval(df.year, df.month, 'week', df.day, df.hour)
).show(truncate=False)

df.select(dbf.make_interval(df.year, df.month, 'week', df.day)).show(truncate=False)

df.select(dbf.make_interval(df.year, df.month, 'week')).show(truncate=False)

df.select(dbf.make_interval(df.year, df.month)).show(truncate=False)

df.select(dbf.make_interval(df.year)).show(truncate=False)

# With no arguments, the result is a zero-length interval.
spark.range(1).select(dbf.make_interval()).show(truncate=False)
```
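The seven-argument call folds the week into days, so the first example prints an interval along the lines of `100 years 11 months 8 days 12 hours 30 minutes 1.001001 seconds`. The same interval can also be built with the Databricks SQL make_interval function mentioned above; a minimal sketch:

```python
# SQL equivalent of the first Python example above.
spark.sql(
    "SELECT make_interval(100, 11, 1, 1, 12, 30, 1.001001) AS interval"
).show(truncate=False)
```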