diff --git a/API.md b/API.md
index 937720e0..c34f541c 100644
--- a/API.md
+++ b/API.md
@@ -76,6 +76,11 @@ Path: `/collections/{cid}/items`
Multiple property filters are ANDed together.
* `filter=cql-expr` - filters features via a CQL expression
* `filter-crs=SRID` - specifies the CRS for geometry values in the CQL filter
+* `datetime=INSTANT | INTERVAL` - specify a time range to filter the data by (requires a configured datetime column; see the [Database](config/pg_featureserv.toml.example) section of the config)
+ * exact match: `datetime=2025-01-02T00:00:00Z`
+ * between: `datetime=2025-01-02T00:00:00Z/2025-02-02T00:00:00Z`
+ * before: `datetime=../2025-01-02T00:00:00Z`
+ * after: `datetime=2025-01-02T00:00:00Z/..`
* `transform=fun1[,args][|fun2,args...]` - transform the feature geometry by a geometry function pipeline.
* `groupby=PROP-NAME` - group results on a property.
Usually used with an aggregate `transform` function.
diff --git a/FEATURES.md b/FEATURES.md
index ee724b83..47e533bb 100644
--- a/FEATURES.md
+++ b/FEATURES.md
@@ -37,7 +37,7 @@ It includes [*OGC API - Features*](http://docs.opengeospatial.org/is/17-069r3/17
- [x] `bbox=x1,y1,x2,y2`
- [ ] `bbox` (6 numbers)
- [x] `bbox-crs=srid`
-- [ ] `datetime`
+- [x] `datetime`
- [x] `properties` list
- restricts properties included in response
- [x] `sortby` to sort output by a property
diff --git a/assets/items.gohtml b/assets/items.gohtml
index 7bdb17f9..20805eeb 100644
--- a/assets/items.gohtml
+++ b/assets/items.gohtml
@@ -37,6 +37,23 @@
+{{if .context.TimeAware}}
+
+
Datetime
+
+
+
+
+
+
Datetime range
+
+
+to
+
+
Leave a field blank for an open interval.
+
+
+{{end}}
{{template "funArgs" .}}
@@ -78,20 +95,24 @@ function onMapLoad() {
document.getElementById('feature-count').innerHTML = numFeat;
}
+populateDatetimeControls();
function doQuery() {
var url = window.location.pathname;
var newUrl = addFunctionArgs(url);
var select = document.getElementById('item-limit');
var lim = select.options[select.selectedIndex].value;
- newurl = addQueryParam(newUrl, 'limit', lim);
+ newUrl = addQueryParam(newUrl, 'limit', lim);
+
+ var datetimeVal = buildDatetimeParam();
+ newUrl = addQueryParam(newUrl, 'datetime', datetimeVal);
var useBbox = document.getElementById('chk-bbox').checked;
if (useBbox) {
var bbox = bboxStr(5);
- newurl = addQueryParam(newurl, 'bbox', bbox);
+ newUrl = addQueryParam(newUrl, 'bbox', bbox);
}
- window.location.assign(newurl);
+ window.location.assign(newUrl);
}
function addQueryParam(url, name, value) {
if (! value || value.length <= 0) return url;
@@ -100,6 +121,100 @@ function addQueryParam(url, name, value) {
let newUrl = `${url}${delim}${name}=${value}`;
return newUrl;
}
+
+function buildDatetimeParam() {
+ var instantRaw = document.getElementById('datetime-instant').value;
+ var startRaw = document.getElementById('datetime-start').value;
+ var endRaw = document.getElementById('datetime-end').value;
+
+ var instant = toIsoString(instantRaw);
+ var start = toIsoString(startRaw);
+ var end = toIsoString(endRaw);
+
+ if (instant) {
+ return instant;
+ }
+ if (!start && !end) {
+ return '';
+ }
+ var startPart = start ? start : '..';
+ var endPart = end ? end : '..';
+ if (startPart === '..' && endPart === '..') {
+ return '';
+ }
+ return `${startPart}/${endPart}`;
+}
+
+function toIsoString(value) {
+ if (!value) {
+ return '';
+ }
+ var dt = new Date(value);
+ if (Number.isNaN(dt.getTime())) {
+ return '';
+ }
+ return dt.toISOString();
+}
+
+function populateDatetimeControls() {
+ var params = new URL(window.location.href).searchParams;
+ var value = params.get('datetime');
+ if (!value) {
+ return;
+ }
+ if (value.indexOf('/') >= 0) {
+ var parts = value.split('/');
+ if (parts.length === 2) {
+ if (parts[0] && parts[0] !== '..') {
+ setDatetimeInput('datetime-start', parts[0]);
+ }
+ if (parts[1] && parts[1] !== '..') {
+ setDatetimeInput('datetime-end', parts[1]);
+ }
+ }
+ } else {
+ setDatetimeInput('datetime-instant', value);
+ }
+}
+
+function setDatetimeInput(id, value) {
+ var input = document.getElementById(id);
+ if (!input) {
+ return;
+ }
+ var dt = parseDateTime(value);
+ if (!dt) {
+ return;
+ }
+ input.value = formatForInput(dt);
+}
+
+function parseDateTime(value) {
+ if (!value) {
+ return null;
+ }
+ var dt = new Date(value);
+ if (Number.isNaN(dt.getTime())) {
+ return null;
+ }
+ return dt;
+}
+
+function formatForInput(date) {
+ var pad = function(num) {
+ return String(num).padStart(2, '0');
+ };
+ var yyyy = date.getFullYear();
+ var mm = pad(date.getMonth() + 1);
+ var dd = pad(date.getDate());
+ var hh = pad(date.getHours());
+ var min = pad(date.getMinutes());
+ var sec = pad(date.getSeconds());
+ if (sec === '00') {
+ return `${yyyy}-${mm}-${dd}T${hh}:${min}`;
+ }
+ return `${yyyy}-${mm}-${dd}T${hh}:${min}:${sec}`;
+}
{{ end }}
{{define "funArgs"}}
diff --git a/config/pg_featureserv.toml.example b/config/pg_featureserv.toml.example
index cb35560f..2a2168b3 100644
--- a/config/pg_featureserv.toml.example
+++ b/config/pg_featureserv.toml.example
@@ -68,6 +68,14 @@ WriteTimeoutSec = 30
# Publish functions from these schemas (default is publish postgisftw)
# FunctionIncludes = [ "postgisftw", "schema2" ]
+# Assign time columns for tables with temporal data
+# These should be timestamp or timestamptz columns in the table
+# Columns to be used for feature start and end of time intervals
+StartTimeColumns = [ "start_time" ]
+EndTimeColumns = [ "end_time" ]
+# Columns to be used for (instantaneous) feature timestamps
+TimeColumns = [ "time" ]
+
[Paging]
# The default number of features in a response
LimitDefault = 20
diff --git a/demo/initdb/04-views.sql b/demo/initdb/04-views.sql
index d4d56593..d05a63ad 100644
--- a/demo/initdb/04-views.sql
+++ b/demo/initdb/04-views.sql
@@ -1,14 +1,55 @@
CREATE TABLE cities (
- id serial PRIMARY KEY,
- name text,
+ name text PRIMARY KEY,
geom geometry(Point, 4326)
);
+CREATE TABLE trips (
+ id serial PRIMARY KEY,
+ city text REFERENCES cities(name),
+ time timestamptz,
+ start_time timestamptz,
+ end_time timestamptz
+);
+
+CREATE TABLE receipts (
+ id serial PRIMARY KEY,
+ trip_id int REFERENCES trips(id),
+ time timestamptz,
+ amount numeric
+);
+
+
INSERT INTO cities (name, geom) VALUES
('Paris', ST_SetSRID(ST_MakePoint(2.3522, 48.8566), 4326)),
+ ('London', ST_SetSRID(ST_MakePoint(-0.1276, 51.5074), 4326)),
+ ('Tokyo', ST_SetSRID(ST_MakePoint(139.6917, 35.6895), 4326)),
+ ('Sydney', ST_SetSRID(ST_MakePoint(151.2093, -33.8688), 4326)),
('NYC', ST_SetSRID(ST_MakePoint(-74.0060, 40.7128), 4326));
+
+
+INSERT INTO trips (city, time, start_time, end_time) VALUES
+ ('Paris', '2025-01-01 12:00:00', '2024-01-01 12:00:00', '2025-01-01 12:00:00'),
+ ('London', '2025-02-01 12:00:00', '2024-02-01 12:00:00', '2025-02-01 12:00:00'),
+ ('Tokyo', '2025-03-01 12:00:00', '2024-03-01 12:00:00', '2025-03-01 12:00:00'),
+ ('Sydney', '2025-04-01 12:00:00', '2024-04-01 12:00:00', '2025-04-01 12:00:00'),
+ ('NYC', '2025-05-01 12:00:00', '2024-05-01 12:00:00', '2025-05-01 12:00:00');
+
+
+INSERT INTO receipts (trip_id, time, amount) VALUES
+ (1, '2024-06-01 12:00:00', 100.00),
+ (1, '2024-07-01 12:00:00', 150.00),
+ (2, '2024-06-15 12:00:00', 200.00),
+ (3, '2024-08-01 12:00:00', 250.00),
+ (4, '2024-09-01 12:00:00', 300.00),
+ (5, '2024-10-01 12:00:00', 350.00);
+
-- View with geometry and featureID column (no PK)
CREATE VIEW cities_view AS
- SELECT id AS id, name, geom FROM cities;
+ SELECT * FROM cities;
+
+CREATE VIEW trips_view AS
+ SELECT trips.*, cities.geom FROM trips LEFT JOIN cities ON trips.city = cities.name;
+CREATE VIEW receipts_view AS
+ SELECT receipts.*, cities.geom FROM receipts LEFT JOIN trips ON receipts.trip_id = trips.id LEFT JOIN cities ON trips.city = cities.name;
diff --git a/hugo/content/installation/configuration.md b/hugo/content/installation/configuration.md
index f0b3273e..18eaf558 100644
--- a/hugo/content/installation/configuration.md
+++ b/hugo/content/installation/configuration.md
@@ -119,6 +119,15 @@ WriteTimeoutSec = 30
# Publish functions from these schemas (default is publish postgisftw)
# FunctionIncludes = [ "postgisftw", "schema2" ]
+# Assign time columns for tables with temporal data
+# These should be timestamp or timestamptz columns in the table
+# Columns to be used for feature start and end of time intervals
+# StartTimeColumns = [ "start_time" ]
+# EndTimeColumns = [ "end_time" ]
+# Columns to be used for (instantaneous) feature timestamps
+# TimeColumns = [ "time" ]
+
+
[Paging]
# The default number of features in a response
LimitDefault = 20
@@ -243,6 +252,24 @@ Overrides items specified in `TableIncludes`.
A list of the schemas to publish functions from.
The default is to publish functions in the `postgisftw` schema.
+#### StartTimeColumns
+
+Specifies the column(s) that represent the start time for temporal features.
+Use this to identify when a feature becomes active or relevant.
+The first found column is used.
+
+#### EndTimeColumns
+
+Specifies the column(s) that represent the end time for temporal features.
+Use this to indicate when a feature is no longer active or relevant.
+The first found column is used.
+
+#### TimeColumns
+
+Specifies the column(s) that contain time or timestamp information for features.
+Useful for filtering or querying features based on specific time values.
+The first found column is used.
+
#### LimitDefault
The default number of features in a response,
diff --git a/internal/api/api.go b/internal/api/api.go
index 01aaac51..96fd3e92 100644
--- a/internal/api/api.go
+++ b/internal/api/api.go
@@ -39,6 +39,7 @@ const (
ParamBboxCrs = "bbox-crs"
ParamFilter = "filter"
ParamFilterCrs = "filter-crs"
+ ParamDateTime = "datetime"
ParamGroupBy = "groupby"
ParamOrderBy = "orderby"
ParamPrecision = "precision"
@@ -98,6 +99,7 @@ var ParamReservedNames = []string{
ParamBbox,
ParamBboxCrs,
ParamFilter,
+ ParamDateTime,
ParamGroupBy,
ParamOrderBy,
ParamPrecision,
@@ -194,13 +196,19 @@ var ParameterSchema openapi3.Schema = openapi3.Schema{
// Bbox for extent
type Bbox struct {
- Crs string `json:"crs"`
+ Crs string `json:"crs"`
Extent [][]float64 `json:"bbox"`
}
+type TemporalExtent struct {
+ Trs string `json:"trs"`
+ Interval []*string `json:"interval"`
+}
+
// Extent OAPIF Extent structure (partial)
type Extent struct {
- Spatial *Bbox `json:"spatial"`
+ Spatial *Bbox `json:"spatial"`
+ Temporal *TemporalExtent `json:"temporal,omitempty"`
}
// --- @See https://raw.githubusercontent.com/opengeospatial/WFS_FES/master/core/openapi/schemas/bbox.yaml
@@ -245,6 +253,7 @@ type RequestParam struct {
Properties []string
Filter string
FilterCrs int
+ DateTime string
GroupBy []string
SortBy []data.Sorting
Precision int
@@ -494,6 +503,28 @@ func toBbox(cc *data.Table) *Bbox {
}
}
+func toTemporalExtent(cc *data.Table) *TemporalExtent {
+ if cc.TemporalExtent.Start.IsZero() && cc.TemporalExtent.End.IsZero() {
+ return nil
+ }
+ var startStr, endStr *string
+ if !cc.TemporalExtent.Start.IsZero() {
+ s := cc.TemporalExtent.Start.Format(time.RFC3339)
+ startStr = &s
+ }
+ if !cc.TemporalExtent.End.IsZero() {
+ e := cc.TemporalExtent.End.Format(time.RFC3339)
+ endStr = &e
+ }
+ interval := make([]*string, 2)
+ interval[0] = startStr
+ interval[1] = endStr
+ return &TemporalExtent{
+ Trs: "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian",
+ Interval: interval,
+ }
+}
+
func NewLink(href string, rel string, conType string, title string) *Link {
return &Link{
Href: href,
@@ -525,7 +556,8 @@ func NewCollectionInfo(tbl *data.Table) *CollectionInfo {
Title: tbl.Title,
Description: tbl.Description,
Extent: &Extent{
- Spatial: toBbox(tbl),
+ Spatial: toBbox(tbl),
+ Temporal: toTemporalExtent(tbl),
},
}
return &doc
diff --git a/internal/api/openapi.go b/internal/api/openapi.go
index 465db557..c2167463 100644
--- a/internal/api/openapi.go
+++ b/internal/api/openapi.go
@@ -98,6 +98,17 @@ func GetOpenAPIContent(urlBase string) *openapi3.Swagger {
AllowEmptyValue: false,
},
}
+ paramDateTime := openapi3.ParameterRef{
+ Value: &openapi3.Parameter{
+ Name: "datetime",
+ Description: "Temporal filter (RFC 3339 instant or interval).",
+ In: "query",
+ Required: false,
+ Style: "form",
+ Explode: openapi3.BoolPtr(false),
+ Schema: &openapi3.SchemaRef{Value: openapi3.NewStringSchema()},
+ },
+ }
paramFilterCrs := openapi3.ParameterRef{
Value: &openapi3.Parameter{
Name: "filter-crs",
@@ -319,6 +330,7 @@ func GetOpenAPIContent(urlBase string) *openapi3.Swagger {
¶mBbox,
¶mBboxCrs,
¶mFilter,
+ ¶mDateTime,
¶mFilterCrs,
¶mTransform,
¶mProperties,
@@ -441,6 +453,7 @@ func GetOpenAPIContent(urlBase string) *openapi3.Swagger {
¶mBbox,
¶mBboxCrs,
¶mFilter,
+ ¶mDateTime,
¶mFilterCrs,
¶mTransform,
¶mProperties,
diff --git a/internal/conf/config.go b/internal/conf/config.go
index fd81b0ff..c7a2eeac 100644
--- a/internal/conf/config.go
+++ b/internal/conf/config.go
@@ -44,6 +44,9 @@ func setDefaultConfig() {
viper.SetDefault("Database.TableIncludes", []string{})
viper.SetDefault("Database.TableExcludes", []string{})
viper.SetDefault("Database.FunctionIncludes", []string{"postgisftw"})
+ viper.SetDefault("Database.TimeColumns", []string{"time"})
+ viper.SetDefault("Database.StartTimeColumns", []string{"start_time"})
+ viper.SetDefault("Database.EndTimeColumns", []string{"end_time"})
viper.SetDefault("Paging.LimitDefault", 10)
viper.SetDefault("Paging.LimitMax", 1000)
@@ -94,6 +97,9 @@ type Database struct {
TableIncludes []string
TableExcludes []string
FunctionIncludes []string
+ TimeColumns []string
+ StartTimeColumns []string
+ EndTimeColumns []string
}
// Metadata config
@@ -180,4 +186,7 @@ func DumpConfig() {
log.Debugf(" TableExcludes = %v", Configuration.Database.TableExcludes)
log.Debugf(" FunctionIncludes = %v", Configuration.Database.FunctionIncludes)
log.Debugf(" TransformFunctions = %v", Configuration.Server.TransformFunctions)
+ log.Debugf(" TimeColumns = %v", Configuration.Database.TimeColumns)
+ log.Debugf(" StartTimeColumns = %v", Configuration.Database.StartTimeColumns)
+ log.Debugf(" EndTimeColumns = %v", Configuration.Database.EndTimeColumns)
}
diff --git a/internal/data/catalog.go b/internal/data/catalog.go
index b38e4d88..7df0b8dd 100644
--- a/internal/data/catalog.go
+++ b/internal/data/catalog.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"strings"
+ "time"
)
/*
@@ -91,24 +92,36 @@ type QueryParam struct {
SortBy []Sorting
Precision int
TransformFuns []TransformFunction
+ DateTime *TimeRange
+}
+
+// TimeRange restricts results to a temporal interval; a nil Start or End denotes an open bound
+type TimeRange struct {
+ Start *time.Time
+ End *time.Time
+ StartInclusive bool
+ EndInclusive bool
}
// Table holds metadata for table/view objects
type Table struct {
- ID string
- Schema string
- Table string
- Title string
- Description string
- GeometryType string
- GeometryColumn string
- IDColumn string
- Srid int
- Extent Extent
- Columns []string
- DbTypes map[string]string
- JSONTypes []string
- ColDesc []string
+ ID string
+ Schema string
+ Table string
+ Title string
+ Description string
+ GeometryType string
+ GeometryColumn string
+ IDColumn string
+ StartTimeColumn string
+ EndTimeColumn string
+ Srid int
+ Extent Extent
+ TemporalExtent TemporalExtent
+ Columns []string
+ DbTypes map[string]string
+ JSONTypes []string
+ ColDesc []string
}
// Extent of a table
@@ -116,23 +129,30 @@ type Extent struct {
Minx, Miny, Maxx, Maxy float64
}
+type TemporalExtent struct {
+ Start time.Time
+ End time.Time
+}
+
// Function tbd
type Function struct {
- ID string
- Schema string
- Name string
- Description string
- InNames []string
- InDbTypes []string
- InTypeMap map[string]string
- InDefaults []string
- NumNoDefault int
- OutNames []string
- OutDbTypes []string
- OutJSONTypes []string
- Types map[string]string
- GeometryColumn string
- IDColumn string
+ ID string
+ Schema string
+ Name string
+ Description string
+ InNames []string
+ InDbTypes []string
+ InTypeMap map[string]string
+ InDefaults []string
+ NumNoDefault int
+ OutNames []string
+ OutDbTypes []string
+ OutJSONTypes []string
+ Types map[string]string
+ GeometryColumn string
+ IDColumn string
+ StartTimeColumn string
+ EndTimeColumn string
}
func (fun *Function) IsGeometryFunction() bool {
diff --git a/internal/data/catalog_db.go b/internal/data/catalog_db.go
index 281b42f3..450455ac 100644
--- a/internal/data/catalog_db.go
+++ b/internal/data/catalog_db.go
@@ -38,13 +38,16 @@ const (
JSONTypeBooleanArray = "boolean[]"
JSONTypeStringArray = "string[]"
JSONTypeNumberArray = "number[]"
-
- PGTypeBool = "bool"
- PGTypeNumeric = "numeric"
- PGTypeJSON = "json"
- PGTypeJSONB = "jsonb"
- PGTypeGeometry = "geometry"
- PGTypeTextArray = "_text"
+ JSONTypeDatetime = "date"
+
+ PGTypeBool = "bool"
+ PGTypeNumeric = "numeric"
+ PGTypeJSON = "json"
+ PGTypeJSONB = "jsonb"
+ PGTypeGeometry = "geometry"
+ PGTypeTextArray = "_text"
+ PGTypeTimestamp = "timestamp"
+ PGTypeTimestamptz = "timestamptz"
)
type catalogDB struct {
@@ -166,6 +169,11 @@ func (cat *catalogDB) TableReload(name string) {
sqlExtentExact := sqlExtentExact(tbl)
cat.loadExtent(sqlExtentExact, tbl)
}
+ // load temporal extent (which may change over time)
+ if tbl.StartTimeColumn != "" {
+ sqlTemporalExtent := sqlTemporalExtentExact(tbl)
+ cat.loadTemporalExtent(sqlTemporalExtent, tbl)
+ }
}
func (cat *catalogDB) loadExtent(sql string, tbl *Table) bool {
@@ -191,6 +199,25 @@ func (cat *catalogDB) loadExtent(sql string, tbl *Table) bool {
return true
}
+func (cat *catalogDB) loadTemporalExtent(sql string, tbl *Table) bool {
+ var (
+ start pgtype.Timestamptz
+ end pgtype.Timestamptz
+ )
+ log.Debug("Temporal extent query: " + sql)
+ err := cat.dbconn.QueryRow(context.Background(), sql).Scan(&start, &end)
+ if err != nil {
+ log.Debugf("Error querying Temporal Extent for %s: %v", tbl.ID, err)
+ }
+	// no temporal extent was read (table may be empty or the time columns all NULL)
+ if start.Status == pgtype.Null {
+ return false
+ }
+ tbl.TemporalExtent.Start = start.Time
+ tbl.TemporalExtent.End = end.Time
+ return true
+}
+
func (cat *catalogDB) TableByName(name string) (*Table, error) {
cat.refreshTables(false)
tbl, ok := cat.tableMap[name]
@@ -361,20 +388,24 @@ func scanTable(rows pgx.Rows) *Table {
description = fmt.Sprintf("Data for table %v", id)
}
+ startTimeColumn, endTimeColumn := temporalColumns(columns, datatypes)
+
return &Table{
- ID: id,
- Schema: schema,
- Table: table,
- Title: title,
- Description: description,
- GeometryColumn: geometryCol,
- Srid: srid,
- GeometryType: geometryType,
- IDColumn: idColumn,
- Columns: columns,
- DbTypes: datatypes,
- JSONTypes: jsontypes,
- ColDesc: colDesc,
+ ID: id,
+ Schema: schema,
+ Table: table,
+ Title: title,
+ Description: description,
+ GeometryColumn: geometryCol,
+ Srid: srid,
+ GeometryType: geometryType,
+ IDColumn: idColumn,
+ StartTimeColumn: startTimeColumn,
+ EndTimeColumn: endTimeColumn,
+ Columns: columns,
+ DbTypes: datatypes,
+ JSONTypes: jsontypes,
+ ColDesc: colDesc,
}
}
@@ -466,6 +497,41 @@ func extractProperties(vals []interface{}, propOffset int, propNames []string) m
func toJSONValue(value interface{}) interface{} {
//fmt.Printf("toJSONValue: %v\n", reflect.TypeOf(value))
switch v := value.(type) {
+ case time.Time:
+ return formatDateTime(v)
+ case *time.Time:
+ if v == nil {
+ return nil
+ }
+ return formatDateTime(*v)
+ case pgtype.Timestamp:
+ if v.Status != pgtype.Present {
+ return nil
+ }
+ return formatDateTime(v.Time)
+ case *pgtype.Timestamp:
+ if v == nil || v.Status != pgtype.Present {
+ return nil
+ }
+ return formatDateTime(v.Time)
+ case pgtype.Timestamptz:
+ if v.Status != pgtype.Present {
+ return nil
+ }
+ return formatDateTime(v.Time)
+ case *pgtype.Timestamptz:
+ if v == nil || v.Status != pgtype.Present {
+ return nil
+ }
+ return formatDateTime(v.Time)
+ case pgtype.TimestampArray:
+ return formatTimestampArray(&v)
+ case *pgtype.TimestampArray:
+ return formatTimestampArray(v)
+ case pgtype.TimestamptzArray:
+ return formatTimestamptzArray(&v)
+ case *pgtype.TimestamptzArray:
+ return formatTimestamptzArray(v)
case *pgtype.Numeric:
var num float64
// TODO: handle error
@@ -514,6 +580,65 @@ func toJSONValue(value interface{}) interface{} {
return value
}
+func formatDateTime(t time.Time) string {
+ return t.Format(time.RFC3339Nano)
+}
+
+func formatTimestampArray(arr *pgtype.TimestampArray) []string {
+ if arr == nil || arr.Status == pgtype.Null {
+ return nil
+ }
+ var times []time.Time
+ if err := arr.AssignTo(×); err == nil {
+ return formatDateTimeSlice(times)
+ }
+ return formatTimestampElements(arr.Elements)
+}
+
+func formatTimestamptzArray(arr *pgtype.TimestamptzArray) []string {
+ if arr == nil || arr.Status == pgtype.Null {
+ return nil
+ }
+ var times []time.Time
+ if err := arr.AssignTo(×); err == nil {
+ return formatDateTimeSlice(times)
+ }
+ return formatTimestamptzElements(arr.Elements)
+}
+
+func formatDateTimeSlice(times []time.Time) []string {
+ if times == nil {
+ return nil
+ }
+ result := make([]string, len(times))
+ for i, tm := range times {
+ result[i] = formatDateTime(tm)
+ }
+ return result
+}
+
+func formatTimestampElements(elements []pgtype.Timestamp) []string {
+ result := make([]string, len(elements))
+ for i, elem := range elements {
+ if elem.Status != pgtype.Present {
+ continue
+ }
+ result[i] = formatDateTime(elem.Time)
+ }
+ return result
+}
+
+func formatTimestamptzElements(elements []pgtype.Timestamptz) []string {
+ result := make([]string, len(elements))
+ for i, elem := range elements {
+ if elem.Status != pgtype.Present {
+ continue
+ }
+ result[i] = formatDateTime(elem.Time)
+ }
+ return result
+}
+
func toJSONTypeFromPGArray(pgTypes []string) []string {
jsonTypes := make([]string, len(pgTypes))
for i, pgType := range pgTypes {
@@ -533,6 +658,9 @@ func toJSONTypeFromPG(pgType string) string {
if strings.HasPrefix(pgType, "_bool") {
return JSONTypeBooleanArray
}
+ if strings.HasPrefix(pgType, "_timestamp") || strings.HasPrefix(pgType, "_timestamptz") {
+ return JSONTypeStringArray
+ }
switch pgType {
case PGTypeNumeric:
return JSONTypeNumber
@@ -544,6 +672,8 @@ func toJSONTypeFromPG(pgType string) string {
return JSONTypeJSON
case PGTypeTextArray:
return JSONTypeStringArray
+ case PGTypeTimestamp, PGTypeTimestamptz:
+ return JSONTypeDatetime
// hack to allow displaying geometry type
case PGTypeGeometry:
return PGTypeGeometry
@@ -593,3 +723,31 @@ func indexOfName(names []string, name string) int {
}
return -1
}
+func temporalColumns(names []string, types map[string]string) (string, string) {
+ actualNames := make(map[string]string, len(names))
+ for _, name := range names {
+ actualNames[strings.ToLower(name)] = name
+ }
+ lookup := func(candidates []string) string {
+ for _, cand := range candidates {
+ if cand == "" {
+ continue
+ }
+ if col, ok := actualNames[strings.ToLower(cand)]; ok {
+ if types[col] == PGTypeTimestamp || types[col] == PGTypeTimestamptz {
+ return col
+ }
+ }
+ }
+ return ""
+ }
+	// When a complete start/end column pair is not configured, the instant column is used for both interval bounds
+ instant := lookup(conf.Configuration.Database.TimeColumns)
+ start := lookup(conf.Configuration.Database.StartTimeColumns)
+ end := lookup(conf.Configuration.Database.EndTimeColumns)
+ if instant != "" && (start == "" || end == "") {
+ start = instant
+ end = instant
+ }
+ return start, end
+}
diff --git a/internal/data/catalog_db_fun.go b/internal/data/catalog_db_fun.go
index 1afe2fe0..ad5c0c3f 100644
--- a/internal/data/catalog_db_fun.go
+++ b/internal/data/catalog_db_fun.go
@@ -115,22 +115,26 @@ func scanFunctionDef(rows pgx.Rows) *Function {
}
geomCol := geometryColumn(outNames, datatypes)
+ startTimeColumn, endTimeColumn := temporalColumns(outNames, datatypes)
funDef := Function{
- ID: id,
- Schema: schema,
- Name: name,
- Description: description,
- InNames: inNames,
- InDbTypes: inTypes,
- InTypeMap: inTypeMap,
- InDefaults: inDefaults,
- NumNoDefault: numNoDefault,
- OutNames: outNames,
- OutDbTypes: outTypes,
- OutJSONTypes: outJSONTypes,
- Types: datatypes,
- GeometryColumn: geomCol,
+ ID: id,
+ Schema: schema,
+ Name: name,
+ Description: description,
+ InNames: inNames,
+ InDbTypes: inTypes,
+ InTypeMap: inTypeMap,
+ InDefaults: inDefaults,
+ NumNoDefault: numNoDefault,
+ OutNames: outNames,
+ OutDbTypes: outTypes,
+ OutJSONTypes: outJSONTypes,
+ Types: datatypes,
+ GeometryColumn: geomCol,
+ IDColumn: FunctionIDColumnName,
+ StartTimeColumn: startTimeColumn,
+ EndTimeColumn: endTimeColumn,
}
//fmt.Printf("DEBUG: Function definitions: %v\n", funDef)
return &funDef
diff --git a/internal/data/db_sql.go b/internal/data/db_sql.go
index 06afea8b..1351588d 100644
--- a/internal/data/db_sql.go
+++ b/internal/data/db_sql.go
@@ -126,6 +126,21 @@ func sqlExtentExact(tbl *Table) string {
return fmt.Sprintf(sqlFmtExtentExact, tbl.GeometryColumn, tbl.Srid, tbl.Schema, tbl.Table)
}
+// Temporal extent is computed with ORDER BY/LIMIT 1 subqueries (equivalent to MIN/MAX over the start and end columns)
+const sqlFmtTemporalExtentExact = `
+SELECT
+ (SELECT %s FROM "%s"."%s" WHERE %s IS NOT NULL ORDER BY %s ASC LIMIT 1) AS start,
+ (SELECT %s FROM "%s"."%s" WHERE %s IS NOT NULL ORDER BY %s DESC LIMIT 1) AS "end";
+`
+
+func sqlTemporalExtentExact(tbl *Table) string {
+
+ return fmt.Sprintf(
+ sqlFmtTemporalExtentExact,
+ tbl.StartTimeColumn, tbl.Schema, tbl.Table, tbl.StartTimeColumn, tbl.StartTimeColumn,
+ tbl.EndTimeColumn, tbl.Schema, tbl.Table, tbl.EndTimeColumn, tbl.EndTimeColumn)
+}
+
const sqlFmtFeatures = "SELECT %v %v FROM \"%s\".\"%s\" %v %v %v %s;"
func sqlFeatures(tbl *Table, param *QueryParam) (string, []interface{}) {
@@ -134,7 +149,9 @@ func sqlFeatures(tbl *Table, param *QueryParam) (string, []interface{}) {
bboxFilter := sqlBBoxFilter(tbl.GeometryColumn, tbl.Srid, param.Bbox, param.BboxCrs)
attrFilter, attrVals := sqlAttrFilter(param.Filter)
cqlFilter := sqlCqlFilter(param.FilterSql)
- sqlWhere := sqlWhere(bboxFilter, attrFilter, cqlFilter)
+ timeFilter, timeVals := sqlDateTimeFilter(tbl.StartTimeColumn, tbl.EndTimeColumn, param.DateTime, len(attrVals)+1)
+ attrVals = append(attrVals, timeVals...)
+ sqlWhere := sqlWhere(bboxFilter, attrFilter, cqlFilter, timeFilter)
sqlGroupBy := sqlGroupBy(param.GroupBy)
sqlOrderBy := sqlOrderBy(param.SortBy)
sqlLimitOffset := sqlLimitOffset(param.Limit, param.Offset)
@@ -199,16 +216,12 @@ func sqlCqlFilter(sql string) string {
return "(" + sql + ")"
}
-func sqlWhere(cond1 string, cond2 string, cond3 string) string {
+func sqlWhere(conditions ...string) string {
var condList []string
- if len(cond1) > 0 {
- condList = append(condList, cond1)
- }
- if len(cond2) > 0 {
- condList = append(condList, cond2)
- }
- if len(cond3) > 0 {
- condList = append(condList, cond3)
+ for _, cond := range conditions {
+ if len(cond) > 0 {
+ condList = append(condList, cond)
+ }
}
where := strings.Join(condList, " AND ")
if len(where) > 0 {
@@ -229,6 +242,73 @@ func sqlAttrFilter(filterConds []*PropertyFilter) (string, []interface{}) {
return sql, vals
}
+func sqlDateTimeFilter(StartColumn string, EndColumn string, rng *TimeRange, startIndex int) (string, []interface{}) {
+ if rng == nil {
+ return "", nil
+ }
+ if StartColumn == "" {
+ return "", nil
+ }
+ idx := startIndex
+ var exprItems []string
+ var vals []interface{}
+
+ colStart := strconv.Quote(StartColumn)
+ colEnd := strconv.Quote(EndColumn)
+
+ if StartColumn == EndColumn {
+ //-- single column time range
+ if rng.Start != nil {
+ op := ">="
+ if !rng.StartInclusive {
+ op = ">"
+ }
+ exprItems = append(exprItems, fmt.Sprintf("%s %s $%d", colStart, op, idx))
+ vals = append(vals, *rng.Start)
+ idx++
+ }
+ if rng.End != nil {
+ op := "<="
+ if !rng.EndInclusive {
+ op = "<"
+ }
+ exprItems = append(exprItems, fmt.Sprintf("%s %s $%d", colEnd, op, idx))
+ vals = append(vals, *rng.End)
+ idx++
+ }
+ if len(exprItems) == 0 {
+ return "", nil
+ }
+ filter := strings.Join(exprItems, " AND ")
+ filter = fmt.Sprintf("(%s IS NULL OR (%s))", colStart, filter)
+ return filter, vals
+ }
+
+ if rng.Start != nil {
+ op := ">="
+ if !rng.StartInclusive {
+ op = ">"
+ }
+ exprItems = append(exprItems, fmt.Sprintf("(%s IS NULL OR %s %s $%d)", colEnd, colEnd, op, idx))
+ vals = append(vals, *rng.Start)
+ idx++
+ }
+ if rng.End != nil {
+ op := "<="
+ if !rng.EndInclusive {
+ op = "<"
+ }
+ exprItems = append(exprItems, fmt.Sprintf("(%s IS NULL OR %s %s $%d)", colStart, colStart, op, idx))
+ vals = append(vals, *rng.End)
+ idx++
+ }
+ if len(exprItems) == 0 {
+ return "", nil
+ }
+ filter := strings.Join(exprItems, " AND ")
+ return filter, vals
+}
+
const sqlFmtBBoxTransformFilter = ` ST_Intersects("%v", ST_Transform( ST_MakeEnvelope(%v, %v, %v, %v, %v), %v)) `
const sqlFmtBBoxGeoFilter = ` ST_Intersects("%v", ST_MakeEnvelope(%v, %v, %v, %v, %v)) `
@@ -332,7 +412,9 @@ func sqlGeomFunction(fn *Function, args map[string]string, propCols []string, pa
//-- SRS of function output is unknown, so have to assume 4326
bboxFilter := sqlBBoxFilter(fn.GeometryColumn, SRID_4326, param.Bbox, param.BboxCrs)
cqlFilter := sqlCqlFilter(param.FilterSql)
- sqlWhere := sqlWhere(bboxFilter, cqlFilter, "")
+ timeFilter, timeVals := sqlDateTimeFilter(fn.StartTimeColumn, fn.EndTimeColumn, param.DateTime, len(argVals)+1)
+ argVals = append(argVals, timeVals...)
+ sqlWhere := sqlWhere(bboxFilter, cqlFilter, timeFilter)
sqlOrderBy := sqlOrderBy(param.SortBy)
sqlLimitOffset := sqlLimitOffset(param.Limit, param.Offset)
sql := fmt.Sprintf(sqlFmtGeomFunction, sqlGeomCol, sqlPropCols, fn.Schema, fn.Name, sqlArgs, sqlWhere, sqlOrderBy, sqlLimitOffset)
@@ -345,7 +427,9 @@ func sqlFunction(fn *Function, args map[string]string, propCols []string, param
sqlArgs, argVals := sqlFunctionArgs(fn, args)
sqlPropCols := sqlColList(propCols, fn.Types, false)
cqlFilter := sqlCqlFilter(param.FilterSql)
- sqlWhere := sqlWhere(cqlFilter, "", "")
+ timeFilter, timeVals := sqlDateTimeFilter(fn.StartTimeColumn, fn.EndTimeColumn, param.DateTime, len(argVals)+1)
+ argVals = append(argVals, timeVals...)
+ sqlWhere := sqlWhere(cqlFilter, timeFilter)
sqlOrderBy := sqlOrderBy(param.SortBy)
sqlLimitOffset := sqlLimitOffset(param.Limit, param.Offset)
sql := fmt.Sprintf(sqlFmtFunction, sqlPropCols, fn.Schema, fn.Name, sqlArgs, sqlWhere, sqlOrderBy, sqlLimitOffset)
diff --git a/internal/data/db_sql_test.go b/internal/data/db_sql_test.go
new file mode 100644
index 00000000..72e5cde1
--- /dev/null
+++ b/internal/data/db_sql_test.go
@@ -0,0 +1,94 @@
+package data
+
+import (
+ "testing"
+ "time"
+)
+
+func TestSQLDateTimeFilterInstant(t *testing.T) {
+ start := time.Date(2020, 1, 2, 3, 4, 5, 0, time.UTC)
+ Column := "observed_at"
+ rng := &TimeRange{
+ Start: &start,
+ End: &start,
+ StartInclusive: true,
+ EndInclusive: true,
+ }
+ sql, args := sqlDateTimeFilter(Column, Column, rng, 1)
+ expected := "(\"observed_at\" IS NULL OR (\"observed_at\" >= $1 AND \"observed_at\" <= $2))"
+ if sql != expected {
+ t.Fatalf("unexpected sql: \n%s\n%s", sql, expected)
+ }
+ if len(args) != 2 {
+ t.Fatalf("expected 2 args, got %d", len(args))
+ }
+ if !args[0].(time.Time).Equal(start) {
+ t.Errorf("unexpected start arg: %v", args[0])
+ }
+ if !args[1].(time.Time).Equal(start) {
+ t.Errorf("unexpected end arg: %v", args[1])
+ }
+}
+
+func TestSQLDateTimeFilterExclusiveEnd(t *testing.T) {
+ start := time.Date(2020, 1, 2, 3, 4, 5, 0, time.UTC)
+ end := start.Add(24 * time.Hour)
+ Column := "observed_at"
+ rng := &TimeRange{
+ Start: &start,
+ End: &end,
+ StartInclusive: true,
+ EndInclusive: false,
+ }
+ sql, args := sqlDateTimeFilter(Column, Column, rng, 3)
+ expected := "(\"observed_at\" IS NULL OR (\"observed_at\" >= $3 AND \"observed_at\" < $4))"
+ if sql != expected {
+ t.Fatalf("unexpected sql: %s", sql)
+ }
+ if len(args) != 2 {
+ t.Fatalf("expected 2 args, got %d", len(args))
+ }
+ if !args[0].(time.Time).Equal(start) {
+ t.Errorf("unexpected start arg: %v", args[0])
+ }
+ if !args[1].(time.Time).Equal(end) {
+ t.Errorf("unexpected end arg: %v", args[1])
+ }
+}
+
+func TestSQLDateTimeFilterNilRange(t *testing.T) {
+ sql, args := sqlDateTimeFilter("", "", nil, 1)
+ if sql != "" {
+ t.Fatalf("expected empty sql")
+ }
+ if args != nil {
+ t.Fatalf("expected nil args")
+ }
+}
+
+func TestSQLDateTimeFilterIntervalColumns(t *testing.T) {
+ start := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
+ end := time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC)
+ StartColumn := "start_time"
+ EndColumn := "end_time"
+ rng := &TimeRange{
+ Start: &start,
+ End: &end,
+ StartInclusive: true,
+ EndInclusive: true,
+ }
+ sql, args := sqlDateTimeFilter(StartColumn, EndColumn, rng, 2)
+ expected := "(\"end_time\" IS NULL OR \"end_time\" >= $2) AND (\"start_time\" IS NULL OR \"start_time\" <= $3)"
+ if sql != expected {
+ t.Fatalf("unexpected sql: %s", sql)
+ }
+ if len(args) != 2 {
+ t.Fatalf("expected 2 args, got %d", len(args))
+ }
+ if !args[0].(time.Time).Equal(start) {
+ t.Errorf("unexpected start arg: %v", args[0])
+ }
+ if !args[1].(time.Time).Equal(end) {
+ t.Errorf("unexpected end arg: %v", args[1])
+ }
+}
diff --git a/internal/service/handler.go b/internal/service/handler.go
index a0d5fe46..59349e99 100644
--- a/internal/service/handler.go
+++ b/internal/service/handler.go
@@ -304,6 +304,7 @@ func writeItemsHTML(w http.ResponseWriter, tbl *data.Table, name string, query s
context.Title = tbl.Title
context.IDColumn = tbl.IDColumn
context.ShowFeatureLink = true
+ context.TimeAware = tbl.StartTimeColumn != ""
// features are not needed for items page (page queries for them)
return writeHTML(w, nil, context, ui.PageItems())
@@ -637,6 +638,7 @@ func writeFunItemsHTML(w http.ResponseWriter, name string, query string, urlBase
context.Title = fn.ID
context.Function = fn
context.IDColumn = data.FunctionIDColumnName
+ context.TimeAware = fn.StartTimeColumn != ""
// features are not needed for items page (page queries for them)
return writeHTML(w, nil, context, ui.PageFunctionItems())
diff --git a/internal/service/param.go b/internal/service/param.go
index 58f86b6a..92d5a83d 100644
--- a/internal/service/param.go
+++ b/internal/service/param.go
@@ -19,6 +19,7 @@ import (
"net/url"
"strconv"
"strings"
+ "time"
"github.com/CrunchyData/pg_featureserv/internal/api"
"github.com/CrunchyData/pg_featureserv/internal/conf"
@@ -78,6 +79,9 @@ func parseRequestParams(r *http.Request) (api.RequestParam, error) {
// --- filter parameter
param.Filter = parseString(paramValues, api.ParamFilter)
+ // --- datetime parameter
+ param.DateTime = parseString(paramValues, api.ParamDateTime)
+
// --- filter-crs parameter
filterCrs, err := parseInt(paramValues, api.ParamFilterCrs, 0, 99999999, data.SRID_4326)
if err != nil {
@@ -458,5 +462,127 @@ func createQueryParams(param *api.RequestParam, colNames []string, sourceSRID in
}
query.FilterSql = sql
+ dtRange, err := parseDateTimeRange(param.DateTime)
+ if err != nil {
+ return &query, err
+ }
+ query.DateTime = dtRange
+
return &query, nil
}
+
+// parseDateTimeRange parses an OGC API "datetime" parameter (an RFC 3339
+// instant, a bare date, or an interval "start/end" with ".." for open ends).
+// An empty or fully open value yields (nil, nil), meaning "no time filter".
+func parseDateTimeRange(value string) (*data.TimeRange, error) {
+	trimmed := strings.TrimSpace(value)
+	if trimmed == "" {
+		return nil, nil
+	}
+	if !strings.Contains(trimmed, "/") {
+		// Single instant (no interval separator).
+		inst, err := parseDateTimeInstant(trimmed)
+		if err != nil {
+			return nil, err
+		}
+		if inst == nil {
+			return nil, nil
+		}
+		rng := &data.TimeRange{StartInclusive: true, EndInclusive: true}
+		start := copyTime(inst.Time)
+		rng.Start = &start
+		if inst.DateOnly {
+			// A bare date means the whole day: [midnight, next midnight).
+			end := start.Add(24 * time.Hour)
+			rng.End = &end
+			rng.EndInclusive = false
+		} else {
+			end := copyTime(inst.Time)
+			rng.End = &end
+		}
+		return rng, nil
+	}
+	// Interval form: trimmed contains "/", so SplitN always yields two parts.
+	parts := strings.SplitN(trimmed, "/", 2)
+	startInst, err := parseDateTimeInstant(parts[0])
+	if err != nil {
+		return nil, err
+	}
+	endInst, err := parseDateTimeInstant(parts[1])
+	if err != nil {
+		return nil, err
+	}
+	if startInst == nil && endInst == nil {
+		// Both ends open ("../..") imposes no constraint.
+		return nil, nil
+	}
+	rng := &data.TimeRange{StartInclusive: true, EndInclusive: true}
+	if startInst != nil {
+		start := copyTime(startInst.Time)
+		rng.Start = &start
+	}
+	if endInst != nil {
+		end := copyTime(endInst.Time)
+		rng.End = &end
+	}
+	if endInst != nil && endInst.DateOnly {
+		// A date-only end covers its whole day: next midnight, exclusive.
+		endAdj := rng.End.Add(24 * time.Hour)
+		rng.End = &endAdj
+		rng.EndInclusive = false
+	}
+	// Reject inverted (empty) intervals.
+	if rng.Start != nil && rng.End != nil {
+		if rng.EndInclusive {
+			if rng.Start.After(*rng.End) {
+				return nil, fmt.Errorf(api.ErrMsgInvalidParameterValue, api.ParamDateTime, value)
+			}
+		} else if !rng.Start.Before(*rng.End) {
+			return nil, fmt.Errorf(api.ErrMsgInvalidParameterValue, api.ParamDateTime, value)
+		}
+	}
+	return rng, nil
+}
+
+type parsedInstant struct { // one parsed endpoint of a datetime parameter value
+	Time time.Time // parsed instant, normalized to UTC
+	DateOnly bool // true when the input was a bare date (no time component)
+}
+
+func parseDateTimeInstant(value string) (*parsedInstant, error) { // (nil, nil) signals an open interval end
+	trimmed := strings.TrimSpace(value)
+	if trimmed == "" || trimmed == ".." { // ".." (or empty) marks an open-ended side of an interval
+		return nil, nil
+	}
+	tm, isDateOnly, err := parseDateTimeLiteral(trimmed)
+	if err != nil {
+		return nil, err
+	}
+	return &parsedInstant{Time: tm.UTC(), DateOnly: isDateOnly}, nil
+}
+
+// parseDateTimeLiteral parses a single datetime token. It accepts RFC 3339
+// (with or without fractional seconds), zone-less date-time forms (assumed
+// UTC), and a bare date. The bool result reports the date-only case.
+func parseDateTimeLiteral(value string) (time.Time, bool, error) {
+	// RFC3339Nano also accepts values without fractional seconds, so a
+	// separate RFC3339 pass would be redundant.
+	if tm, err := time.Parse(time.RFC3339Nano, value); err == nil {
+		return tm.UTC(), false, nil
+	}
+	// Zone-less forms are interpreted as UTC.
+	for _, layout := range []string{"2006-01-02T15:04:05", "2006-01-02T15:04"} {
+		if tm, err := time.ParseInLocation(layout, value, time.UTC); err == nil {
+			return tm.UTC(), false, nil
+		}
+	}
+	// A bare date is flagged so callers can expand it to the full day.
+	if tm, err := time.Parse("2006-01-02", value); err == nil {
+		return tm.UTC(), true, nil
+	}
+	return time.Time{}, false, fmt.Errorf(api.ErrMsgInvalidParameterValue, api.ParamDateTime, value)
+}
+
+func copyTime(t time.Time) time.Time { // returns a UTC-normalized value callers take the address of
+	return t.UTC() // time.Time is a value type; UTC() also pins the location
+}
diff --git a/internal/service/param_datetime_test.go b/internal/service/param_datetime_test.go
new file mode 100644
index 00000000..b15adec1
--- /dev/null
+++ b/internal/service/param_datetime_test.go
@@ -0,0 +1,84 @@
+package service
+
+import (
+ "testing"
+ "time"
+)
+
+func TestParseDateTimeRangeInstant(t *testing.T) {
+	got, err := parseDateTimeRange("2018-02-12T23:20:52Z")
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if got == nil {
+		t.Fatalf("expected range")
+	}
+	if got.Start == nil || got.End == nil {
+		t.Fatalf("expected start and end")
+	}
+	if !got.Start.Equal(*got.End) {
+		t.Errorf("expected identical start and end, got %v %v", got.Start, got.End)
+	}
+	if !got.EndInclusive {
+		t.Errorf("expected inclusive end")
+	}
+}
+
+func TestParseDateTimeRangeDate(t *testing.T) {
+	got, err := parseDateTimeRange("2018-02-12")
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if got == nil {
+		t.Fatalf("expected range")
+	}
+	if got.Start == nil || got.End == nil {
+		t.Fatalf("expected start and end")
+	}
+	wantStart := time.Date(2018, 2, 12, 0, 0, 0, 0, time.UTC)
+	if !got.Start.Equal(wantStart) {
+		t.Errorf("unexpected start: %v", got.Start)
+	}
+	if got.EndInclusive {
+		t.Errorf("expected exclusive end for date-only value")
+	}
+	span := got.End.Sub(*got.Start)
+	if span != 24*time.Hour {
+		t.Errorf("expected 24h interval, got %v", span)
+	}
+}
+
+func TestParseDateTimeRangeInterval(t *testing.T) {
+	got, err := parseDateTimeRange("2018-02-12T00:00:00Z/2018-03-18T12:31:12Z")
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if got == nil || got.Start == nil || got.End == nil {
+		t.Fatalf("expected populated range")
+	}
+	if got.Start.After(*got.End) {
+		t.Fatalf("start after end")
+	}
+}
+
+func TestParseDateTimeRangeOpen(t *testing.T) {
+	got, err := parseDateTimeRange("../2018-03-18T12:31:12Z")
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if got == nil {
+		t.Fatalf("expected range")
+	}
+	if got.Start != nil {
+		t.Errorf("expected open start")
+	}
+	if got.End == nil {
+		t.Errorf("expected end value")
+	}
+}
+
+func TestParseDateTimeRangeInvalid(t *testing.T) {
+	if _, err := parseDateTimeRange("not-a-date"); err == nil { // malformed input must be rejected, not ignored
+		t.Fatalf("expected error for invalid input")
+	}
+}
diff --git a/internal/ui/ui.go b/internal/ui/ui.go
index 276528cb..30967a47 100644
--- a/internal/ui/ui.go
+++ b/internal/ui/ui.go
@@ -45,6 +45,7 @@ type PageData struct {
Function *data.Function
FeatureID string
ShowFeatureLink bool
+ TimeAware bool
}
var htmlTemp struct {