diff --git a/core/coord_to_tz.go b/core/coord_to_tz.go
index 490a4e8..bdef2c0 100644
--- a/core/coord_to_tz.go
+++ b/core/coord_to_tz.go
@@ -49,6 +49,54 @@ func CoordinateToTimezone(lat float64, lon float64) (string, error) {
return name, nil
}
+// only those states that have exactly one timezone
+var stateTimeZones = map[string]string{
+ "AL": "America/Chicago",
+ "AZ": "America/Phoenix",
+ "AR": "America/Chicago",
+ "CA": "America/Los_Angeles",
+ "CO": "America/Denver",
+ "CT": "America/New_York",
+ "DE": "America/New_York",
+ "DC": "America/New_York",
+ "GA": "America/New_York",
+ "HI": "Pacific/Honolulu",
+ "IL": "America/Chicago",
+ "IA": "America/Chicago",
+ "ME": "America/New_York",
+ "MD": "America/New_York",
+ "MA": "America/New_York",
+ "MN": "America/Chicago",
+ "MS": "America/Chicago",
+ "MO": "America/Chicago",
+ "MT": "America/Denver",
+ "NV": "America/Los_Angeles",
+ "NH": "America/New_York",
+ "NJ": "America/New_York",
+ "NM": "America/Denver",
+ "NY": "America/New_York",
+ "NC": "America/New_York",
+ "OH": "America/New_York",
+ "OK": "America/Chicago",
+ "PA": "America/New_York",
+ "RI": "America/New_York",
+ "SC": "America/New_York",
+ "UT": "America/Denver",
+ "VT": "America/New_York",
+ "VA": "America/New_York",
+ "WA": "America/Los_Angeles",
+ "WV": "America/New_York",
+ "WI": "America/Chicago",
+ "WY": "America/Denver",
+}
+
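+// USCoordinateToTimezone returns the IANA timezone for a point in the USA. States
+// covered by a single timezone are resolved from the table above; everything else
+// falls back to the coordinate lookup.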
+func USCoordinateToTimezone(state string, lat float64, lon float64) (string, error) {
+ if name, ok := stateTimeZones[state]; ok {
+ return name, nil
+ }
+ return CoordinateToTimezone(lat, lon)
+}
+
func CloseTimezoneDb() {
if tz != nil {
(*tz).Close()
diff --git a/scripts/usnws/descriptor.go b/scripts/usnws/descriptor.go
index 3e8f6ab..a78cb90 100644
--- a/scripts/usnws/descriptor.go
+++ b/scripts/usnws/descriptor.go
@@ -1,6 +1,8 @@
package usnws
import (
+ "fmt"
+
"github.com/whitewater-guide/gorge/core"
)
@@ -9,9 +11,20 @@ var Descriptor = &core.ScriptDescriptor{
Description: "U.S. National Oceanic and Atmospheric Administration's National Weather Service",
Mode: core.AllAtOnce,
DefaultOptions: func() interface{} {
- return &optionsUsnws{}
+ return &optionsUsnws{
+ pageSize: 1000, // the service defaults to 5000 when this is omitted; there are roughly 10500 features in total
+ numWorkers: 5,
+ }
},
Factory: func(name string, options interface{}) (core.Script, error) {
- return &scriptUsnws{name: name, kmzUrl: "https://water.weather.gov/ahps/download.php?data=kmz_obs"}, nil
+ if opts, ok := options.(*optionsUsnws); ok {
+ return &scriptUsnws{
+ name: name,
+ url: "https://mapservices.weather.noaa.gov/eventdriven/rest/services/water/ahps_riv_gauges/MapServer/0/query",
+ pageSize: opts.pageSize,
+ numWorkers: opts.numWorkers,
+ }, nil
+ }
+ return nil, fmt.Errorf("failed to cast %T", optionsUsnws{})
},
}
diff --git a/scripts/usnws/kmz_reader.go b/scripts/usnws/kmz_reader.go
deleted file mode 100644
index d7a8135..0000000
--- a/scripts/usnws/kmz_reader.go
+++ /dev/null
@@ -1,190 +0,0 @@
-package usnws
-
-import (
- "archive/zip"
- "bufio"
- "encoding/xml"
- "fmt"
- "io"
- "net/http"
- "os"
- "regexp"
- "strconv"
- "strings"
- "time"
-
- "github.com/mattn/go-nulltype"
- "github.com/whitewater-guide/gorge/core"
-)
-
-var (
- reCode = regexp.MustCompile(`NWSLID:\s*\s*(.*)\s*`)
- reName = regexp.MustCompile(`Location:\s*\s*(.*)\s*`)
- reVal = regexp.MustCompile(`Latest Observation Value:\s*\s*(.*)\s*`)
- reTime = regexp.MustCompile(`UTC Observation Time:\s*\s*(.*)\s*`)
- reLoc = regexp.MustCompile(`Lat/Long:\s*\s*(.*)\s*`)
- reHref = regexp.MustCompile(`Link to Gauge Hydrograph`)
-)
-
-type description struct {
- XMLName xml.Name `xml:"description"`
- Text string `xml:",cdata"`
-}
-
-func (s *scriptUsnws) parseKmz(gauges chan<- *core.Gauge, measurements chan<- *core.Measurement, errs chan<- error) {
- client := core.NewClient(core.ClientOptions{
- UserAgent: "whitewater.guide robot",
- Timeout: 300,
- }, s.GetLogger())
- req, _ := http.NewRequest("GET", s.kmzUrl, nil)
- s.GetLogger().Debugf("fetching %s", s.kmzUrl)
- resp, err := client.Do(req, nil)
- if err != nil {
- errs <- err
- return
- }
- defer resp.Body.Close()
- s.GetLogger().Debug("fetched")
- zipFile, err := os.CreateTemp("", "usnws")
- if err != nil {
- errs <- fmt.Errorf("failed to create tmp file: %w", err)
- return
- }
- defer os.Remove(zipFile.Name())
- if _, err = io.Copy(zipFile, resp.Body); err != nil {
- errs <- fmt.Errorf("failed to write tmp file: %w", err)
- return
- }
- s.GetLogger().Debugf("saved temp zip %s", zipFile.Name())
-
- // Open a zip archive for reading.
- r, err := zip.OpenReader(zipFile.Name())
- if err != nil {
- errs <- fmt.Errorf("failed to open tmp file: %w", err)
- return
- }
- defer r.Close()
- if len(r.File) != 1 {
- errs <- fmt.Errorf("expected 1 file inside kmz, found many: %d", len(r.File))
- return
- }
- kmlReader, err := r.File[0].Open()
- if err != nil {
- errs <- fmt.Errorf("failed to read kml file: %w", err)
- return
- }
- defer kmlReader.Close()
-
- decoder := xml.NewDecoder(kmlReader)
- s.GetLogger().Debug("created xml decoder")
- var descr description
- for {
- t, err := decoder.Token()
- if err != nil || t == nil {
- if err != io.EOF {
- s.GetLogger().Errorf("xml token error: %s", err)
- }
- break
- }
- switch se := t.(type) {
- case xml.StartElement:
- if se.Name.Local == "description" {
- if err := decoder.DecodeElement(&descr, &se); err == nil {
- s.parseEntry(descr.Text, gauges, measurements)
- } else {
- s.GetLogger().Errorf("decoder error: %s", err)
- }
- }
- default:
- }
- }
-}
-
-func (s *scriptUsnws) parseEntry(text string, gauges chan<- *core.Gauge, measurements chan<- *core.Measurement) {
- scanner := bufio.NewScanner(strings.NewReader(text))
- scanner.Split(bufio.ScanLines)
- var g core.Gauge
- g.Script = s.name
- var m core.Measurement
- mOk := false
- for scanner.Scan() {
- line := scanner.Text()
- if matches := reCode.FindStringSubmatch(line); len(matches) > 0 {
- g.Code = strings.TrimSpace(matches[1])
- } else if matches := reName.FindStringSubmatch(line); len(matches) > 0 {
- g.Name = strings.TrimSpace(matches[1])
- } else if matches := reVal.FindStringSubmatch(line); len(matches) > 0 {
- line = strings.TrimSpace(matches[1])
- parts := strings.Split(line, " ")
- if len(parts) == 2 {
- var v nulltype.NullFloat64
- vStr, unit := strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])
-
- if vF, err := strconv.ParseFloat(vStr, 64); err == nil {
- v = nulltype.NullFloat64Of(vF)
- } else {
- s.GetLogger().Warnf("cannot parse value '%s'", line)
- }
-
- switch unit {
- case "ft":
- m.Level = v
- g.LevelUnit = unit
- case "kcfs":
- m.Flow = v
- g.FlowUnit = unit
- default:
- s.GetLogger().Warnf("unknown unit '%s'", unit)
- m.Level = v
- g.LevelUnit = unit
- }
- mOk = true
- } else if line != "N/A" {
- // when the value is N/A, it's impossible to find out unit even from other lines, such as flood threshold
- s.GetLogger().Warnf("cannot parse value '%s'", line)
- continue
- }
- } else if matches := reTime.FindStringSubmatch(line); len(matches) > 0 {
- if t, err := time.Parse("2006-01-02 15:04:05", strings.TrimSpace(matches[1])); err == nil {
- m.Timestamp = core.HTime{Time: t}
- } else {
- mOk = false
- if matches[1] != "N/A" {
- s.GetLogger().Warnf("cannot parse time '%s'", matches[1])
- }
- }
-
- } else if matches := reLoc.FindStringSubmatch(line); len(matches) > 0 {
- parts := strings.Split(strings.TrimSpace(matches[1]), ",")
- if len(parts) != 2 {
- s.GetLogger().Warnf("cannot parse location '%s'", matches[1])
- continue
- }
- g.Location = &core.Location{}
- if lat, err := strconv.ParseFloat(strings.TrimSpace(parts[0]), 64); err == nil {
- if lon, err := strconv.ParseFloat(strings.TrimSpace(parts[1]), 64); err == nil {
- g.Location = &core.Location{Latitude: lat, Longitude: lon}
- zone, err := core.CoordinateToTimezone(lat, lon)
- if err != nil {
- s.GetLogger().Warnf("cannot find timezone for (%f, %f)", lat, lon)
- zone = "UTC"
- }
- g.Timezone = zone
- } else {
- s.GetLogger().Warnf("cannot parse longtitude '%s'", parts[1])
- }
- } else {
- s.GetLogger().Warnf("cannot parse latitude '%s'", parts[0])
- }
- } else if matches := reHref.FindStringSubmatch(line); len(matches) > 0 {
- g.URL = strings.TrimSpace(matches[1])
- }
- }
- if gauges != nil && g.Code != "" {
- gauges <- &g
- }
- if measurements != nil && g.Code != "" && mOk {
- m.GaugeID = g.GaugeID
- measurements <- &m
- }
-}
diff --git a/scripts/usnws/parser.go b/scripts/usnws/parser.go
new file mode 100644
index 0000000..59521e5
--- /dev/null
+++ b/scripts/usnws/parser.go
@@ -0,0 +1,187 @@
+package usnws
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/mattn/go-nulltype"
+ "github.com/whitewater-guide/gorge/core"
+ "golang.org/x/sync/errgroup"
+)
+
+type attributes struct {
+ Objectid int `json:"objectid"`
+ Gaugelid string `json:"gaugelid"`
+ Location string `json:"location"`
+ Latitude float64 `json:"latitude"`
+ Longitude float64 `json:"longitude"`
+ Waterbody string `json:"waterbody"`
+ State string `json:"state"`
+ Obstime string `json:"obstime"`
+ Units string `json:"units"`
+ Secunit string `json:"secunit"`
+ URL string `json:"url"`
+ Observed string `json:"observed"`
+ Secvalue string `json:"secvalue"`
+}
+
+type response struct {
+ Features []struct {
+ Attributes attributes `json:"attributes"`
+ } `json:"features"`
+ ExceededTransferLimit bool `json:"exceededTransferLimit"`
+}
+
+type countResponse struct {
+ Count int `json:"count"`
+}
+
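+// parseJson first asks the ArcGIS feature service for the total feature count, then
+// fans page offsets out to numWorkers workers that fetch and convert the features.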
+func (s *scriptUsnws) parseJson(gauges chan<- *core.Gauge, measurements chan<- *core.Measurement, errs chan<- error) {
+ var cntResp countResponse
+ if err := core.Client.GetAsJSON(s.url+"?where=1%3D1&text=&objectIds=&time=&timeRelation=esriTimeRelationOverlaps&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&distance=&units=esriSRUnit_Foot&relationParam=&outFields=&returnGeometry=true&returnTrueCurves=false&maxAllowableOffset=&geometryPrecision=&outSR=&havingClause=&returnIdsOnly=false&returnCountOnly=true&orderByFields=&groupByFieldsForStatistics=&outStatistics=&returnZ=false&returnM=false&gdbVersion=&historicMoment=&returnDistinctValues=false&resultOffset=&resultRecordCount=&returnExtentOnly=false&sqlFormat=none&datumTransformation=&parameterValues=&rangeValues=&quantizationParameters=&featureEncoding=esriDefault&f=pjson", &cntResp, nil); err != nil {
+ errs <- err
+ return
+ }
+ s.GetLogger().Debugf("found %d features", cntResp.Count)
+ jobs := make(chan int)
+ g := new(errgroup.Group)
+ for i := 0; i < s.numWorkers; i++ {
+ g.Go(func() error {
+ return s.worker(jobs, gauges, measurements)
+ })
+ }
+ for offset := 0; offset < cntResp.Count; offset += s.pageSize {
+ jobs <- offset
+ }
+ close(jobs)
+ if err := g.Wait(); err != nil {
+ errs <- err
+ }
+}
+
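+// worker fetches one page of features per offset and converts each feature into
+// either a gauge (ListGauges) or a measurement (Harvest), depending on which
+// channel is non-nil.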
+func (s *scriptUsnws) worker(jobs <-chan int, gauges chan<- *core.Gauge, measurements chan<- *core.Measurement) error {
+ for offset := range jobs {
+ var resp response
+ // outSR=4326 so that latitude and longitude come back as plain WGS84 degrees rather than Web Mercator (102100)
+ if err := core.Client.GetAsJSON(fmt.Sprintf("%s?f=json&where=(1%%3D1)%%20AND%%20(1%%3D1)&returnGeometry=false&spatialRel=esriSpatialRelIntersects&outFields=objectid,gaugelid,location,latitude,longitude,waterbody,state,obstime,units,secunit,url,observed,secvalue&orderByFields=objectid%%20ASC&outSR=4326&resultOffset=%d&resultRecordCount=%d", s.url, offset, s.pageSize), &resp, nil); err != nil {
+ return err
+ }
+ for _, feat := range resp.Features {
+ if gauges != nil {
+ if g := s.attributesToGauge(feat.Attributes); g != nil {
+ gauges <- g
+ }
+ } else if measurements != nil {
+ if m := s.attributesToMeasurement(feat.Attributes); m != nil {
+ measurements <- m
+ }
+ }
+ }
+ }
+ return nil
+}
+
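+// attributesToGauge converts a feature's attributes into a gauge. It returns nil
+// when neither a flow unit nor a level unit can be determined.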
+func (s *scriptUsnws) attributesToGauge(attrs attributes) *core.Gauge {
+ flowU, levelU, _ := getUnits(attrs)
+ if flowU == "" && levelU == "" {
+ return nil
+ }
+
+ zone, err := core.USCoordinateToTimezone(attrs.State, attrs.Latitude, attrs.Longitude)
+ if err != nil {
+ zone = "UTC"
+ }
+ return &core.Gauge{
+ GaugeID: core.GaugeID{
+ Script: s.name,
+ Code: attrs.Gaugelid,
+ },
+ Name: fmt.Sprintf("%s / %s / %s", attrs.Waterbody, attrs.Location, attrs.State),
+ URL: attrs.URL,
+ LevelUnit: levelU,
+ FlowUnit: flowU,
+ Location: &core.Location{
+ Latitude: core.TruncCoord(attrs.Latitude),
+ Longitude: core.TruncCoord(attrs.Longitude),
+ },
+ Timezone: zone,
+ }
+}
+
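+// attributesToMeasurement converts a feature's attributes into a measurement.
+// Empty, "N/A" and sentinel (-999.00) values are skipped.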
+func (s *scriptUsnws) attributesToMeasurement(attrs attributes) *core.Measurement {
+ flowU, levelU, flowPrimary := getUnits(attrs)
+ if flowU == "" && levelU == "" {
+ return nil
+ }
+ // obstime comes as "2023-10-01 18:30:00" and is in UTC
+ obstime := strings.TrimSpace(attrs.Obstime)
+ if obstime == "" || obstime == "N/A" {
+ return nil
+ }
+ t, err := time.ParseInLocation("2006-01-02 15:04:05", obstime, time.UTC)
+ if err != nil {
+ s.GetLogger().Warnf("failed to parse obstime %s", obstime)
+ return nil
+ }
+ flow, level := nulltype.NullFloat64{}, nulltype.NullFloat64{}
+ vPrim, vSec := strings.TrimSpace(attrs.Observed), strings.TrimSpace(attrs.Secvalue)
+ if vPrim != "" && vPrim != "-999.00" {
+ if f, err := strconv.ParseFloat(vPrim, 64); err != nil {
+ s.GetLogger().Warnf("failed to parse observer '%s'", vPrim)
+ } else if flowPrimary {
+ flow.Set(f)
+ } else {
+ level.Set(f)
+ }
+ }
+ if vSec != "" && vSec != "-999.00" {
+ if f, err := strconv.ParseFloat(vSec, 64); err != nil {
+ s.GetLogger().Warnf("failed to parse secvalue '%s'", vSec)
+ } else if flowPrimary {
+ level.Set(f)
+ } else {
+ flow.Set(f)
+ }
+ }
+
+ return &core.Measurement{
+ GaugeID: core.GaugeID{
+ Script: s.name,
+ Code: attrs.Gaugelid,
+ },
+ Timestamp: core.HTime{
+ Time: t,
+ },
+ Level: level,
+ Flow: flow,
+ }
+}
+
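+// getUnits maps the service's "units"/"secunit" fields to gorge flow and level units
+// and reports whether the primary observed value is the flow. Asterisk-suffixed
+// units mark gauges whose primary value is treated as flow in kcfs.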
+func getUnits(attrs attributes) (flowUnit string, levelUnit string, flowPrimary bool) {
+ // known units values: "cfs", "cfs*", "ft", "ft*", "kcfs", "kcfs*"
+ // known secunit values: " ", "ft", "kcfs"
+ switch attrs.Units {
+ case "cfs", "kcfs":
+ flowUnit = attrs.Units
+ levelUnit = attrs.Secunit
+ flowPrimary = true
+ case "ft":
+ flowUnit = attrs.Secunit
+ levelUnit = attrs.Units
+ case "cfs*", "kcfs*":
+ // https://water.weather.gov/ahps2/hydrograph.php?wfo=slc&gage=lctu1
+ flowPrimary = true
+ flowUnit = "kcfs"
+ levelUnit = attrs.Secunit
+ case "ft*":
+ // https://water.weather.gov/ahps2/hydrograph.php?wfo=boi&gage=andi1
+ flowPrimary = true
+ flowUnit = "kcfs"
+ levelUnit = attrs.Secunit
+ }
+ levelUnit = strings.TrimSpace(levelUnit)
+ flowUnit = strings.TrimSpace(flowUnit)
+ return
+}
diff --git a/scripts/usnws/script.go b/scripts/usnws/script.go
index 3b0a70d..e104462 100644
--- a/scripts/usnws/script.go
+++ b/scripts/usnws/script.go
@@ -6,11 +6,16 @@ import (
"github.com/whitewater-guide/gorge/core"
)
-type optionsUsnws struct{}
+type optionsUsnws struct {
+ pageSize int
+ numWorkers int
+}
type scriptUsnws struct {
core.LoggingScript
- name string
- kmzUrl string
+ name string
+ url string
+ pageSize int
+ numWorkers int
}
func (s *scriptUsnws) ListGauges() (core.Gauges, error) {
@@ -19,7 +24,7 @@ func (s *scriptUsnws) ListGauges() (core.Gauges, error) {
go func() {
defer close(gaugesCh)
defer close(errCh)
- s.parseKmz(gaugesCh, nil, errCh)
+ s.parseJson(gaugesCh, nil, errCh)
}()
return core.GaugeSinkToSlice(gaugesCh, errCh)
}
@@ -27,5 +32,5 @@ func (s *scriptUsnws) ListGauges() (core.Gauges, error) {
func (s *scriptUsnws) Harvest(ctx context.Context, recv chan<- *core.Measurement, errs chan<- error, codes core.StringSet, since int64) {
defer close(recv)
defer close(errs)
- s.parseKmz(nil, recv, errs)
+ s.parseJson(nil, recv, errs)
}
diff --git a/scripts/usnws/script_test.go b/scripts/usnws/script_test.go
index 212d0ca..a21acba 100644
--- a/scripts/usnws/script_test.go
+++ b/scripts/usnws/script_test.go
@@ -13,7 +13,7 @@ import (
func setupTestServer() *httptest.Server {
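+ // data_count.json answers the count request (returnCountOnly); data_<resultOffset>.json answers the page requests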
return testutils.SetupFileServer(map[string]string{
- "": "ahps_national_obs.kmz",
+ "": "data_{{ if ne .returnCountOnly nil }}count{{ end }}{{ .resultOffset }}.json",
}, nil)
}
@@ -21,42 +21,64 @@ func TestUsnws_ListGauges(t *testing.T) {
ts := setupTestServer()
defer ts.Close()
s := scriptUsnws{
- name: "usnws",
- kmzUrl: ts.URL,
+ name: "usnws",
+ url: ts.URL,
+ pageSize: 1,
+ numWorkers: 2,
}
actual, err := s.ListGauges()
expected := core.Gauges{
core.Gauge{
GaugeID: core.GaugeID{
Script: "usnws",
- Code: "aplw3",
+ Code: "AAIT2",
},
LevelUnit: "ft",
+ FlowUnit: "kcfs",
Location: &core.Location{
- Latitude: 44.248056,
- Longitude: -88.423056,
+ Latitude: 30.22111,
+ Longitude: -97.79333,
},
- Name: "Fox River (North) at Appleton",
- URL: "https://water.weather.gov/ahps2/hydrograph.php?wfo=GRB&gage=aplw3",
+ Name: "Williamson Creek / Manchaca Road at Austin / TX",
+ URL: "https://water.weather.gov/ahps2/hydrograph.php?wfo=ewx&gage=aait2",
Timezone: "America/Chicago",
},
core.Gauge{
GaugeID: core.GaugeID{
Script: "usnws",
- Code: "aubw1",
+ Code: "LCTU1",
},
- FlowUnit: "kcfs",
+ FlowUnit: "kcfs",
+ LevelUnit: "ft",
+ Location: &core.Location{
+ Latitude: 40.57777,
+ Longitude: -111.79722,
+ },
+ Name: "Little Cottonwood Creek / Salt Lake City / UT",
+ URL: "https://water.weather.gov/ahps2/hydrograph.php?wfo=slc&gage=lctu1",
+ Timezone: "America/Denver",
+ },
+ core.Gauge{
+ GaugeID: core.GaugeID{
+ Script: "usnws",
+ Code: "ANDI1",
+ },
+ FlowUnit: "kcfs",
+ LevelUnit: "ft",
Location: &core.Location{
- Latitude: 47.312500,
- Longitude: -122.202778,
+ Latitude: 43.34361,
+ Longitude: -115.4775,
},
- Name: "Green River (WA) near Auburn",
- URL: "https://water.weather.gov/ahps2/hydrograph.php?wfo=SEW&gage=aubw1",
- Timezone: "America/Los_Angeles",
+ Name: "South Fork Boise River / Anderson Ranch Dam / ID",
+ URL: "https://water.weather.gov/ahps2/hydrograph.php?wfo=boi&gage=andi1",
+ Timezone: "America/Boise",
},
}
if assert.NoError(t, err) {
- assert.Equal(t, expected, actual)
+ assert.Len(t, actual, 3)
+ assert.Contains(t, actual, expected[0])
+ assert.Contains(t, actual, expected[1])
+ assert.Contains(t, actual, expected[2])
}
}
@@ -64,29 +86,44 @@ func TestUsnws_Harvest(t *testing.T) {
ts := setupTestServer()
defer ts.Close()
s := scriptUsnws{
- name: "usnws",
- kmzUrl: ts.URL,
+ name: "usnws",
+ url: ts.URL,
+ pageSize: 1,
+ numWorkers: 2,
}
actual, err := core.HarvestSlice(&s, core.StringSet{}, 0)
expected := core.Measurements{
&core.Measurement{
GaugeID: core.GaugeID{
Script: "usnws",
- Code: "aplw3",
+ Code: "AAIT2",
+ },
+ Level: nulltype.NullFloat64Of(1.99),
+ Timestamp: core.HTime{Time: time.Date(2023, time.October, 1, 18, 30, 0, 0, time.UTC)},
+ },
+ &core.Measurement{
+ GaugeID: core.GaugeID{
+ Script: "usnws",
+ Code: "LCTU1",
},
- Level: nulltype.NullFloat64Of(5.53),
- Timestamp: core.HTime{Time: time.Date(2023, time.September, 3, 14, 0, 0, 0, time.UTC)},
+ Flow: nulltype.NullFloat64Of(0.03),
+ Level: nulltype.NullFloat64Of(0.5),
+ Timestamp: core.HTime{Time: time.Date(2023, time.October, 1, 12, 0, 0, 0, time.UTC)},
},
&core.Measurement{
GaugeID: core.GaugeID{
Script: "usnws",
- Code: "aubw1",
+ Code: "ANDI1",
},
- Flow: nulltype.NullFloat64Of(0.302),
- Timestamp: core.HTime{Time: time.Date(2023, time.September, 3, 13, 45, 0, 0, time.UTC)},
+ Flow: nulltype.NullFloat64Of(0.3),
+ Level: nulltype.NullFloat64Of(3.0),
+ Timestamp: core.HTime{Time: time.Date(2023, time.October, 1, 20, 15, 0, 0, time.UTC)},
},
}
if assert.NoError(t, err) {
- assert.Equal(t, expected, actual)
+ assert.Len(t, actual, 3)
+ assert.Contains(t, actual, expected[0])
+ assert.Contains(t, actual, expected[1])
+ assert.Contains(t, actual, expected[2])
}
}
diff --git a/scripts/usnws/test_data/ahps_national_obs.kmz b/scripts/usnws/test_data/ahps_national_obs.kmz
deleted file mode 100644
index c60cbd9..0000000
Binary files a/scripts/usnws/test_data/ahps_national_obs.kmz and /dev/null differ
diff --git a/scripts/usnws/test_data/data_0.json b/scripts/usnws/test_data/data_0.json
new file mode 100644
index 0000000..ae99af7
--- /dev/null
+++ b/scripts/usnws/test_data/data_0.json
@@ -0,0 +1,115 @@
+{
+ "displayFieldName": "gaugelid",
+ "fieldAliases": {
+ "objectid": "objectid",
+ "gaugelid": "gaugelid",
+ "location": "location",
+ "latitude": "latitude",
+ "longitude": "longitude",
+ "waterbody": "waterbody",
+ "state": "state",
+ "obstime": "obstime",
+ "units": "units",
+ "secunit": "secunit",
+ "url": "url",
+ "observed": "observed",
+ "secvalue": "secvalue"
+ },
+ "fields": [
+ {
+ "name": "objectid",
+ "type": "esriFieldTypeOID",
+ "alias": "objectid"
+ },
+ {
+ "name": "gaugelid",
+ "type": "esriFieldTypeString",
+ "alias": "gaugelid",
+ "length": 5
+ },
+ {
+ "name": "location",
+ "type": "esriFieldTypeString",
+ "alias": "location",
+ "length": 90
+ },
+ {
+ "name": "latitude",
+ "type": "esriFieldTypeDouble",
+ "alias": "latitude"
+ },
+ {
+ "name": "longitude",
+ "type": "esriFieldTypeDouble",
+ "alias": "longitude"
+ },
+ {
+ "name": "waterbody",
+ "type": "esriFieldTypeString",
+ "alias": "waterbody",
+ "length": 255
+ },
+ {
+ "name": "state",
+ "type": "esriFieldTypeString",
+ "alias": "state",
+ "length": 2
+ },
+ {
+ "name": "obstime",
+ "type": "esriFieldTypeString",
+ "alias": "obstime",
+ "length": 26
+ },
+ {
+ "name": "units",
+ "type": "esriFieldTypeString",
+ "alias": "units",
+ "length": 5
+ },
+ {
+ "name": "secunit",
+ "type": "esriFieldTypeString",
+ "alias": "secunit",
+ "length": 5
+ },
+ {
+ "name": "url",
+ "type": "esriFieldTypeString",
+ "alias": "url",
+ "length": 231
+ },
+ {
+ "name": "observed",
+ "type": "esriFieldTypeString",
+ "alias": "observed",
+ "length": 24
+ },
+ {
+ "name": "secvalue",
+ "type": "esriFieldTypeString",
+ "alias": "secvalue",
+ "length": 24
+ }
+ ],
+ "features": [
+ {
+ "attributes": {
+ "objectid": 1,
+ "gaugelid": "AAIT2",
+ "location": "Manchaca Road at Austin",
+ "latitude": 30.221111,
+ "longitude": -97.793333,
+ "waterbody": "Williamson Creek",
+ "state": "TX",
+ "obstime": "2023-10-01 18:30:00",
+ "units": "ft",
+ "secunit": "kcfs",
+ "url": "https://water.weather.gov/ahps2/hydrograph.php?wfo=ewx&gage=aait2",
+ "observed": "1.99",
+ "secvalue": "-999.00"
+ }
+ }
+ ],
+ "exceededTransferLimit": true
+}
\ No newline at end of file
diff --git a/scripts/usnws/test_data/data_1.json b/scripts/usnws/test_data/data_1.json
new file mode 100644
index 0000000..12ed8a2
--- /dev/null
+++ b/scripts/usnws/test_data/data_1.json
@@ -0,0 +1,115 @@
+{
+ "displayFieldName": "gaugelid",
+ "fieldAliases": {
+ "objectid": "objectid",
+ "gaugelid": "gaugelid",
+ "location": "location",
+ "latitude": "latitude",
+ "longitude": "longitude",
+ "waterbody": "waterbody",
+ "state": "state",
+ "obstime": "obstime",
+ "units": "units",
+ "secunit": "secunit",
+ "url": "url",
+ "observed": "observed",
+ "secvalue": "secvalue"
+ },
+ "fields": [
+ {
+ "name": "objectid",
+ "type": "esriFieldTypeOID",
+ "alias": "objectid"
+ },
+ {
+ "name": "gaugelid",
+ "type": "esriFieldTypeString",
+ "alias": "gaugelid",
+ "length": 5
+ },
+ {
+ "name": "location",
+ "type": "esriFieldTypeString",
+ "alias": "location",
+ "length": 90
+ },
+ {
+ "name": "latitude",
+ "type": "esriFieldTypeDouble",
+ "alias": "latitude"
+ },
+ {
+ "name": "longitude",
+ "type": "esriFieldTypeDouble",
+ "alias": "longitude"
+ },
+ {
+ "name": "waterbody",
+ "type": "esriFieldTypeString",
+ "alias": "waterbody",
+ "length": 255
+ },
+ {
+ "name": "state",
+ "type": "esriFieldTypeString",
+ "alias": "state",
+ "length": 2
+ },
+ {
+ "name": "obstime",
+ "type": "esriFieldTypeString",
+ "alias": "obstime",
+ "length": 26
+ },
+ {
+ "name": "units",
+ "type": "esriFieldTypeString",
+ "alias": "units",
+ "length": 5
+ },
+ {
+ "name": "secunit",
+ "type": "esriFieldTypeString",
+ "alias": "secunit",
+ "length": 5
+ },
+ {
+ "name": "url",
+ "type": "esriFieldTypeString",
+ "alias": "url",
+ "length": 231
+ },
+ {
+ "name": "observed",
+ "type": "esriFieldTypeString",
+ "alias": "observed",
+ "length": 24
+ },
+ {
+ "name": "secvalue",
+ "type": "esriFieldTypeString",
+ "alias": "secvalue",
+ "length": 24
+ }
+ ],
+ "features": [
+ {
+ "attributes": {
+ "objectid": 4990,
+ "gaugelid": "LCTU1",
+ "location": "Salt Lake City",
+ "latitude": 40.577778,
+ "longitude": -111.797222,
+ "waterbody": "Little Cottonwood Creek",
+ "state": "UT",
+ "obstime": "2023-10-01 12:00:00",
+ "units": "cfs*",
+ "secunit": "ft",
+ "url": "https://water.weather.gov/ahps2/hydrograph.php?wfo=slc&gage=lctu1",
+ "observed": "0.03",
+ "secvalue": "0.50"
+ }
+ }
+ ],
+ "exceededTransferLimit": true
+}
\ No newline at end of file
diff --git a/scripts/usnws/test_data/data_2.json b/scripts/usnws/test_data/data_2.json
new file mode 100644
index 0000000..8be5adc
--- /dev/null
+++ b/scripts/usnws/test_data/data_2.json
@@ -0,0 +1,115 @@
+{
+ "displayFieldName": "gaugelid",
+ "fieldAliases": {
+ "objectid": "objectid",
+ "gaugelid": "gaugelid",
+ "location": "location",
+ "latitude": "latitude",
+ "longitude": "longitude",
+ "waterbody": "waterbody",
+ "state": "state",
+ "obstime": "obstime",
+ "units": "units",
+ "secunit": "secunit",
+ "url": "url",
+ "observed": "observed",
+ "secvalue": "secvalue"
+ },
+ "fields": [
+ {
+ "name": "objectid",
+ "type": "esriFieldTypeOID",
+ "alias": "objectid"
+ },
+ {
+ "name": "gaugelid",
+ "type": "esriFieldTypeString",
+ "alias": "gaugelid",
+ "length": 5
+ },
+ {
+ "name": "location",
+ "type": "esriFieldTypeString",
+ "alias": "location",
+ "length": 90
+ },
+ {
+ "name": "latitude",
+ "type": "esriFieldTypeDouble",
+ "alias": "latitude"
+ },
+ {
+ "name": "longitude",
+ "type": "esriFieldTypeDouble",
+ "alias": "longitude"
+ },
+ {
+ "name": "waterbody",
+ "type": "esriFieldTypeString",
+ "alias": "waterbody",
+ "length": 255
+ },
+ {
+ "name": "state",
+ "type": "esriFieldTypeString",
+ "alias": "state",
+ "length": 2
+ },
+ {
+ "name": "obstime",
+ "type": "esriFieldTypeString",
+ "alias": "obstime",
+ "length": 26
+ },
+ {
+ "name": "units",
+ "type": "esriFieldTypeString",
+ "alias": "units",
+ "length": 5
+ },
+ {
+ "name": "secunit",
+ "type": "esriFieldTypeString",
+ "alias": "secunit",
+ "length": 5
+ },
+ {
+ "name": "url",
+ "type": "esriFieldTypeString",
+ "alias": "url",
+ "length": 231
+ },
+ {
+ "name": "observed",
+ "type": "esriFieldTypeString",
+ "alias": "observed",
+ "length": 24
+ },
+ {
+ "name": "secvalue",
+ "type": "esriFieldTypeString",
+ "alias": "secvalue",
+ "length": 24
+ }
+ ],
+ "features": [
+ {
+ "attributes": {
+ "objectid": 232,
+ "gaugelid": "ANDI1",
+ "location": "Anderson Ranch Dam",
+ "latitude": 43.343611,
+ "longitude": -115.4775,
+ "waterbody": "South Fork Boise River",
+ "state": "ID",
+ "obstime": "2023-10-01 20:15:00",
+ "units": "ft*",
+ "secunit": "ft",
+ "url": "https://water.weather.gov/ahps2/hydrograph.php?wfo=boi&gage=andi1",
+ "observed": "0.30",
+ "secvalue": "3.00"
+ }
+ }
+ ],
+ "exceededTransferLimit": true
+}
\ No newline at end of file
diff --git a/scripts/usnws/test_data/data_count.json b/scripts/usnws/test_data/data_count.json
new file mode 100644
index 0000000..ab774a5
--- /dev/null
+++ b/scripts/usnws/test_data/data_count.json
@@ -0,0 +1,3 @@
+{
+ "count": 3
+}
\ No newline at end of file