Merge branch 'release/3.25.1'
msqr committed Aug 12, 2024
2 parents e3df0af + b293573 commit e759f40
Showing 7 changed files with 89 additions and 32 deletions.
3 changes: 3 additions & 0 deletions solarnet-db-setup/postgres/migrations/migrate-20240812.sql
@@ -0,0 +1,3 @@
-- Run this script from the parent directory, e.g. psql -f migrations/migrate-20240812.sql

\i updates/NET-387-xofy-agg-accum-fix.sql
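
A minimal sketch of applying this migration, run from solarnet-db-setup/postgres as the script's own comment directs, assuming a database named solarnetwork (the database name is an assumption, not part of the commit):

    psql -d solarnetwork -f migrations/migrate-20240812.sql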
6 changes: 4 additions & 2 deletions solarnet-db-setup/postgres/postgres-init-datm-agg-util.sql
@@ -194,7 +194,9 @@ CREATE AGGREGATE solardatm.rollup_agg_data(solardatm.agg_data) (
* Aggregate datum rollup state transition function, to average aggregate datum into a higher-level
* aggregate datum.
*
- * Note that the `data_s` and `read_a` columns are not supported.
+ * Note that the `data_s` column is not supported. The `data_r` column is handled differently
+ * in that each 3-element sub-array contains the average, min, and max accumulated value (instead
+ * of the difference, start, and end accumulated value).
*
* @see solardatm.avg_agg_data_ffunc()
*/
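
For context, a sketch of what the changed read_a layout means when averaging two aggregate rows of a single accumulating property (the numbers are illustrative, not from the commit):

    -- illustrative only: two input rows whose accumulated values are 100 and 300
    -- new layout: {200, 100, 300}   (average, min, max)
    -- old layout: {NULL, 100, 300}  (the first element was left unset)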
@@ -348,7 +350,7 @@ BEGIN
	SELECT
		vec_trim_scale(array_agg(val ORDER BY idx)) AS data_a
		, array_agg(
-			ARRAY[NULL, val_min, val_max] ORDER BY idx
+			ARRAY[val, val_min, val_max] ORDER BY idx
		) AS read_a
	FROM da
)
52 changes: 52 additions & 0 deletions solarnet-db-setup/postgres/updates/NET-387-xofy-agg-accum-fix.sql
@@ -0,0 +1,52 @@
CREATE OR REPLACE FUNCTION solardatm.avg_agg_data_ffunc(agg_state solardatm.agg_data)
RETURNS solardatm.agg_data LANGUAGE plpgsql STRICT IMMUTABLE AS
$$
BEGIN
	WITH di AS (
		SELECT
			p.idx
			, p.val / s.stat[1] AS val
			, s.stat[1] AS cnt
			, s.stat[2] AS val_min
			, s.stat[3] AS val_max
		FROM unnest(agg_state.data_i) WITH ORDINALITY AS p(val, idx)
		INNER JOIN solarcommon.reduce_dim(agg_state.stat_i) WITH ORDINALITY AS s(stat, idx) ON s.idx = p.idx
	)
	, di_ary AS (
		SELECT
			vec_trim_scale(array_agg(val ORDER BY idx)) AS data_i
			, array_agg(
				vec_trim_scale(ARRAY[cnt, val_min, val_max]) ORDER BY idx
			) AS stat_i
		FROM di
	)
	, da AS (
		SELECT
			p.idx
			, p.val / s.stat[1] AS val
			, s.stat[2] AS val_min
			, s.stat[3] AS val_max
		FROM unnest(agg_state.data_a) WITH ORDINALITY AS p(val, idx)
		INNER JOIN solarcommon.reduce_dim(agg_state.read_a) WITH ORDINALITY AS s(stat, idx) ON s.idx = p.idx
	)
	, da_ary AS (
		SELECT
			vec_trim_scale(array_agg(val ORDER BY idx)) AS data_a
			, array_agg(
				vec_trim_scale(ARRAY[val, val_min, val_max]) ORDER BY idx
			) AS read_a
		FROM da
	)
	SELECT
		di_ary.data_i
		, da_ary.data_a
		, agg_state.data_s
		, agg_state.data_t
		, di_ary.stat_i
		, da_ary.read_a
	FROM di_ary, da_ary
	INTO agg_state;

	RETURN agg_state;
END;
$$;
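
The substance of the fix is the ARRAY[val, val_min, val_max] element in the da_ary query, where the previous version emitted NULL in place of the averaged value. A minimal standalone sketch of that element construction, assuming the vec_trim_scale() helper used above can be invoked directly (the literal values are illustrative):

    -- averaged accumulation 500, observed minimum 100, maximum 800
    SELECT vec_trim_scale(ARRAY[500, 100, 800]::numeric[]);
    -- expected: {500,100,800} (the prior code would have yielded {NULL,100,800})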
@@ -1,21 +1,21 @@
/* ==================================================================
* DbFindAggDowTests.java - 10/12/2020 9:12:05 pm
- * 
+ *
* Copyright 2020 SolarNetwork.net Dev Team
- * 
- * This program is free software; you can redistribute it and/or 
- * modify it under the terms of the GNU General Public License as 
- * published by the Free Software Foundation; either version 2 of 
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License as
+ * published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
- * 
- * This program is distributed in the hope that it will be useful, 
- * but WITHOUT ANY WARRANTY; without even the implied warranty of 
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
- * 
- * You should have received a copy of the GNU General Public License 
- * along with this program; if not, write to the Free Software 
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
* 02111-1307 USA
* ==================================================================
*/
@@ -31,8 +31,8 @@
import static net.solarnetwork.domain.datum.DatumPropertiesStatistics.statisticsOf;
import static net.solarnetwork.domain.datum.ObjectDatumStreamMetadataProvider.staticProvider;
import static net.solarnetwork.util.NumberUtils.decimalArray;
-import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.hasSize;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.CallableStatement;
Expand All @@ -56,14 +56,14 @@
import net.solarnetwork.central.datum.v2.dao.jdbc.AggregateDatumEntityRowMapper;
import net.solarnetwork.central.datum.v2.domain.AggregateDatum;
import net.solarnetwork.central.datum.v2.domain.BasicObjectDatumStreamMetadata;
+import net.solarnetwork.domain.datum.Aggregation;
import net.solarnetwork.domain.datum.ObjectDatumKind;
import net.solarnetwork.domain.datum.ObjectDatumStreamMetadata;
-import net.solarnetwork.domain.datum.Aggregation;

/**
* Test cases for the {@literal solardatm.find_agg_dow} database stored
* procedure.
- * 
+ *
* @author matt
* @version 1.0
*/
@@ -130,8 +130,8 @@ public void find_dow_typical() throws IOException {
propertiesOf(decimalArray("1.6", "6.1"), decimalArray("500"), null, null),
statisticsOf(
new BigDecimal[][] { decimalArray("18", "1.1", "3.1"),
decimalArray("18", "2.0", "7.1") },
new BigDecimal[][] { decimalArray(null, "100", "800") })));
decimalArray("18", "2", "7.1") },
new BigDecimal[][] { decimalArray("500", "100", "800") })));
assertAggregateDatum("Tuesday result", results.get(1),
new AggregateDatumEntity(streamId, date.plusDays(1).toInstant(), Aggregation.DayOfWeek,
propertiesOf(decimalArray("1.4", "4.1"), decimalArray("600"), null, null),
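
The updated expectations above follow from the new final function: the Monday accumulating reading now reports its averaged value of 500 (matching the decimalArray("500") property) in the first read-statistic slot instead of null, and "2.0" tightens to "2", presumably because vec_trim_scale() trims insignificant trailing scale from the statistics arrays.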
@@ -158,8 +158,8 @@ public void find_doy_utc() {
DatumPropertiesStatistics stats = statisticsOf(
new BigDecimal[][] { new BigDecimal[] { new BigDecimal(12), new BigDecimal(i - 1),
new BigDecimal(i + 365 + 1) } },
-new BigDecimal[][] { new BigDecimal[] { null, new BigDecimal(i + 1),
-new BigDecimal(i + 365 + 1) } });
+new BigDecimal[][] { new BigDecimal[] { new BigDecimal(pi + 1),
+new BigDecimal(i + 1), new BigDecimal(i + 365 + 1) } });

// skip 29 Feb as data not over leap year
ZonedDateTime expectedDate = date.plusDays(i);
@@ -218,8 +218,8 @@ public void find_doy_tz() {
DatumPropertiesStatistics stats = statisticsOf(
new BigDecimal[][] { new BigDecimal[] { new BigDecimal(12), new BigDecimal(i - 1),
new BigDecimal(i + 365 + 1) } },
-new BigDecimal[][] { new BigDecimal[] { null, new BigDecimal(i + 1),
-new BigDecimal(i + 365 + 1) } });
+new BigDecimal[][] { new BigDecimal[] { new BigDecimal(pi + 1),
+new BigDecimal(i + 1), new BigDecimal(i + 365 + 1) } });

// skip 29 Feb as data not over leap year
ZonedDateTime expectedDate = date.plusDays(i);
@@ -153,8 +153,8 @@ public void find_hod_typical() {
DatumPropertiesStatistics stats = statisticsOf(
new BigDecimal[][] { new BigDecimal[] { new BigDecimal(18), new BigDecimal(i - 1),
new BigDecimal(i + 3) } },
-new BigDecimal[][] { new BigDecimal[] { null, new BigDecimal(i + 1),
-new BigDecimal(3 * (i + 1)) } });
+new BigDecimal[][] { new BigDecimal[] { new BigDecimal((i + 1) * 2),
+new BigDecimal(i + 1), new BigDecimal(3 * (i + 1)) } });
assertAggregateDatum("Hour " + i, d, new AggregateDatumEntity(meta.getStreamId(),
date.plusHours(i).toInstant(), Aggregation.HourOfDay, props, stats));
}
@@ -204,8 +204,8 @@ public void find_hod_tz() {
DatumPropertiesStatistics stats = statisticsOf(
new BigDecimal[][] { new BigDecimal[] { new BigDecimal(18), new BigDecimal(i - 1),
new BigDecimal(i + 3) } },
-new BigDecimal[][] { new BigDecimal[] { null, new BigDecimal(i + 1),
-new BigDecimal(3 * (i + 1)) } });
+new BigDecimal[][] { new BigDecimal[] { new BigDecimal((i + 1) * 2),
+new BigDecimal(i + 1), new BigDecimal(3 * (i + 1)) } });
assertAggregateDatum("Hour " + i, d, new AggregateDatumEntity(meta.getStreamId(),
date.plusHours(i).toInstant(), Aggregation.HourOfDay, props, stats));
}
@@ -159,8 +159,8 @@ public void find_hoy_utc() {
DatumPropertiesStatistics stats = statisticsOf(
new BigDecimal[][] { new BigDecimal[] { new BigDecimal(12), new BigDecimal(i - 1),
new BigDecimal(i + HOURS_PER_NON_LEAP_YEAR + 1) } },
-new BigDecimal[][] { new BigDecimal[] { null, new BigDecimal(i + 1),
-new BigDecimal(i + HOURS_PER_NON_LEAP_YEAR + 1) } });
+new BigDecimal[][] { new BigDecimal[] { new BigDecimal(pi + 1),
+new BigDecimal(i + 1), new BigDecimal(i + HOURS_PER_NON_LEAP_YEAR + 1) } });

// skip 29 Feb as data not over leap year
ZonedDateTime expectedDate = date.plusHours(i);
@@ -219,8 +219,8 @@ public void find_hoy_tz() {
DatumPropertiesStatistics stats = statisticsOf(
new BigDecimal[][] { new BigDecimal[] { new BigDecimal(12), new BigDecimal(i - 1),
new BigDecimal(i + HOURS_PER_NON_LEAP_YEAR + 1) } },
-new BigDecimal[][] { new BigDecimal[] { null, new BigDecimal(i + 1),
-new BigDecimal(i + HOURS_PER_NON_LEAP_YEAR + 1) } });
+new BigDecimal[][] { new BigDecimal[] { new BigDecimal(pi + 1),
+new BigDecimal(i + 1), new BigDecimal(i + HOURS_PER_NON_LEAP_YEAR + 1) } });

// skip 29 Feb as data not over leap year
ZonedDateTime expectedDate = date.plusHours(i);
