author     jgart <jgart@dismail.de>  2022-11-10 20:06:24 -0600
committer  Ludovic Courtès <ludo@gnu.org>  2022-11-18 15:44:39 +0100
commit     ff00cdcd92d92d5b8ab9a97e66e0afada4405be7 (patch)
tree       e3029345e3798313693f5a7e068030d6fea2a107 /gnu/packages/patches
parent     cdfc1f41c605a98129927feb0165d00ca944df6c (diff)
download   guix-ff00cdcd92d92d5b8ab9a97e66e0afada4405be7.tar.gz
gnu: timescaledb: Update to 2.8.1.
* gnu/packages/patches/timescaledb-flaky-test.patch: Remove.
* gnu/local.mk (dist_patch_DATA): Remove it.
* gnu/packages/databases.scm (timescaledb): Update to 2.8.1.
[source]: Remove patch.

Signed-off-by: Ludovic Courtès <ludo@gnu.org>
Diffstat (limited to 'gnu/packages/patches')
-rw-r--r--  gnu/packages/patches/timescaledb-flaky-test.patch  107
1 file changed, 0 insertions, 107 deletions
diff --git a/gnu/packages/patches/timescaledb-flaky-test.patch b/gnu/packages/patches/timescaledb-flaky-test.patch
deleted file mode 100644
index 6268bcecad..0000000000
--- a/gnu/packages/patches/timescaledb-flaky-test.patch
+++ /dev/null
@@ -1,107 +0,0 @@
-Use fixed dates in test for consistent results.
-
-Taken from upstream:
-
-  https://github.com/timescale/timescaledb/commit/1d0670e703862b284c241ab797404f851b25b5df
-
-diff --git a/test/expected/copy-12.out b/test/expected/copy-12.out
-index 5cb28a45a2..37abf6f6ff 100644
---- a/test/expected/copy-12.out
-+++ b/test/expected/copy-12.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE:  migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- ----------------------------------------------------------------
-diff --git a/test/expected/copy-13.out b/test/expected/copy-13.out
-index 02bf913eff..89e16fe8e2 100644
---- a/test/expected/copy-13.out
-+++ b/test/expected/copy-13.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE:  migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- ----------------------------------------------------------------
-diff --git a/test/expected/copy-14.out b/test/expected/copy-14.out
-index 02bf913eff..89e16fe8e2 100644
---- a/test/expected/copy-14.out
-+++ b/test/expected/copy-14.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE:  migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- ----------------------------------------------------------------
-diff --git a/test/sql/copy.sql.in b/test/sql/copy.sql.in
-index 91402c2ab8..bba4265064 100644
---- a/test/sql/copy.sql.in
-+++ b/test/sql/copy.sql.in
-@@ -276,8 +276,7 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- 
- SELECT COUNT(*) FROM hyper_copy_large;
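
Why the backported fix worked (a standalone sketch, not part of the patch itself): with fixed bounds, the hourly series always has the same length, so the expected row count is stable. The query below is adapted from the patched test; the explicit ::timestamp casts are added here for illustration, and it runs against plain PostgreSQL without the TimescaleDB extension:

  -- 2022-01-01 00:00 to 2022-01-31 00:00 spans 30 days = 720 hourly steps;
  -- generate_series() also returns the starting point, hence 721 rows.
  SELECT COUNT(*)
  FROM generate_series('2022-01-01'::timestamp,
                       '2022-01-31'::timestamp,
                       INTERVAL '1 hour') AS g1(time);
  --  count
  -- -------
  --    721
  -- (1 row)

The original bounds, now() - INTERVAL '1 months' to now() - INTERVAL '1 day', span a number of hours that depends on the length of the preceding month (a 29-day span gives the old expected 697), which is why the copy test output varied from run to run.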