gnu: TimescaleDB: Update to 2.7.0.

* gnu/packages/databases.scm (timescaledb): Update to 2.7.0.
[source](snippet): Remove more files.
[source](patches): New field.
* gnu/packages/patches/timescaledb-flaky-test.patch: New file.
* gnu/local.mk (dist_patch_DATA): Adjust accordingly.
Marius Bakke 2022-06-14 17:58:08 +02:00
parent b98a61a8f8
commit d73b88d826
GPG key ID A2A06DF2A33A54FA (no known key found for this signature in the database)
3 changed files with 115 additions and 3 deletions

gnu/local.mk

@@ -1844,6 +1844,7 @@ dist_patch_DATA = \
   %D%/packages/patches/texi2html-i18n.patch \
   %D%/packages/patches/thefuck-test-environ.patch \
   %D%/packages/patches/tidy-CVE-2015-5522+5523.patch \
+  %D%/packages/patches/timescaledb-flaky-test.patch \
   %D%/packages/patches/tinyxml-use-stl.patch \
   %D%/packages/patches/tipp10-disable-downloader.patch \
   %D%/packages/patches/tipp10-fix-compiling.patch \

gnu/packages/databases.scm

@@ -1320,7 +1320,7 @@ pictures, sounds, or video.")
 (define-public timescaledb
   (package
     (name "timescaledb")
-    (version "2.5.1")
+    (version "2.7.0")
     (source (origin
               (method git-fetch)
               (uri (git-reference
@@ -1329,14 +1329,18 @@ pictures, sounds, or video.")
               (file-name (git-file-name name version))
               (sha256
                (base32
-                "174dm3higa0i7al9r2hdv5hk36pd0d5fnqj57w5a350kxshxyvyw"))
+                "18wszj8ia5rs4y4zkyfb0f5z4y1g7ac3jym748nbkbszhxmq7nc7"))
+              (patches (search-patches "timescaledb-flaky-test.patch"))
               (modules '((guix build utils)))
               (snippet
                ;; Remove files carrying the proprietary TIMESCALE license.
                '(begin
                   (delete-file-recursively "tsl")
                   (for-each delete-file
-                            '("test/perl/AccessNode.pm"
+                            '("scripts/c_license_header-timescale.h"
+                              "scripts/license_tsl.spec"
+                              "scripts/sql_license_tsl.sql"
+                              "test/perl/AccessNode.pm"
                               "test/perl/DataNode.pm"
                               "test/perl/TimescaleNode.pm"))))))
     (build-system cmake-build-system)

gnu/packages/patches/timescaledb-flaky-test.patch (new file)

@@ -0,0 +1,107 @@
Use fixed dates in test for consistent results.
Taken from upstream:
https://github.com/timescale/timescaledb/commit/1d0670e703862b284c241ab797404f851b25b5df
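
For context: generate_series() includes both endpoints, so the fixed range always yields 30 * 24 + 1 = 721 rows, whereas the old now()-relative range spanned one month minus one day and varied with the calendar (the former expected count of 697 matches a 29-day span). A minimal sanity check, assuming a stock PostgreSQL session (not part of the upstream change):

  -- Count the points produced by the new, fixed series.
  SELECT count(*)
  FROM generate_series(TIMESTAMPTZ '2022-01-01',
                       TIMESTAMPTZ '2022-01-31',
                       INTERVAL '1 hour');
  -- => 721, regardless of when the test runs.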
diff --git a/test/expected/copy-12.out b/test/expected/copy-12.out
index 5cb28a45a2..37abf6f6ff 100644
--- a/test/expected/copy-12.out
+++ b/test/expected/copy-12.out
@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
SELECT time,
random() AS value
FROM
-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
- INTERVAL '1 hour') AS g1(time)
+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
ORDER BY time;
SELECT COUNT(*) FROM hyper_copy_large;
count
-------
- 697
+ 721
(1 row)
-- Migrate data to chunks by using copy
@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
SELECT COUNT(*) FROM hyper_copy_large;
count
-------
- 697
+ 721
(1 row)
----------------------------------------------------------------
diff --git a/test/expected/copy-13.out b/test/expected/copy-13.out
index 02bf913eff..89e16fe8e2 100644
--- a/test/expected/copy-13.out
+++ b/test/expected/copy-13.out
@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
SELECT time,
random() AS value
FROM
-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
- INTERVAL '1 hour') AS g1(time)
+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
ORDER BY time;
SELECT COUNT(*) FROM hyper_copy_large;
count
-------
- 697
+ 721
(1 row)
-- Migrate data to chunks by using copy
@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
SELECT COUNT(*) FROM hyper_copy_large;
count
-------
- 697
+ 721
(1 row)
----------------------------------------------------------------
diff --git a/test/expected/copy-14.out b/test/expected/copy-14.out
index 02bf913eff..89e16fe8e2 100644
--- a/test/expected/copy-14.out
+++ b/test/expected/copy-14.out
@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
SELECT time,
random() AS value
FROM
-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
- INTERVAL '1 hour') AS g1(time)
+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
ORDER BY time;
SELECT COUNT(*) FROM hyper_copy_large;
count
-------
- 697
+ 721
(1 row)
-- Migrate data to chunks by using copy
@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
SELECT COUNT(*) FROM hyper_copy_large;
count
-------
- 697
+ 721
(1 row)
----------------------------------------------------------------
diff --git a/test/sql/copy.sql.in b/test/sql/copy.sql.in
index 91402c2ab8..bba4265064 100644
--- a/test/sql/copy.sql.in
+++ b/test/sql/copy.sql.in
@@ -276,8 +276,7 @@ INSERT INTO hyper_copy_large
SELECT time,
random() AS value
FROM
-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
- INTERVAL '1 hour') AS g1(time)
+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
ORDER BY time;
SELECT COUNT(*) FROM hyper_copy_large;