Commit b229dbf
Merge pull request theseus-rs#93 from theseus-rs/add-zonky-binary-support
feat: add support for installing binaries from the zonky project
2 parents: 2c33fc4 + e16c522

35 files changed: +1183 -77 lines
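The changes below replace the crate-root constant THESEUS_POSTGRESQL_BINARIES_URL with a configuration module (postgresql_archive::configuration::theseus and ::zonky), so callers now select a binary source by URL. The following is a minimal sketch of choosing between the two sources after this commit; theseus::URL is visible in the diff, while zonky::URL and the USE_ZONKY toggle are illustrative assumptions (the zonky module's contents are not shown in this diff).

use postgresql_archive::configuration::{theseus, zonky};
use postgresql_archive::{extract, get_archive, Result, VersionReq};

#[tokio::main]
async fn main() -> Result<()> {
    // theseus::URL is added by this commit; zonky::URL is assumed by analogy
    // and is not confirmed by the diff below. USE_ZONKY is illustrative only.
    let url = if std::env::var("USE_ZONKY").is_ok() {
        zonky::URL
    } else {
        theseus::URL
    };
    let version_req = VersionReq::STAR;
    // Same URL-first API as the updated examples and archive.rs below.
    let (version, archive) = get_archive(url, &version_req).await?;
    let out_dir = tempfile::tempdir()?.into_path();
    extract(url, &archive, &out_dir).await?;
    println!("PostgreSQL {version} extracted to {}", out_dir.display());
    Ok(())
}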

‎Cargo.lock

298 additions & 0 deletions (generated file; diff not rendered)

‎Cargo.toml

3 additions & 0 deletions
@@ -33,6 +33,7 @@ http = "1.1.0"
 human_bytes = { version = "0.4.3", default-features = false }
 lazy_static = "1.5.0"
 num-format = "0.4.4"
+quick-xml = "0.35.0"
 rand = "0.8.5"
 regex = "1.10.5"
 reqwest = { version = "0.12.5", default-features = false }
@@ -52,6 +53,8 @@ thiserror = "1.0.61"
 tokio = "1.38.0"
 tracing = "0.1.40"
 url = "2.5.2"
+xz2 = "0.1.7"
+zip = "2.1.3"
 
 [workspace.metadata.release]
 shared-version = true

‎deny.toml

1 addition & 1 deletion
@@ -18,11 +18,11 @@ allow = [
     "Apache-2.0",
     "BSD-2-Clause",
     "BSD-3-Clause",
+    "BSL-1.0",
     "ISC",
     "MIT",
     "OpenSSL",
     "PostgreSQL",
-    "Unicode-3.0",
     "Unicode-DFS-2016",
 ]

‎examples/archive_async/src/main.rs

3 additions & 4 deletions
@@ -1,13 +1,12 @@
 #![forbid(unsafe_code)]
 #![deny(clippy::pedantic)]
 
-use postgresql_archive::{
-    extract, get_archive, Result, VersionReq, THESEUS_POSTGRESQL_BINARIES_URL,
-};
+use postgresql_archive::configuration::theseus;
+use postgresql_archive::{extract, get_archive, Result, VersionReq};
 
 #[tokio::main]
 async fn main() -> Result<()> {
-    let url = THESEUS_POSTGRESQL_BINARIES_URL;
+    let url = theseus::URL;
     let version_req = VersionReq::STAR;
     let (archive_version, archive) = get_archive(url, &version_req).await?;
     let out_dir = tempfile::tempdir()?.into_path();

‎examples/archive_sync/src/main.rs

3 additions & 2 deletions
@@ -2,10 +2,11 @@
 #![deny(clippy::pedantic)]
 
 use postgresql_archive::blocking::{extract, get_archive};
-use postgresql_archive::{Result, VersionReq, THESEUS_POSTGRESQL_BINARIES_URL};
+use postgresql_archive::configuration::theseus;
+use postgresql_archive::{Result, VersionReq};
 
 fn main() -> Result<()> {
-    let url = THESEUS_POSTGRESQL_BINARIES_URL;
+    let url = theseus::URL;
     let version_req = VersionReq::STAR;
     let (archive_version, archive) = get_archive(url, &version_req)?;
     let out_dir = tempfile::tempdir()?.into_path();

‎postgresql_archive/Cargo.toml

3 additions & 0 deletions
@@ -19,6 +19,7 @@ http = { workspace = true }
 human_bytes = { workspace = true, default-features = false }
 lazy_static = { workspace = true }
 num-format = { workspace = true }
+quick-xml = { workspace = true, features = ["serialize"] }
 regex = { workspace = true }
 reqwest = { workspace = true, default-features = false, features = ["json"] }
 reqwest-middleware = { workspace = true }
@@ -36,6 +37,8 @@ thiserror = { workspace = true }
 tokio = { workspace = true, features = ["full"], optional = true }
 tracing = { workspace = true, features = ["log"] }
 url = { workspace = true }
+xz2 = { workspace = true }
+zip = { workspace = true }
 
 [dev-dependencies]
 criterion = { workspace = true }

‎postgresql_archive/benches/archive.rs

4 additions & 3 deletions
@@ -1,6 +1,7 @@
 use criterion::{criterion_group, criterion_main, Criterion};
 use postgresql_archive::blocking::{extract, get_archive};
-use postgresql_archive::{Result, VersionReq, THESEUS_POSTGRESQL_BINARIES_URL};
+use postgresql_archive::configuration::theseus;
+use postgresql_archive::{Result, VersionReq};
 use std::fs::{create_dir_all, remove_dir_all};
 use std::time::Duration;
 
@@ -10,7 +11,7 @@ fn benchmarks(criterion: &mut Criterion) {
 
 fn bench_extract(criterion: &mut Criterion) -> Result<()> {
     let version_req = VersionReq::STAR;
-    let (_archive_version, archive) = get_archive(THESEUS_POSTGRESQL_BINARIES_URL, &version_req)?;
+    let (_archive_version, archive) = get_archive(theseus::URL, &version_req)?;
 
     criterion.bench_function("extract", |bencher| {
         bencher.iter(|| {
@@ -24,7 +25,7 @@ fn bench_extract(criterion: &mut Criterion) -> Result<()> {
 fn extract_archive(archive: &Vec<u8>) -> Result<()> {
     let out_dir = tempfile::tempdir()?.path().to_path_buf();
     create_dir_all(&out_dir)?;
-    extract(THESEUS_POSTGRESQL_BINARIES_URL, archive, &out_dir)?;
+    extract(theseus::URL, archive, &out_dir)?;
     remove_dir_all(&out_dir)?;
     Ok(())
 }

‎postgresql_archive/src/archive.rs

3 additions & 5 deletions
@@ -7,9 +7,6 @@ use semver::{Version, VersionReq};
 use std::path::Path;
 use tracing::instrument;
 
-pub const THESEUS_POSTGRESQL_BINARIES_URL: &str =
-    "https://github.com/theseus-rs/postgresql-binaries";
-
 /// Gets the version for the specified [version requirement](VersionReq). If a version for the
 /// [version requirement](VersionReq) is not found, then an error is returned.
 ///
@@ -52,19 +49,20 @@ pub async fn extract(url: &str, bytes: &Vec<u8>, out_dir: &Path) -> Result<()> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::configuration::theseus::URL;
 
     #[tokio::test]
     async fn test_get_version() -> Result<()> {
         let version_req = VersionReq::parse("=16.3.0")?;
-        let version = get_version(THESEUS_POSTGRESQL_BINARIES_URL, &version_req).await?;
+        let version = get_version(URL, &version_req).await?;
         assert_eq!(Version::new(16, 3, 0), version);
         Ok(())
     }
 
     #[tokio::test]
     async fn test_get_archive() -> Result<()> {
         let version_req = VersionReq::parse("=16.3.0")?;
-        let (version, bytes) = get_archive(THESEUS_POSTGRESQL_BINARIES_URL, &version_req).await?;
+        let (version, bytes) = get_archive(URL, &version_req).await?;
         assert_eq!(Version::new(16, 3, 0), version);
         assert!(!bytes.is_empty());
         Ok(())
New file: 2 additions & 0 deletions

pub mod theseus;
pub mod zonky;
New file: 7 additions & 0 deletions

mod extractor;
mod matcher;

pub const URL: &str = "https://github.com/theseus-rs/postgresql-binaries";

pub use extractor::extract;
pub use matcher::matcher;
New file: 190 additions & 0 deletions

use crate::Error::Unexpected;
use crate::Result;
use human_bytes::human_bytes;
use num_format::{Locale, ToFormattedString};
use std::fs::{create_dir_all, remove_dir_all, remove_file, rename, File};
use std::io::{copy, BufReader, Cursor};
use std::path::{Path, PathBuf};
use std::thread::sleep;
use std::time::Duration;
use tar::Archive;
use tracing::{debug, instrument, warn};
use xz2::bufread::XzDecoder;
use zip::ZipArchive;

/// Extracts the compressed tar `bytes` to the [out_dir](Path).
///
/// # Errors
/// Returns an error if the extraction fails.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
#[allow(clippy::cast_precision_loss)]
#[instrument(skip(bytes))]
pub fn extract(bytes: &Vec<u8>, out_dir: &Path) -> Result<()> {
    let parent_dir = if let Some(parent) = out_dir.parent() {
        parent
    } else {
        debug!("No parent directory for {}", out_dir.to_string_lossy());
        out_dir
    };

    create_dir_all(parent_dir)?;

    let lock_file = acquire_lock(parent_dir)?;
    // If the directory already exists, then the archive has already been
    // extracted by another process.
    if out_dir.exists() {
        debug!(
            "Directory already exists {}; skipping extraction: ",
            out_dir.to_string_lossy()
        );
        remove_file(&lock_file)?;
        return Ok(());
    }

    let extract_dir = tempfile::tempdir_in(parent_dir)?.into_path();
    debug!("Extracting archive to {}", extract_dir.to_string_lossy());

    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).map_err(|error| Unexpected(error.to_string()))?;
    let mut archive_bytes = Vec::new();
    for i in 0..archive.len() {
        let mut file = archive
            .by_index(i)
            .map_err(|error| Unexpected(error.to_string()))?;
        let file_name = file.name().to_string();
        if file_name.ends_with(".txz") {
            debug!("Found archive file: {file_name}");
            std::io::copy(&mut file, &mut archive_bytes)?;
            break;
        }
    }

    if archive_bytes.is_empty() {
        return Err(Unexpected("Failed to find archive file".to_string()));
    }

    let input = BufReader::new(Cursor::new(archive_bytes));
    let decoder = XzDecoder::new(input);
    let mut archive = Archive::new(decoder);
    let mut files = 0;
    let mut extracted_bytes = 0;

    for archive_entry in archive.entries()? {
        let mut entry = archive_entry?;
        let entry_header = entry.header();
        let entry_type = entry_header.entry_type();
        let entry_size = entry_header.size()?;
        #[cfg(unix)]
        let file_mode = entry_header.mode()?;

        let entry_header_path = entry_header.path()?.to_path_buf();
        let mut entry_name = extract_dir.clone();
        entry_name.push(entry_header_path);

        if let Some(parent) = entry_name.parent() {
            if !parent.exists() {
                create_dir_all(parent)?;
            }
        }

        if entry_type.is_dir() || entry_name.is_dir() {
            create_dir_all(&entry_name)?;
        } else if entry_type.is_file() {
            let mut output_file = File::create(&entry_name)?;
            copy(&mut entry, &mut output_file)?;

            files += 1;
            extracted_bytes += entry_size;

            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                output_file.set_permissions(std::fs::Permissions::from_mode(file_mode))?;
            }
        } else if entry_type.is_symlink() {
            #[cfg(unix)]
            if let Some(symlink_target) = entry.link_name()? {
                let symlink_path = entry_name;
                std::os::unix::fs::symlink(symlink_target.as_ref(), symlink_path)?;
            }
        }
    }

    if out_dir.exists() {
        debug!(
            "Directory already exists {}; skipping rename and removing extraction directory: {}",
            out_dir.to_string_lossy(),
            extract_dir.to_string_lossy()
        );
        remove_dir_all(&extract_dir)?;
    } else {
        debug!(
            "Renaming {} to {}",
            extract_dir.to_string_lossy(),
            out_dir.to_string_lossy()
        );
        rename(extract_dir, out_dir)?;
    }

    if lock_file.is_file() {
        debug!("Removing lock file: {}", lock_file.to_string_lossy());
        remove_file(lock_file)?;
    }

    debug!(
        "Extracting {} files totalling {}",
        files.to_formatted_string(&Locale::en),
        human_bytes(extracted_bytes as f64)
    );

    Ok(())
}

/// Acquires a lock file in the [out_dir](Path) to prevent multiple processes from extracting the
/// archive at the same time.
///
/// # Errors
/// * If the lock file cannot be acquired.
#[instrument(level = "debug")]
fn acquire_lock(out_dir: &Path) -> crate::Result<PathBuf> {
    let lock_file = out_dir.join("postgresql-archive.lock");

    if lock_file.is_file() {
        let metadata = lock_file.metadata()?;
        let created = metadata.created()?;

        if created.elapsed()?.as_secs() > 300 {
            warn!(
                "Stale lock file detected; removing file to attempt process recovery: {}",
                lock_file.to_string_lossy()
            );
            remove_file(&lock_file)?;
        }
    }

    debug!(
        "Attempting to acquire lock: {}",
        lock_file.to_string_lossy()
    );

    for _ in 0..30 {
        let lock = std::fs::OpenOptions::new()
            .create(true)
            .truncate(true)
            .write(true)
            .open(&lock_file);

        match lock {
            Ok(_) => {
                debug!("Lock acquired: {}", lock_file.to_string_lossy());
                return Ok(lock_file);
            }
            Err(error) => {
                warn!("unable to acquire lock: {error}");
                sleep(Duration::from_secs(1));
            }
        }
    }

    Err(Unexpected("Failed to acquire lock".to_string()))
}
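
Usage note: the early out_dir.exists() check above makes extraction idempotent from the caller's point of view; a repeated extraction into the same directory returns early instead of re-extracting. Below is a minimal sketch of that behavior through the crate-root API, assuming the extractor selected by the URL applies the same directory and lock-file guard as the one shown above (only this extractor is visible in the diff).

use postgresql_archive::configuration::theseus;
use postgresql_archive::{extract, get_archive, Result, VersionReq};

#[tokio::main]
async fn main() -> Result<()> {
    let url = theseus::URL;
    let (_version, bytes) = get_archive(url, &VersionReq::STAR).await?;
    let out_dir = tempfile::tempdir()?.into_path().join("postgresql");

    // First call performs the full extraction.
    extract(url, &bytes, &out_dir).await?;
    // Second call finds the target directory already present and returns
    // early, assuming the extractor behind this URL uses the same guard.
    extract(url, &bytes, &out_dir).await?;
    Ok(())
}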
