Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 12 additions & 11 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
version = "0.3.60"
version = "0.3.61"
description = "Tower is the best way to host Python data apps in production"
rust-version = "1.81"
authors = ["Brad Heller <brad@tower.dev>", "Ben Lovell <ben@tower.dev>"]
Expand Down Expand Up @@ -54,6 +54,7 @@ sha2 = "0.10"
snafu = "0.7"
tar = "0.4"
spinners = "4"
tempfile = "3.12"
testutils = { path = "crates/testutils" }
tmpdir = "1.0"
tokio = { version = "1", features = ["full"] }
Expand Down
44 changes: 43 additions & 1 deletion crates/tower-runtime/src/local.rs
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,49 @@ async fn execute_local_app(
));

// Let's wait for the setup to finish. We don't care about the results.
let res = wait_for_process(ctx.clone(), &cancel_token, child).await;
let mut res = wait_for_process(ctx.clone(), &cancel_token, child).await;

// If the requirements.txt install failed, retry with the legacy
// setuptools<82 pin. Some apps (those whose transitive deps rely on
// pkg_resources) need that pin to install successfully; we don't
// apply it by default because it conflicts with apps whose deps
// require setuptools>=82.
if res != 0 && uv.should_use_legacy_setuptools_pin(&working_dir) {
let _ = opts.output_sender.send(Output {
channel: Channel::Setup,
fd: FD::Stdout,
line: "tower: dependency install failed; retrying with setuptools<82 pin for pkg_resources compatibility".to_string(),
time: chrono::Utc::now(),
});

match uv
.sync_with_legacy_setuptools_pin(&working_dir, &env_vars)
.await
{
Err(e) => {
return Err(e.into());
}
Ok(mut retry_child) => {
let stdout = retry_child.stdout.take().expect("no stdout");
tokio::spawn(drain_output(
FD::Stdout,
Channel::Setup,
opts.output_sender.clone(),
BufReader::new(stdout),
));

let stderr = retry_child.stderr.take().expect("no stderr");
tokio::spawn(drain_output(
FD::Stderr,
Channel::Setup,
opts.output_sender.clone(),
BufReader::new(stderr),
));

res = wait_for_process(ctx.clone(), &cancel_token, retry_child).await;
}
}
}
Comment on lines +278 to +320
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Skip the fallback when cancellation already happened.

If the first sync exits with -1 because cancel_token fired, Line 285 still retries and spawns a second install. That makes terminate() restart work instead of stopping it.

Suggested fix
-                if res != 0 && uv.should_use_legacy_setuptools_pin(&working_dir) {
+                if res != 0
+                    && !cancel_token.is_cancelled()
+                    && uv.should_use_legacy_setuptools_pin(&working_dir)
+                {
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
let mut res = wait_for_process(ctx.clone(), &cancel_token, child).await;
// If the requirements.txt install failed, retry with the legacy
// setuptools<82 pin. Some apps (those whose transitive deps rely on
// pkg_resources) need that pin to install successfully; we don't
// apply it by default because it conflicts with apps whose deps
// require setuptools>=82.
if res != 0 && uv.should_use_legacy_setuptools_pin(&working_dir) {
let _ = opts.output_sender.send(Output {
channel: Channel::Setup,
fd: FD::Stdout,
line: "tower: dependency install failed; retrying with setuptools<82 pin for pkg_resources compatibility".to_string(),
time: chrono::Utc::now(),
});
match uv
.sync_with_legacy_setuptools_pin(&working_dir, &env_vars)
.await
{
Err(e) => {
return Err(e.into());
}
Ok(mut retry_child) => {
let stdout = retry_child.stdout.take().expect("no stdout");
tokio::spawn(drain_output(
FD::Stdout,
Channel::Setup,
opts.output_sender.clone(),
BufReader::new(stdout),
));
let stderr = retry_child.stderr.take().expect("no stderr");
tokio::spawn(drain_output(
FD::Stderr,
Channel::Setup,
opts.output_sender.clone(),
BufReader::new(stderr),
));
res = wait_for_process(ctx.clone(), &cancel_token, retry_child).await;
}
}
}
let mut res = wait_for_process(ctx.clone(), &cancel_token, child).await;
// If the requirements.txt install failed, retry with the legacy
// setuptools<82 pin. Some apps (those whose transitive deps rely on
// pkg_resources) need that pin to install successfully; we don't
// apply it by default because it conflicts with apps whose deps
// require setuptools>=82.
if res != 0
&& !cancel_token.is_cancelled()
&& uv.should_use_legacy_setuptools_pin(&working_dir)
{
let _ = opts.output_sender.send(Output {
channel: Channel::Setup,
fd: FD::Stdout,
line: "tower: dependency install failed; retrying with setuptools<82 pin for pkg_resources compatibility".to_string(),
time: chrono::Utc::now(),
});
match uv
.sync_with_legacy_setuptools_pin(&working_dir, &env_vars)
.await
{
Err(e) => {
return Err(e.into());
}
Ok(mut retry_child) => {
let stdout = retry_child.stdout.take().expect("no stdout");
tokio::spawn(drain_output(
FD::Stdout,
Channel::Setup,
opts.output_sender.clone(),
BufReader::new(stdout),
));
let stderr = retry_child.stderr.take().expect("no stderr");
tokio::spawn(drain_output(
FD::Stderr,
Channel::Setup,
opts.output_sender.clone(),
BufReader::new(stderr),
));
res = wait_for_process(ctx.clone(), &cancel_token, retry_child).await;
}
}
}
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@crates/tower-runtime/src/local.rs` around lines 278 - 320, The code retries
with sync_with_legacy_setuptools_pin even when the initial wait_for_process
returned due to cancellation; update the if guard to skip the fallback if
cancellation already happened by checking the cancel token (e.g. require
!cancel_token.is_cancelled() or equivalent) before calling
uv.should_use_legacy_setuptools_pin and spawning the retry; modify the block
around wait_for_process / res so that retry logic
(uv.sync_with_legacy_setuptools_pin, spawning retry_child drains and
wait_for_process on retry_child) only runs when not cancelled to prevent
terminate() restarts.


if res != 0 {
// If the sync process failed, we want to return an error.
Expand Down
3 changes: 3 additions & 0 deletions crates/tower-uv/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,6 @@ seahash = { workspace = true }
tokio = { workspace = true }
tokio-tar = { workspace = true }
tower-telemetry = { workspace = true }

[dev-dependencies]
tempfile = { workspace = true }
107 changes: 74 additions & 33 deletions crates/tower-uv/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -357,44 +357,85 @@ impl Uv {
&self.uv_path, cwd
);

// If there is a requirements.txt, then we can use that to sync.
let mut cmd = Command::new(&self.uv_path);
cmd.kill_on_drop(true)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.current_dir(cwd)
.arg("--color")
.arg("never")
.arg("pip")
.arg("install")
.arg("-r")
.arg(cwd.join("requirements.txt"))
// setuptools 82 removed pkg_resources, but many legacy packages
// still import it without declaring the dependency. Let's always install
// a version that includes pkg_resources for requirements.txt, on the
// basis that requirements.txt projects are probably not using the latest
// and greatest deps (then they'd likely be using pyproject.toml anyway)
// https://github.com/pypa/setuptools/issues/5174
.arg("setuptools<82")
.envs(env_vars);
self.spawn_requirements_install(cwd, env_vars, false).await
} else {
// If there is no pyproject.toml or requirements.txt, then we can't sync.
Err(Error::MissingPyprojectToml)
}
}

#[cfg(unix)]
{
cmd.process_group(0);
}
/// Returns whether a failed `sync()` for this directory is eligible for a
/// retry via [`sync_with_legacy_setuptools_pin`].
///
/// Only applies to projects driven by `requirements.txt`; pyproject-based
/// projects manage their own setuptools dependency. Because `sync()` treats
/// `pyproject.toml` as authoritative when both files exist, this gate must
/// mirror that strategy selection: without the `pyproject.toml` check, a
/// failed pyproject-based sync would fall back to the requirements path and
/// install a different dependency set.
pub fn should_use_legacy_setuptools_pin(&self, cwd: &Path) -> bool {
    cwd.join("requirements.txt").exists() && !cwd.join("pyproject.toml").exists()
}
Comment on lines +367 to +373
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Match the retry gate to sync()'s strategy selection.

sync() treats pyproject.toml as authoritative when both files exist, but this helper returns true for any directory with requirements.txt. That means a failed pyproject-based sync can fall back to the requirements path and install a different dependency set.

Suggested fix
     pub fn should_use_legacy_setuptools_pin(&self, cwd: &Path) -> bool {
-        cwd.join("requirements.txt").exists()
+        cwd.join("requirements.txt").exists() && !cwd.join("pyproject.toml").exists()
     }
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
/// Returns whether a failed `sync()` for this directory is eligible for a
/// retry via [`sync_with_legacy_setuptools_pin`]. Only applies to projects
/// driven by `requirements.txt`; pyproject-based projects manage their own
/// setuptools dependency.
pub fn should_use_legacy_setuptools_pin(&self, cwd: &Path) -> bool {
cwd.join("requirements.txt").exists()
}
/// Returns whether a failed `sync()` for this directory is eligible for a
/// retry via [`sync_with_legacy_setuptools_pin`]. Only applies to projects
/// driven by `requirements.txt`; pyproject-based projects manage their own
/// setuptools dependency.
pub fn should_use_legacy_setuptools_pin(&self, cwd: &Path) -> bool {
cwd.join("requirements.txt").exists() && !cwd.join("pyproject.toml").exists()
}
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@crates/tower-uv/src/lib.rs` around lines 367 - 373, The helper
should_use_legacy_setuptools_pin currently returns true if requirements.txt
exists even when pyproject.toml is present; change it to match sync()'s strategy
selection by returning true only when requirements.txt exists and pyproject.toml
does not (i.e., check both cwd.join("requirements.txt").exists() and
!cwd.join("pyproject.toml").exists()). Update the function body for
should_use_legacy_setuptools_pin to reflect that logic so a pyproject-driven
project won't fall back to the legacy requirements path.


if let Some(dir) = &self.cache_dir {
cmd.arg("--cache-dir").arg(dir);
}
/// Re-runs the `requirements.txt` install with a `setuptools<82` pin appended.
///
/// setuptools 82 removed `pkg_resources`, but many legacy packages still import
/// it without declaring the dependency. Pinning `setuptools<82` keeps it
/// available. Some modern packages (e.g. dlt's transitive graph pinning
/// `setuptools==82.0.1`) make this pin unsatisfiable, so it isn't applied up
/// front — callers should fall back to this only after a plain `sync()`
/// fails for a project using `requirements.txt`.
///
/// https://github.com/pypa/setuptools/issues/5174
pub async fn sync_with_legacy_setuptools_pin(
&self,
cwd: &PathBuf,
env_vars: &HashMap<String, String>,
) -> Result<Child, Error> {
if !cwd.join("requirements.txt").exists() {
return Err(Error::MissingPyprojectToml);
}

let child = cmd.spawn()?;
debug!(
"Retrying UV ({:?}) sync with setuptools<82 pin in {:?}",
&self.uv_path, cwd
);

Ok(child)
} else {
// If there is no pyproject.toml or requirements.txt, then we can't sync.
Err(Error::MissingPyprojectToml)
self.spawn_requirements_install(cwd, env_vars, true).await
}

async fn spawn_requirements_install(
&self,
cwd: &PathBuf,
env_vars: &HashMap<String, String>,
pin_legacy_setuptools: bool,
) -> Result<Child, Error> {
let req_path = cwd.join("requirements.txt");

let mut cmd = Command::new(&self.uv_path);
cmd.kill_on_drop(true)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.current_dir(cwd)
.arg("--color")
.arg("never")
.arg("pip")
.arg("install")
.arg("-r")
.arg(&req_path);

if pin_legacy_setuptools {
cmd.arg("setuptools<82");
}

cmd.envs(env_vars);

#[cfg(unix)]
{
cmd.process_group(0);
}

if let Some(dir) = &self.cache_dir {
cmd.arg("--cache-dir").arg(dir);
}

Ok(cmd.spawn()?)
}

pub async fn run(
Expand Down
Loading
Loading