Skip to content

Commit 5eebdb2

Browse files
authored
Merge pull request #226 from tristan-f-r/oi2p
fix: make omicsintegrator2 work offline
2 parents 336b18c + 4fd96f5 commit 5eebdb2

8 files changed

Lines changed: 74 additions & 9 deletions

File tree

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
From 5dc0e69fa3d1049ae8e1d8f51859335910245ad7 Mon Sep 17 00:00:00 2001
2+
From: "Tristan F.-R." <pub.tristanf@gmail.com>
3+
Date: Mon, 26 May 2025 09:52:34 -0700
4+
Subject: [PATCH] fix: disable graph exports
5+
6+
this allows OI2 to work offline; plus, SPRAS already has graph visualizers.
7+
---
8+
src/__main__.py | 2 +-
9+
1 file changed, 1 insertion(+), 1 deletion(-)
10+
11+
diff --git a/src/__main__.py b/src/__main__.py
12+
index 49ef402..b7afbca 100644
13+
--- a/src/__main__.py
14+
+++ b/src/__main__.py
15+
@@ -80,7 +80,7 @@ def main():
16+
forest, augmented_forest = graph.output_forest_as_networkx(vertex_indices, edge_indices)
17+
18+
#oi.output_networkx_graph_as_graphml_for_cytoscape(augmented_forest, args.output_dir)
19+
- oi.output_networkx_graph_as_interactive_html(augmented_forest, args.output_dir, args.filename+'.html')
20+
+ # oi.output_networkx_graph_as_interactive_html(augmented_forest, args.output_dir, args.filename+'.html')
21+
augmented_forest_df = oi.get_networkx_graph_as_dataframe_of_edges(augmented_forest)
22+
output_dataframe_to_tsv(augmented_forest_df, args.output_dir, args.filename+'.tsv')
23+
24+
--
25+
2.47.0
26+

docker-wrappers/OmicsIntegrator2/Dockerfile

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,5 +2,14 @@
22
# https://github.com/fraenkel-lab/OmicsIntegrator2
33
FROM continuumio/miniconda3:4.9.2
44

5+
COPY 0001-disable-graph-exports.patch .
6+
7+
RUN git clone https://github.com/agitter/OmicsIntegrator2 && \
8+
cd OmicsIntegrator2 && \
9+
git reset --hard 568f170eae388e42e923c478ac9f3308b487760b && \
10+
git config user.email "email@example.com" && \
11+
git config user.name "Non-existent User" && \
12+
git am /0001-disable-graph-exports.patch
13+
514
COPY environment.yml .
615
RUN conda env update --name base --file environment.yml --prune

docker-wrappers/OmicsIntegrator2/README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ The Docker wrapper can be tested with `pytest`.
2525
## Versions:
2626
- v1: Created a named conda environment in the container and used `ENTRYPOINT` to execute commands inside that environment. Not compatible with Singularity.
2727
- v2: Used the environment file to update the base conda environment so the `ENTRYPOINT` command was no longer needed. Compatible with Singularity.
28+
- v3: Patched to work offline by never running `output_networkx_graph_as_interactive_html` ([#226](https://github.com/Reed-CompBio/spras/pull/226))
2829

2930
## TODO
3031
- Attribute https://github.com/fraenkel-lab/OmicsIntegrator2

docker-wrappers/OmicsIntegrator2/environment.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,4 +18,4 @@ dependencies:
1818
- pcst_fast==1.0.7
1919
- goenrich==1.7.0
2020
- axial==0.1.10
21-
- git+https://github.com/agitter/OmicsIntegrator2@568f170eae388e42e923c478ac9f3308b487760b
21+
- git+file:///OmicsIntegrator2

docs/contributing/patching.rst

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
Patching Algorithms
2+
===================
3+
4+
Some wrapped algorithms require extra fixes inside their code. For permissively licensed algorithms,
5+
we use ``.patch`` files generated from ``git format-patch``.
6+
7+
To create patch files using ``git format-patch`` (assuming your wrapped algorithm is in a git repository):
8+
9+
#. Clone the repository locally.
10+
#. Commit the changes you want to make (with good commit messages and descriptions).
11+
12+
* Distinct changes should be made in different commits to make patch files easy to read
13+
* When removing code, we prefer to comment it out instead of deleting it, to make potential stack traces less confusing for end users.
14+
15+
#. Run ``git format-patch HEAD~[N]`` where ``N`` is the number of commits you made.
16+
17+
To apply ``.patch`` files in a ``Dockerfile``, we configure a placeholder ``git`` user and apply the patches using ``git am``:
18+
19+
.. code:: shell
20+
21+
git config user.email "email@example.com"
22+
git config user.name "Non-existent User"
23+
git am /0001-my-patch.patch

docs/index.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,7 @@ methods (PRMs) to omics data.
5555

5656
contributing/index
5757
contributing/maintain
58+
contributing/patching
5859

5960
Indices and tables
6061
==================

spras/containers.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -132,31 +132,32 @@ def env_to_items(environment: dict[str, str]) -> Iterator[str]:
132132
# TODO consider a better default environment variable
133133
# Follow docker-py's naming conventions (https://docker-py.readthedocs.io/en/stable/containers.html)
134134
# Technically the argument is an image, not a container, but we use container here.
135-
def run_container(framework: str, container_suffix: str, command: List[str], volumes: List[Tuple[PurePath, PurePath]], working_dir: str, out_dir: str | os.PathLike, environment: Optional[dict[str, str]] = None):
135+
def run_container(framework: str, container_suffix: str, command: List[str], volumes: List[Tuple[PurePath, PurePath]], working_dir: str, out_dir: str | os.PathLike, environment: Optional[dict[str, str]] = None, network_disabled = False):
136136
"""
137137
Runs a command in the container using Singularity or Docker
138138
@param framework: singularity or docker
139139
@param container_suffix: name of the DockerHub container without the 'docker://' prefix
140140
@param command: command to run in the container
141141
@param volumes: a list of volumes to mount where each item is a (source, destination) tuple
142142
@param working_dir: the working directory in the container
143-
@param environment: environment variables to set in the container
144143
@param out_dir: output directory for the rule's artifacts. Only passed into run_container_singularity for the purpose of profiling.
144+
@param environment: environment variables to set in the container
145+
@param network_disabled: Disables the network on the container. Only works for docker for now. This acts as a 'runtime assertion' that a container works w/o networking.
145146
@return: output from Singularity execute or Docker run
146147
"""
147148
normalized_framework = framework.casefold()
148149

149150
container = config.config.container_prefix + "/" + container_suffix
150151
if normalized_framework == 'docker':
151-
return run_container_docker(container, command, volumes, working_dir, environment)
152+
return run_container_docker(container, command, volumes, working_dir, environment, network_disabled)
152153
elif normalized_framework == 'singularity' or normalized_framework == "apptainer":
153154
return run_container_singularity(container, command, volumes, working_dir, out_dir, environment)
154155
elif normalized_framework == 'dsub':
155156
return run_container_dsub(container, command, volumes, working_dir, environment)
156157
else:
157158
raise ValueError(f'{framework} is not a recognized container framework. Choose "docker", "dsub", or "singularity".')
158159

159-
def run_container_and_log(name: str, framework: str, container_suffix: str, command: List[str], volumes: List[Tuple[PurePath, PurePath]], working_dir: str, out_dir: str | os.PathLike, environment: Optional[dict[str, str]] = None):
160+
def run_container_and_log(name: str, framework: str, container_suffix: str, command: List[str], volumes: List[Tuple[PurePath, PurePath]], working_dir: str, out_dir: str | os.PathLike, environment: Optional[dict[str, str]] = None, network_disabled=False):
160161
"""
161162
Runs a command in the container using Singularity or Docker with associated pretty printed messages.
162163
@param name: the display name of the running container for logging purposes
@@ -166,14 +167,15 @@ def run_container_and_log(name: str, framework: str, container_suffix: str, comm
166167
@param volumes: a list of volumes to mount where each item is a (source, destination) tuple
167168
@param working_dir: the working directory in the container
168169
@param environment: environment variables to set in the container
170+
@param network_disabled: Disables the network on the container. Only works for docker for now. This acts as a 'runtime assertion' that a container works w/o networking.
169171
@return: output from Singularity execute or Docker run
170172
"""
171173
if not environment:
172174
environment = {'SPRAS': 'True'}
173175

174176
print('Running {} on container framework "{}" on env {} with command: {}'.format(name, framework, list(env_to_items(environment)), ' '.join(command)), flush=True)
175177
try:
176-
out = run_container(framework=framework, container_suffix=container_suffix, command=command, volumes=volumes, working_dir=working_dir, out_dir=out_dir, environment=environment)
178+
out = run_container(framework=framework, container_suffix=container_suffix, command=command, volumes=volumes, working_dir=working_dir, out_dir=out_dir, environment=environment, network_disabled=network_disabled)
177179
if out is not None:
178180
if isinstance(out, list):
179181
out = ''.join(out)
@@ -199,7 +201,7 @@ def run_container_and_log(name: str, framework: str, container_suffix: str, comm
199201
raise err
200202

201203
# TODO any issue with creating a new client each time inside this function?
202-
def run_container_docker(container: str, command: List[str], volumes: List[Tuple[PurePath, PurePath]], working_dir: str, environment: Optional[dict[str, str]] = None):
204+
def run_container_docker(container: str, command: List[str], volumes: List[Tuple[PurePath, PurePath]], working_dir: str, environment: Optional[dict[str, str]] = None, network_disabled=False):
203205
"""
204206
Runs a command in the container using Docker.
205207
Attempts to automatically correct file owner and group for new files created by the container, setting them to the
@@ -244,6 +246,7 @@ def run_container_docker(container: str, command: List[str], volumes: List[Tuple
244246
stderr=True,
245247
volumes=bind_paths,
246248
working_dir=working_dir,
249+
network_disabled=network_disabled,
247250
environment=environment).decode('utf-8')
248251

249252
# TODO does this cleanup need to still run even if there was an error in the above run command?
@@ -278,6 +281,7 @@ def run_container_docker(container: str, command: List[str], volumes: List[Tuple
278281
stderr=True,
279282
volumes=bind_paths,
280283
working_dir=working_dir,
284+
network_disabled=network_disabled,
281285
environment=environment).decode('utf-8')
282286

283287
# Raised on non-Unix systems

spras/omicsintegrator2.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -128,14 +128,15 @@ def run(edges=None, prizes=None, output_file=None, w=None, b=None, g=None, noise
128128
if seed is not None:
129129
command.extend(['--seed', str(seed)])
130130

131-
container_suffix = "omics-integrator-2:v2"
131+
container_suffix = "omics-integrator-2:v3"
132132
run_container_and_log('Omics Integrator 2',
133133
container_framework,
134134
container_suffix,
135135
command,
136136
volumes,
137137
work_dir,
138-
out_dir)
138+
out_dir,
139+
network_disabled=True)
139140

140141
# TODO do we want to retain other output files?
141142
# TODO if deleting other output files, write them all to a tmp directory and copy

0 commit comments

Comments
 (0)