3 changes: 3 additions & 0 deletions planemo/commands/cmd_run.py
@@ -28,6 +28,9 @@
@options.run_output_directory_option()
@options.run_output_json_option()
@options.run_download_outputs_option()
@options.run_upload_instance_url_option()
@options.run_upload_api_key_option()
@options.write_run_archive_to_file_option()
@options.engine_options()
@options.test_options()
@command_function
49 changes: 48 additions & 1 deletion planemo/galaxy/activity.py
@@ -1,6 +1,8 @@
"""Module provides generic interface to running Galaxy tools and workflows."""

import os
import requests
import shutil
import sys
import tempfile
import time
@@ -42,7 +44,11 @@
)

from planemo.galaxy.api import summarize_history
from planemo.io import wait_on
from planemo.io import (
error,
wait_on,
warn,
)
from planemo.runnable import (
ErrorRunResponse,
get_outputs,
@@ -236,6 +242,47 @@ def _execute( # noqa C901
ctx.vlog("collecting outputs from run...")
run_response.collect_outputs(output_directory)
ctx.vlog("collecting outputs complete")

if kwds.get("upload_instance_url") or kwds.get("archive_file"):
ctx.vlog(f"Preparing galaxy run export, history {history_id}.")
archive_file = kwds.get("archive_file")

jeha_id = user_gi.histories.export_history(
Member: We'll want to export the invocation here.

history_id=history_id,
wait=True,
maxwait=3600,
)

with tempfile.TemporaryDirectory() as temp_dir:
archive_file_output = os.path.join(temp_dir, "archive.tar.gz")

with open(archive_file_output, 'bw') as archive:
user_gi.histories.download_history(
history_id=history_id,
jeha_id=jeha_id,
outf=archive
)

if kwds.get("arhicve_file"):
Member: There's a typo here. Can you add tests?

Contributor Author: Yes, that's a typo.

I have added a test for the download option.

I'm considering writing a test that involves uploading a history to a separate Galaxy instance, and I'd appreciate some guidance on how to approach this. Would it make sense to look at the existing serve tests and try spinning up a Galaxy instance manually? Or would it be better to use the instance that Planemo starts during testing?

Member: Please work with invocations though; the history export will not include the invocation, so for the live use case that's not so interesting since we can't show much there.

"Would it make sense to look at the existing serve tests and try spinning up a Galaxy instance manually? Or would it be better to use the instance that Planemo starts during testing?"

Yes, the test should manage the instance(s), probably best done using _launch_thread_and_wait as in the serve tests. I would implement a separate export command (where export takes an existing invocation id) and an import command, so you can export from the first instance to a file and import it into a new instance.
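For context, a minimal sketch of what such a separate export/import command pair could look like, following the @options/@command_function pattern visible in cmd_run.py; the command names, options, and bodies below are illustrative assumptions, not part of this PR:

```python
# Hypothetical sketch only: command names, options, and bodies are assumptions
# mirroring the @options/@command_function pattern used by planemo commands.
import click

from planemo import options
from planemo.cli import command_function


@click.command("invocation_export")
@click.argument("invocation_id", type=str)
@click.option("--galaxy_url", required=True, help="Source Galaxy instance URL.")
@click.option("--galaxy_api_key", required=True, help="API key on the source instance.")
@options.write_run_archive_to_file_option()  # option added in this PR
@command_function
def cli_export(ctx, invocation_id, **kwds):
    """Export an existing invocation from a live Galaxy instance to an archive file."""
    # Would trigger the invocation export on the source instance and write
    # the resulting archive to kwds["archive_file"].


@click.command("invocation_import")
@click.argument("archive_file", type=click.Path(exists=True))
@click.option("--galaxy_url", required=True, help="Target Galaxy instance URL.")
@click.option("--galaxy_api_key", required=True, help="API key on the target instance.")
@command_function
def cli_import(ctx, archive_file, **kwds):
    """Import a previously exported invocation archive into a Galaxy instance."""
    # Would POST the archive to the target instance's import endpoint.
```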

Contributor Author: I have looked into using invocations instead; I didn't realize that everything wasn't exported. I have no problems exporting the invocation using:

1 - /api/invocations/{invocation_id}/prepare_store_download
2 - /api/short_term_storage/{storage_id}/ready
3 - /api/short_term_storage/{storage_id}

This should maybe be implemented in BioBlend, similar to the download_history function?

However, I haven't managed to import the exported invocation. I looked at the Galaxy client, which seems to use /api/histories to import an invocation, but I haven't been able to get it to work. I have also not been able to import it using the web interface, even though I exported it using the same interface.
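For reference, a minimal sketch of that three-step flow using plain requests; the base URL, API key header, store format, and the storage_request_id response field are assumptions, and error handling and timeouts are omitted:

```python
# Minimal sketch of the prepare_store_download / short_term_storage flow
# described above. Base URL, API key, and invocation id are placeholders.
import time

import requests

GALAXY_URL = "https://usegalaxy.example.org"  # placeholder
HEADERS = {"x-api-key": "<api-key>"}  # placeholder


def download_invocation_archive(invocation_id, out_path, model_store_format="rocrate.zip"):
    # 1 - ask Galaxy to prepare an archive of the invocation
    prepare = requests.post(
        f"{GALAXY_URL}/api/invocations/{invocation_id}/prepare_store_download",
        headers=HEADERS,
        json={"model_store_format": model_store_format},
    )
    prepare.raise_for_status()
    storage_id = prepare.json()["storage_request_id"]  # assumed response field

    # 2 - poll until the short term storage object is ready
    while True:
        ready = requests.get(
            f"{GALAXY_URL}/api/short_term_storage/{storage_id}/ready", headers=HEADERS
        )
        ready.raise_for_status()
        if ready.json():
            break
        time.sleep(5)

    # 3 - download the archive
    with requests.get(
        f"{GALAXY_URL}/api/short_term_storage/{storage_id}", headers=HEADERS, stream=True
    ) as resp:
        resp.raise_for_status()
        with open(out_path, "wb") as fh:
            for chunk in resp.iter_content(chunk_size=8192):
                fh.write(chunk)
```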

Contributor Author: Looks like I can import an invocation exported as an RO-Crate, but not when I have exported it as "Compressed File: Export the invocation to a compressed File containing the invocation data in Galaxy native format."

Member: @Smeds If you'd like to implement invocation export in BioBlend, it should be pretty similar to https://github.com/galaxyproject/bioblend/blob/9bdc2ce57c4443d29699ece7437d2bdf3692d4dc/bioblend/galaxy/invocations/__init__.py#L441, I think. Pull requests welcome!

Member: Interesting, the tar.gz export/import worked for me. Do you still have the archive?

Contributor Author: Could be an issue with test.galaxyproject.org, which I was using; it seems to work on usegalaxy.org.

Is there any preference for which export format to use, RO-Crate or the compressed Galaxy native format?

Member: Go with RO-Crate imo, it's essentially the native format with a bit of extra metadata.

shutil.copy(archive_file_output, archive_file)
ctx.vlog(f"Archive {kwds.get('arhicve_file')} created.")

if kwds.get("upload_instance_url"):
upload_url = kwds.get("upload_instance_url", None)
upload_key = kwds.get("upload_api_key", None)

upload_url = f"{upload_url}/api/histories"
if upload_key is None:
warn("No API key provided")
else:
upload_url = f"{upload_url}?key={upload_key}"

response = requests.post(upload_url, files={'archive_file': open(archive_file_output, 'rb')})
if response.status_code == 200:
ctx.vlog(f"Upload run to {kwds.get('upload_instance_url')}")
else:
error(f"Failed to upload run to {kwds.get('upload_instance_url')}, status code: {response.status_code}, error {response.text}")

return run_response


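As a side note on the upload step above (the raw requests.post to /api/histories): BioBlend already wraps the same history import. A minimal sketch, assuming bioblend is available; the URL, key, and archive path are placeholders:

```python
# Minimal sketch using BioBlend instead of a raw POST to /api/histories.
# URL, API key, and archive path are placeholders.
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://usegalaxy.example.org", key="<api-key>")
gi.histories.import_history(file_path="archive.tar.gz")
```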
42 changes: 42 additions & 0 deletions planemo/options.py
@@ -314,6 +314,48 @@ def run_download_outputs_option():
)


def run_upload_instance_url_option():
return planemo_option(
"--upload_instance_url",
type=str,
default=None,
help=(
"Upload run to a galaxy instance located at the provided url."
"Ex: https://usegalaxy.org."
),
)


def run_upload_api_key_option():
return planemo_option(
"--upload_api_key",
type=str,
default=None,
help=(
"API key used to upload run to separate instance"
),
)


def validate_archive_type_callback(ctx, param, value):
if value is not None and not value.endswith('.tar.gz'):
ctx.fail(f"archive_file ({value}), filename must end with tar.gz")
return value


def write_run_archive_to_file_option():
return planemo_option(
"--archive_file",
type=click.Path(exists=False, file_okay=True, dir_okay=False, resolve_path=True),
default=None,
help=(
"Compress the run and write it to the provided file path."
"The archive can be imported to a Galaxy instance"
),
callback=validate_archive_type_callback
)


def publish_dockstore_option():
return planemo_option(
"--publish/--no_publish",