Commit fef6ce07 authored by wuzekai's avatar wuzekai

initial commit

parents
Pipeline #615 failed with stages

Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.

#!/bin/bash
# Fail (non-zero exit) if the total line count of the legacy Galaxy web
# controllers grows beyond MAX_LINE_COUNT — the goal is to shrink them.
set -e
MAX_LINE_COUNT=19900
project_dir=$(dirname "$0")/..
cd "$project_dir"
# wc -l emits a per-file count plus a final "total" line; keep only the total.
# NUL-delimited find/xargs so unusual filenames cannot break the count.
line_count=$(find lib/galaxy/webapps/galaxy/controllers/ -name '*.py' -print0 | xargs -0 wc -l | tail -n 1 | awk '{ print $1 }')
# Under `set -e` a failing test exits the script with a non-zero status,
# which is exactly what CI needs — no `bash -c` wrapper required.
[ "$line_count" -lt "$MAX_LINE_COUNT" ]
#!/bin/bash
# Fail (non-zero exit) if the number of legacy .mako templates grows beyond
# MAX_MAKO_COUNT — the goal is to shrink them over time.
set -e
MAX_MAKO_COUNT=330
project_dir=$(dirname "$0")/..
cd "$project_dir"
# `tr -d ' '` strips the leading padding some wc implementations emit, which
# the previous `cut -f1 -d' '` mishandled on BSD wc output.
mako_count=$(find templates -iname '*.mako' | wc -l | tr -d ' ')
# Under `set -e` a failing comparison exits non-zero for CI.
[ "$mako_count" -lt "$MAX_MAKO_COUNT" ]
#!/bin/sh
# Guard against pytest silently skipping tests: collect once with the default
# class-name patterns and once with extra patterns (*Test, *TestCase). If the
# second collection finds more tests, some test class is named so that the
# default pytest configuration will not collect it.
TEST=$(mktemp)
TEST_EXTRA_CLASSES=$(mktemp)
# Remove the temp files on every exit path (the original leaked them).
trap 'rm -f "$TEST" "$TEST_EXTRA_CLASSES"' EXIT
pytest --collect-only --ignore=test/functional lib/galaxy_test/ test/ > "$TEST"
pytest -o python_classes='Test* *Test *TestCase' --collect-only --ignore=test/functional lib/galaxy_test/ test/ > "$TEST_EXTRA_CLASSES"
# Pull the "N tests collected" number out of each collection report.
n_tests=$(grep 'tests collected' "$TEST" | sed -e 's/[^0-9]*\([0-9]*\) tests collected.*/\1/')
n_tests_extra_classes=$(grep 'tests collected' "$TEST_EXTRA_CLASSES" | sed -e 's/[^0-9]*\([0-9]*\) tests collected.*/\1/')
if [ "$n_tests_extra_classes" -gt "$n_tests" ]; then
    echo "New test class with name not starting with Test introduced, change it to have tests collected by pytest"
    diff "$TEST" "$TEST_EXTRA_CLASSES"
    exit 1
fi
#!/bin/bash
set -e
# Setting NODE_PATH and config appropriately, using dependencies from
# client/node_modules, run eslint against args passed to this script.
# Primary use case here is for a pre-commit check.
# All arguments (file paths and/or extra eslint flags) are forwarded
# verbatim via "$@".
NODE_PATH=src/ node client/node_modules/eslint/bin/eslint.js -c client/.eslintrc.js "$@"
#!/bin/sh
# Smoke-test that a Galaxy webapp starts and answers HTTP.
# Usage: $0 [galaxy|reports]   (no argument defaults to galaxy)
# Exits 0 if the server responded within TRIES seconds, 1 otherwise; always
# prints the most specific startup log it can find.
case "$1" in
    galaxy|"")
        SCRIPT=./run.sh
        PORT=8080
        LOGFILE=galaxy.log
        GRAVITY_LOGFILE=database/gravity/log/gunicorn.log
        SUPERVISORD_LOGFILE=database/gravity/supervisor/supervisord.log
        ;;
    reports)
        SCRIPT=./run_reports.sh
        PORT=9001
        LOGFILE=reports_webapp.log
        ;;
    *)
        echo "ERROR: Unrecognized app"
        exit 1
        ;;
esac
TRIES=120
URL="http://localhost:$PORT"
EXIT_CODE=1
i=0
echo "Testing for correct startup:"
# Start the daemon, then poll once per second until it answers or we give up.
$SCRIPT --daemon && \
while [ "$i" -le "$TRIES" ]; do
    curl --max-time 1 "$URL" && EXIT_CODE=0 && break
    sleep 1
    i=$((i + 1))
done
$SCRIPT --skip-wheels --stop-daemon
echo "exit code:$EXIT_CODE, showing startup log:"
# Show the most specific log available. Each elif only runs when the previous
# file was absent, so re-testing it (as the original did with the obsolescent
# `-a` operator) is unnecessary. For the reports app the gravity/supervisord
# variables are unset and `[ -f "" ]` is simply false.
if [ -f "$LOGFILE" ]; then
    cat "$LOGFILE"
elif [ -f "$GRAVITY_LOGFILE" ]; then
    echo "Warning: $LOGFILE does not exist, showing gravity startup log instead"
    cat "$GRAVITY_LOGFILE"
elif [ -f "$SUPERVISORD_LOGFILE" ]; then
    echo "Warning: $LOGFILE and $GRAVITY_LOGFILE do not exist, showing supervisord startup log instead"
    cat "$SUPERVISORD_LOGFILE"
else
    echo "ERROR: No log files found!"
    ls -lR database/gravity
fi
exit "$EXIT_CODE"
lib/galaxy/jobs/metrics
lib/galaxy/exceptions
lib/galaxy/containers
.git
.tox
.venv*
packages/*/.venv
packages/*/build
packages/*/dist
packages/venv
node_modules
database
doc/build
eggs
lib/galaxy/web/proxy/js/node_modules
lib/tool_shed/test/test_data/repos
static/maps
static/scripts
test/functional/tools/cwl_tools/v1.?/
build
dist
#!/bin/bash
set -e
# Run flake8 over the whole tree; `paste -sd,` joins the one-path-per-line
# ignore list into the comma-separated form --exclude expects. The command
# substitution is intentionally unquoted so the single joined token splits
# normally (it contains no spaces).
flake8 --exclude $(paste -sd, .ci/flake8_ignorelist.txt) .
# Apply stricter rules for the directories shared with Pulsar
flake8 --ignore=E203,D --max-line-length=150 lib/galaxy/jobs/runners/util/
#!/bin/bash
set -e
# Docstring linting. The codes below are described for reference; the set
# actually enforced is the --select list in $args.
# D100 - Missing docstring in public module.
# D2XX - Whitespace issues.
# D3XX - Quoting issues.
# D401 - First line should be in imperative mood
# D403 - First word of the first line should be properly capitalized
# NOTE(review): D401 is described above but absent from --select below —
# confirm whether it should be enforced.
args="--ignore=D --select=D100,D201,D202,D206,D207,D208,D209,D211,D3,D403"
# If the first argument is --include, lint the modules expected to pass. If
# the first argument is --exclude, lint all modules the full Galaxy linter lints
# (this will fail).
if [ "$1" = "--include" ];
then
# $args and the paste output are intentionally unquoted: word-splitting into
# separate flake8 arguments is required here.
flake8 $args $(paste -sd' ' .ci/flake8_docstrings_include_list.txt)
else
flake8 $args --exclude $(paste -sd, .ci/flake8_ignorelist.txt) .
fi
hda
implementors
purgable
# Kubernetes Deployment providing the backing services (PostgreSQL and
# RabbitMQ) used by Galaxy CI testing.
apiVersion: apps/v1
kind: Deployment
metadata:
  labels:
    app.kubernetes.io/name: testing
  name: testing
spec:
  replicas: 1
  selector:
    # NOTE(review): selector/template use "test" while the Deployment's own
    # label above is "testing". Legal (only selector and template labels must
    # match), but confirm the asymmetry is intended.
    matchLabels:
      app.kubernetes.io/name: test
  template:
    metadata:
      labels:
        app.kubernetes.io/name: test
    spec:
      containers:
        - image: postgres:17
          name: postgres
          ports:
            - containerPort: 5432
          env:
            - name: POSTGRES_DB
              value: postgres
            - name: POSTGRES_USER
              value: postgres
            - name: POSTGRES_PASSWORD
              value: postgres
        - image: rabbitmq
          name: rabbitmq
          ports:
            - containerPort: 5672
#!/usr/bin/env bash
# Deploy the CI backing services (PostgreSQL + RabbitMQ) to a Kubernetes
# cluster and derive Galaxy test connection URLs from the service's cluster IP.
set -ex
# BASH_SOURCE (not $0) gives the right path even when this file is sourced.
SCRIPTDIR=$(dirname "${BASH_SOURCE[0]}")
kubectl apply -f "$SCRIPTDIR/deployment.yaml"
kubectl expose deployment testing --type=LoadBalancer --name=testing-service
CLUSTER_IP=$(kubectl get service testing-service -o jsonpath='{.spec.clusterIP}')
GALAXY_TEST_DBURI="postgresql://postgres:postgres@${CLUSTER_IP}:5432/galaxy?client_encoding=utf-8"
GALAXY_TEST_AMQP_URL="amqp://${CLUSTER_IP}:5672//"
# NOTE(review): these exports only reach the caller if this script is sourced
# (the use of BASH_SOURCE suggests it is) — confirm against the CI invocation.
export GALAXY_TEST_DBURI
export GALAXY_TEST_AMQP_URL
#!/bin/sh
# Validate the Galaxy tool XSD schema, then validate all test tools against it.
cd "$(dirname "$0")"/..
xsd_path="lib/galaxy/tools/xsd/galaxy.xsd"
# Lint the XSD
xmllint --noout "$xsd_path"
test_tools_path='test/functional/tools'
# test all test tools except upload.xml which uses a non-standard conditional
# (without param) which does not survive xsd validation; *_conf.xml files are
# tool configuration files, not tools.
# Use find instead of parsing `ls` output — robust against unusual filenames.
tool_files_list=$(find "$test_tools_path" -maxdepth 1 -name '*.xml' ! -name '*_conf.xml' ! -name 'upload.xml')
# Intentionally unquoted: validate_tools.sh expects one filename per argument.
sh scripts/validate_tools.sh $tool_files_list
# Python CircleCI 2.0 configuration file
version: 2
variables:
restore_repo_cache: &restore_repo_cache
restore_cache:
keys:
- v1-repo-{{ .Environment.CIRCLE_SHA1 }}
install_tox: &install_tox
run: pip install tox
set_workdir: &set_workdir
working_directory: ~/repo
requires_get_code: &requires_get_code
requires:
- get_code
jobs:
get_code:
docker:
- image: cimg/python:3.10
<<: *set_workdir
steps:
# Replace standard code checkout with shallow clone to speed things up.
- run:
name: Checkout code
command: |-
# Add github.com to known hosts
mkdir -p ~/.ssh
echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=
' >> ~/.ssh/known_hosts
# Add the user ssh key and set correct perms
(umask 077; touch ~/.ssh/id_rsa)
chmod 0600 ~/.ssh/id_rsa
echo "$CHECKOUT_KEY" > ~/.ssh/id_rsa
# Use git+ssh instead of https
git config --global url."ssh://git@github.com".insteadOf "https://github.com" || true
git config --global gc.auto 0 || true
# Shallow clone
git clone --depth=1 "${CIRCLE_REPOSITORY_URL}" .
if [[ -n "${CIRCLE_PR_NUMBER}" ]]; then
# Update PR refs for testing.
FETCH_REFS="${FETCH_REFS} +refs/pull/${CIRCLE_PR_NUMBER}/head:pr/${CIRCLE_PR_NUMBER}/head"
FETCH_REFS="${FETCH_REFS} +refs/pull/${CIRCLE_PR_NUMBER}/merge:pr/${CIRCLE_PR_NUMBER}/merge"
# Retrieve the refs
git fetch --force origin ${FETCH_REFS}
# Checkout PR merge ref.
git checkout -f "pr/${CIRCLE_PR_NUMBER}/merge"
# Test for *some* merge conflicts.
git branch --merged | grep "pr/${CIRCLE_PR_NUMBER}/head" > /dev/null
else
if [ -n "$CIRCLE_TAG" ]; then
git fetch --depth=1 --force origin "refs/tags/${CIRCLE_TAG}"
else
git fetch --depth=1 --force origin "$CIRCLE_BRANCH:remotes/origin/$CIRCLE_BRANCH"
fi
if [ -n "$CIRCLE_TAG" ]; then
git reset --hard "$CIRCLE_SHA1"
git checkout "$CIRCLE_TAG"
elif [ -n "$CIRCLE_BRANCH" ]; then
git reset --hard "$CIRCLE_SHA1"
git checkout -B "$CIRCLE_BRANCH"
fi
git reset --hard "${CIRCLE_SHA1}"
fi
- save_cache:
key: v1-repo-{{ .Environment.CIRCLE_SHA1 }}
paths:
- ~/repo
validate_test_tools:
docker:
- image: cimg/python:3.10
<<: *set_workdir
steps:
- *restore_repo_cache
- run: sudo apt-get update
- run: sudo apt-get install -y libxml2-utils
- *install_tox
- run: tox -e validate_test_tools
workflows:
version: 2
get_code_and_test:
jobs:
- get_code
- validate_test_tools:
<<: *requires_get_code
Persona: You are a senior engineer responsible for maintaining the Galaxy project.
Arguments:
- $ARGUMENTS - Github issue number and optional work level (low/medium/high, default: medium)
Examples: "21536", "21536 low", "21536 high"
Parse the issue number and work level from $ARGUMENTS. Work levels control triage depth:
- **low**: Research and understand only, skip fix planning
- **medium**: Research + single plan for most probable cause
- **high**: Research + plans for all theories + plan assessment
You will be supplied a Github issue number to triage. A Galaxy developer will be assigned the issue but your job is to structure the conversation around the issue.
Fetch the issue using "gh issue view <number>" and write the issue contents to `ISSUE_<#>.md`.
Write all triage artifacts to the current working directory.
Galaxy versions look like 24.1, 26.2, etc., and these correspond to branches such as release_24.1, release_26.2. Be sure you're on the target branch before continuing.
Launch subagents to perform actions to help in the triage process. Run independent tasks in parallel where possible (e.g., code research and importance assessment can run concurrently since they only need the original issue), but tasks that depend on artifacts from earlier subagents must wait (e.g., history research and planning need to read the code research document first). As the agent responsible for the triage process - please read the artifacts generated from subagents and direct the process as it makes sense. Your job is to direct the process though - do not try to fix the issue or do research yourself.
When to launch subagents and what they should do are as follows:
- When: Always (all levels)
What: Launch a subagent to research the issue. This subagent should create a document called `ISSUE_<#>_CODE_RESEARCH.md` where `<#>` is the issue number. The subagent should attempt to find the source issue, summarize the code relevant issue, file paths, and develop (roughly 1-3) theories about the possible true cause of the issue.
- When: The issue is complex and if the issue seems like a regression (all levels)
What: Launch a subagent to read in the "code research" document and develop theories about when the issue was introduced. Create a document called `ISSUE_<#>_HISTORY.md` where `<#>` is the issue number. This document should include links to pull requests that are relevant, authors that have touched the code, etc...
- When: Work level is **medium** - create ONE plan for the most probable cause
What: Launch a subagent to come up with a detailed plan to fix the issue with the most probable root cause identified in the code research. Create a document called `ISSUE_<#>_PLAN.md`.
- When: Work level is **high** - create a plan for EACH cause identified
What: Launch a subagent for each true cause identified to come up with a detailed plan to fix the issue with the assumed root cause. Create a document called `ISSUE_<#>_PLAN_<cause>.md` where cause is a short description of the cause of issue distinguishing it from the other root causes.
- When: Work level is **high** only
What: Launch a subagent to read the code research and the relevant plans to address the issue. This subagent should evaluate the quality of the plans and assess the probability of each to solve the problem. This subagent should create a document called `ISSUE_<#>_PLAN_ASSESSMENT.md`.
- When: Always (all levels)
What: Launch a subagent to assess bug importance. Create a document called `ISSUE_<#>_IMPORTANCE.md`. Assess:
- Severity (critical/high/medium/low): data loss/security > crash/hang > functional breakage > cosmetic/minor
- Blast radius: all users, specific configurations, edge cases only
- Workaround existence: none / painful / acceptable
- Regression status: new regression (which version) vs long-standing
- User impact signals: issue reactions, duplicate reports, support requests
- Recommendation: hotfix / next release / backlog / wontfix with rationale
Once all of the subagents are done - please write a new document called `ISSUE_<#>_SUMMARY.md`.
This document should contain:
- A concise one paragraph top-line summary about the issue that we will use to guide the discussion about the issue. Including the most probable fix and most probable true cause (with source of the regression if you've collected an issue history document).
- Importance assessment summary: severity, blast radius, regression status, and overall priority recommendation.
- Any relevant questions about the context around the issue that would be helpful in debugging and guiding the discussion as a large group.
- A complexity assessment (low/medium/high) for the fix, and how difficult it is to recreate and test the issue.
Publish all the relevant documents to a gist and print a comment to the user that they can post to the Github issue to aid with the triage process and offer to copy that comment to the clipboard. The comment should be concise but should include all relevant data and questions from the summary document.
Persona: You are a senior engineer responsible for maintaining the Galaxy project.
Arguments:
- $ARGUMENTS - Github issue number and optional work level (low/medium/high, default: medium)
Examples: "21474", "21474 low", "21474 high"
Parse the issue number and work level from $ARGUMENTS. Work levels control triage depth:
- **low**: Research and understand only, skip implementation planning
- **medium**: Research + single recommended approach with focused plan
- **high**: Research + multiple approaches + comprehensive plan
You will be supplied a Github issue number for a feature request to triage. A Galaxy developer will be assigned the issue but your job is to structure the conversation around the feature.
Fetch the issue using "gh issue view <number>" and write the issue contents to `FEATURE_<#>.md`.
Write all triage artifacts to the current working directory.
Launch subagents to perform actions to help in the triage process. Run independent tasks in parallel where possible (e.g., demand research and code research can run concurrently since they only need the original issue), but tasks that depend on artifacts from earlier subagents must wait (e.g., implementation planning needs to read the research documents first). As the agent responsible for the triage process - please read the artifacts generated from subagents and direct the process as it makes sense. Your job is to direct the process though - do not try to implement the feature or do research yourself.
When to launch subagents and what they should do are as follows:
- When: Always (all levels)
What: Launch a subagent to research user demand signals. This subagent should create a document called `FEATURE_<#>_DEMAND.md`. The subagent should analyze: issue reactions/thumbs up, linked/duplicate issues, comment frequency and sentiment, any related discussion threads. Quantify demand where possible.
- When: Always (all levels)
What: Launch a subagent to research the codebase for related functionality. This subagent should create a document called `FEATURE_<#>_CODE_RESEARCH.md`. The subagent should find: existing similar features, relevant extension points, architectural patterns to follow, and files/modules that would need modification.
- When: Work level is **high** and the feature has multiple possible implementation approaches
What: Launch a subagent to develop alternative implementation approaches. Create a document called `FEATURE_<#>_APPROACHES.md`. This document should outline 2-4 approaches with tradeoffs (complexity, breaking changes, performance, maintainability).
- When: Always (all levels)
What: Launch a subagent to assess importance. Create a document called `FEATURE_<#>_IMPORTANCE.md`. Assess:
- User demand (high/medium/low) based on reactions, comments, linked issues
- Strategic value (high/medium/low) - does it align with project direction, enable other features, improve UX significantly
- Complexity (low/medium/high) based on code research — do not estimate time or duration
- Risk assessment (breaking changes, migration needs, security considerations)
- Recommendation: prioritize now / backlog / defer / decline with rationale
- When: Work level is **medium** - create a focused implementation plan
What: Launch a subagent to read the research documents and create an implementation plan for the single recommended approach. Create a document called `FEATURE_<#>_PLAN.md`. Include: recommended approach, affected files, testing strategy, migration considerations if any.
- When: Work level is **high** - create a comprehensive implementation plan
What: Launch a subagent to read all research documents (including approaches) and create a detailed implementation plan. Create a document called `FEATURE_<#>_PLAN.md`. Include: recommended approach with rationale for choosing it over alternatives, affected files, testing strategy, migration considerations if any.
Once all of the subagents are done - please write a new document called `FEATURE_<#>_SUMMARY.md`.
This document should contain:
- A concise one paragraph top-line summary about the feature request and recommended approach.
- Importance assessment summary: demand level, strategic value, effort, and overall priority recommendation.
- Key questions for the group discussion that would help refine requirements or approach.
- Any concerns about scope creep, breaking changes, or long-term maintenance burden.
Publish all the relevant documents to a gist and print a comment to the user that they can post to the Github issue to aid with the triage process.
Persona: You are a senior engineer responsible for maintaining the Galaxy project.
Arguments:
- $ARGUMENTS - Github issue number and optional work level (low/medium/high, default: medium)
Examples: "21536", "21536 low", "21536 high"
Parse the issue number and work level from $ARGUMENTS.
Fetch the issue using "gh issue view <number>" and analyze its content to determine if it is:
- **Bug**: A report of something broken, not working as expected, an error, regression, or malfunction
- **Feature**: A request for new functionality, enhancement, improvement, or capability that doesn't exist
Classification signals:
- Bug indicators: "error", "crash", "broken", "doesn't work", "regression", "fails", "exception", stack traces, reproduction steps describing unexpected behavior
- Feature indicators: "would be nice", "please add", "feature request", "enhancement", "suggestion", "support for", "ability to", describing desired new behavior
Once classified, inform the user of your classification and reasoning in one sentence.
Then read the appropriate command file and execute its instructions:
- For bugs: Read `.claude/commands/triage-bug.md` and follow those instructions
- For features: Read `.claude/commands/triage-feature.md` and follow those instructions
Pass through the original issue number and work level to the triage workflow.
If the classification is ambiguous, ask the user which triage path to follow before proceeding.
Persona: You are the lead engineer running the weekly Galaxy triage meeting.
Arguments:
- $ARGUMENTS - Optional: `[project_number] [column_name] [work_level]`
Defaults: project 26, column "Triage/Discuss", work level "medium"
Examples: "" (all defaults), "26", "26 Triage/Discuss low", "26 Triage/Discuss high"
Parse arguments from $ARGUMENTS. If fewer than 3 are provided, use defaults for the missing ones. Work levels: low (research only), medium (research + single plan), high (research + all theories + assessment).
## Step 1: Fetch project board issues
Run `gh project item-list <project_number> --owner galaxyproject --format json --limit 500` to get all items.
Filter the JSON results to only items where `status` matches the target column name. Extract issue numbers and titles. Print the count of issues found in the column.
## Step 2: Filter to uncommented issues
For each issue, run `gh issue view <number> --repo galaxyproject/galaxy --json comments,assignees` to check the comment count and assignees. Skip an issue if:
- It has 1+ comments (already being discussed)
- It has any assignees (already claimed by someone)
Print how many issues need triage out of the total, noting how many were skipped for each reason. If zero issues remain, print a summary and stop.
## Step 3: Launch triage agents in parallel batches
Process uncommented issues in batches of up to 5 parallel Task agents. Each agent receives:
- The issue number and work level
- Instructions to read `.claude/commands/triage-issue.md` and follow its full workflow (which classifies the issue and dispatches to triage-bug.md or triage-feature.md)
- The triage-issue skill already handles: classification, launching research/planning subagents, writing artifact files, creating a gist, and drafting a comment
**Important override for batch mode:** Tell each agent to NOT ask the user for classification confirmation if ambiguous — default to bug. Also tell each agent to post the triage comment directly to the issue using `gh issue comment NUM --repo galaxyproject/galaxy` instead of printing it for the user to copy. The comment should include the gist link.
## Step 4: Collect results and print summary
As each agent completes, collect its findings. Print a final markdown summary table:
| Issue | Title | Type | Recommendation | Complexity | Gist |
| ----- | ----- | ---- | -------------- | ---------- | ---- |
Include a row for each triaged issue. For skipped issues (already had comments or assignees), note them below the table.
Print total counts: issues in column, skipped (had comments), skipped (assigned), triaged, failed.
[run]
branch = True
include = lib/galaxy/*
disable_warnings = include-ignored
.k8s_ci.Dockerfile
.venv
database
**/node_modules
[flake8]
# These are exceptions allowed by Galaxy style guidelines:
# B008 Do not perform function calls in argument defaults (for FastAPI Depends and Body)
# B042 Seems to unnecessarily complicate exception classes
# E203 is whitespace before ':'; we follow black's formatting here. See https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated
# E402 module level import not at top of file # TODO, we would like to improve this.
# E501 is line length (delegated to black)
# E701,E704 are multiple statements on one line; we follow black's formatting here. See https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#configuration
# W503 is line breaks before binary operators, which has been reversed in PEP 8.
# D** are docstring linting - which we mostly ignore except D302. (Hopefully we will solve more over time).
ignore = B008,B042,E203,E402,E501,E701,E704,W503,D100,D101,D102,D103,D104,D105,D106,D107,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403,D412,D413
exclude = lib/tool_shed/test/test_data/repos
# Migrate code style to Black and Apply isort
623a5e165ad79e15321b19b2a2afbf2f9224cc90
# Application of black styling to /tools/
03351d2a9f81b938d650e2a955c65c18526ee7f0
# Migrate code style to Prettier
5b2928f851bd5ea3b9c2a04abf2cee9ff0bc54cc
87873c5e2f4e6b97fe0f2084bfca0295fcd471de
# vue/recommended standardization (attribute and property order)
3bea88e761473b5d67d215d2faf732c86abd2091
44ac26da4c53dedad3499cfef6d9d4cb61364a05
* text=auto
lib/galaxy/datatypes/test/dosimzml eol=crlf
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**Galaxy Version and/or server at which you observed the bug**
Galaxy Version: (check <galaxy_url>/api/version if you don't know)
Commit: (run `git rev-parse HEAD` if you run this Galaxy server)
**Browser and Operating System**
Operating System: Windows, Linux, macOS
Browser: Firefox, Chrome, Chrome-based, Safari
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here.
comment: false
paths-ignore:
- 'lib/galaxy/datatypes/test/*.bcsl.ts'
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
# Enable version updates for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
---
area/admin:
- changed-files:
- any-glob-to-any-file:
- client/src/components/admin/**/*
- doc/source/admin/**/*
area/API:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/webapps/galaxy/api/**/*
area/auth:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/auth/**/*
- lib/galaxy/authnz/**/*
area/client:
- changed-files:
- any-glob-to-any-file:
- client/*
area/database:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/model/**/*
area/datatypes:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/datatypes/**/*
- lib/galaxy/config/sample/datatypes_conf.xml.sample
area/dependencies:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/dependencies/**/*
area/documentation:
- changed-files:
- any-glob-to-any-file:
- doc/**/*
area/jobs:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/jobs/**/*
area/libraries:
- changed-files:
- any-glob-to-any-file:
- client/src/components/LibraryFolder/**/*
- lib/galaxy/webapps/galaxy/api/libraries.py
area/objectstore:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/objectstore/**/*
area/packaging:
- changed-files:
- any-glob-to-any-file:
- packages/**/*
area/reports:
- changed-files:
- any-glob-to-any-file:
- client/src/reports/**/*
- lib/galaxy/webapps/reports/**/*
- templates/webapps/reports/**/*
area/scripts:
- changed-files:
- any-glob-to-any-file:
- scripts/**/*
area/security:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/security/**/*
area/testing:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy_test/**/*
- run_tests.sh
- test/**/*
- test-data/**/*
area/testing/api:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy_test/api/**/*
area/testing/integration:
- changed-files:
- any-glob-to-any-file:
- test/integration/**/*
- test/integration_selenium/**/*
area/testing/selenium:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/selenium/**/*
- lib/galaxy_test/selenium/**/*
area/tool-dependencies:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/tool_util/deps/**/*
area/tool-framework:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/tools/**/*
- lib/galaxy/tool_util/**/*
- lib/galaxy/webapps/galaxy/api/tools.py
area/tools:
- changed-files:
- any-glob-to-any-file:
- tools/**/*
- tool-data/**/*
area/toolshed:
- changed-files:
- any-glob-to-any-file:
- client/src/toolshed/**/*
- lib/galaxy/webapps/galaxy/api/toolshed.py
- lib/toolshed/**/*
- templates/webapps/tool_shed/**/*
area/UI-UX:
- all:
- changed-files:
- any-glob-to-any-file:
- client/src/**/*
- templates/**/*
- all-globs-to-all-files:
- '!client/src/api/schema/schema.ts'
area/util:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/util/**/*
area/visualizations:
- changed-files:
- any-glob-to-any-file:
- config/plugins/visualizations/**/*
area/workflows:
- changed-files:
- any-glob-to-any-file:
- lib/galaxy/workflow/**/*
(Please replace this header with a description of your pull request. Please include *BOTH* what you did and why you made the changes. The "why" may simply be citing a relevant Galaxy issue.)
(If fixing a bug, please add any relevant error or traceback)
(For UI components, it is recommended to include screenshots or screencasts)
## How to test the changes?
(Select all options that apply)
- [ ] I've included appropriate [automated tests](https://docs.galaxyproject.org/en/latest/dev/writing_tests.html).
- [ ] This is a refactoring of components with existing test coverage.
- [ ] Instructions for manual testing are as follows:
1. [add testing steps and prerequisites here if you didn't write automated tests covering all your changes]
## License
- [x] I agree to license these and all my past contributions to the core galaxy codebase under the [MIT license](https://opensource.org/licenses/MIT).
changelog:
exclude:
labels:
- merge
- procedures
- planning
- status/needs feedback
- status/planning
authors:
- dependabot[bot]
- github-actions[bot]
- galaxybot
categories:
- title: 🎉 Major Changes
labels:
- highlight
- highlight/admin
- highlight/dev
- highlight/power-user
- title: 🚀 New Features
labels:
- kind/feature
exclude:
labels:
- area/documentation
- title: 🔧 Enhancements
labels:
- kind/enhancement
- title: 🐛 Bug Fixes
labels:
- kind/bug
- title: '🔒 Security Updates'
labels:
- 'area/security'
- title: '⚡ Performance'
labels:
- 'area/performance'
- title: '🧰 Refactoring'
labels:
- 'kind/refactoring'
- title: '📚 Documentation'
labels:
- 'area/documentation'
- title: '🔧 Admin & Configuration'
labels:
- 'area/admin'
- 'area/configuration'
- title: '🛠️ API Changes'
labels:
- 'area/API'
- title: '🧪 Testing'
labels:
- 'area/testing'
- 'area/testing/api'
- 'area/testing/integration'
- 'area/testing/selenium'
- title: '🏗️ Infrastructure & Dependencies'
labels:
- 'area/dependencies'
- 'area/client-build'
- title: Other Changes
labels:
- "*"
name: API tests
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
schedule:
# Run at midnight UTC every Tuesday
- cron: '0 0 * * 2'
env:
GALAXY_DEPENDENCIES_INSTALL_WEASYPRINT: '1'
GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
GALAXY_CONFIG_SQLALCHEMY_WARN_20: '1'
GALAXY_TEST_REQUIRE_ALL_NEEDED_TOOLS: '1'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
chunk: [0, 1]
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- if: github.event_name == 'schedule'
run: |
echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Cache galaxy venv
uses: actions/cache@v5
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-api
- name: Run tests
run: ./run_tests.sh --coverage --skip_flakey_fails -api lib/galaxy_test/api -- --num-shards=2 --shard-id=${{ matrix.chunk }}
working-directory: 'galaxy root'
- uses: codecov/codecov-action@v5
with:
flags: api
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: API test results (${{ matrix.python-version }}, ${{ matrix.chunk }})
path: 'galaxy root/run_api_tests.html'
# Workflow: run the BioBlend test suite against the Galaxy branch under review.
# Triggered on PRs touching the Galaxy API surface (schema/api/services) and manually.
name: BioBlend Tests
on:
  pull_request:
    paths:
      - .github/workflows/bioblend.yaml
      - lib/galaxy/schema/**
      - lib/galaxy/webapps/galaxy/api/**
      - lib/galaxy/webapps/galaxy/services/**
  workflow_dispatch:
# Cancel in-flight runs for the same branch when a newer run starts.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:17
        # Provide the password for postgres
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    strategy:
      fail-fast: false
      matrix:
        # tox_env selects BioBlend's Python; galaxy_python_version selects Galaxy's.
        tox_env: [py314]
        galaxy_python_version: ['3.10']
    steps:
      - name: Checkout Galaxy
        uses: actions/checkout@v6
        with:
          fetch-depth: 1
          path: galaxy
          persist-credentials: false
      - name: Checkout Bioblend
        uses: actions/checkout@v6
        with:
          repository: galaxyproject/bioblend
          path: bioblend
          persist-credentials: false
      # Derive "3.14" from "py314" so setup-python can install the matching interpreter.
      - name: Calculate Python version for BioBlend from tox_env
        id: get_bioblend_python_version
        run: echo "bioblend_python_version=$(echo "${{ matrix.tox_env }}" | sed -e 's/^py\([3-9]\)\([0-9]\+\)/\1.\2/')" >> $GITHUB_OUTPUT
      - name: Set up Python for BioBlend
        uses: actions/setup-python@v6
        with:
          python-version: ${{ steps.get_bioblend_python_version.outputs.bioblend_python_version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Install tox
        run: uv tool install tox --with tox-uv
      - name: Set up Python for Galaxy
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.galaxy_python_version }}
      - name: Run tests
        env:
          PGPASSWORD: postgres
          PGPORT: 5432
          PGHOST: localhost
        run: |
          # Create a PostgreSQL database for Galaxy. The default SQLite3 database makes test fail randomly because of "database locked" error.
          createdb -U postgres galaxy
          # NOTE(review): the URI carries an empty password ("postgres:@") even though
          # PGPASSWORD is set — presumably libpq env vars cover authentication; confirm.
          export DATABASE_CONNECTION=postgresql://postgres:@localhost/galaxy
          ./bioblend/run_bioblend_tests.sh -g galaxy -v python${{ matrix.galaxy_python_version }} -e ${{ matrix.tox_env }}
      # Dump Galaxy's logs to aid debugging when the test run fails.
      - name: The job has failed
        if: ${{ failure() }}
        run: |
          cat galaxy/*.log
# Reusable workflow (workflow_call): build the Galaxy web client once, cache the
# resulting static assets keyed by commit, and expose the commit id to callers so
# they can restore the same cache entry.
name: Build client for selenium tests
on:
  workflow_call:
    outputs:
      commit-id:
        description: Commit ID
        value: ${{ jobs.build-client.outputs.commit-id }}
jobs:
  build-client:
    runs-on: ubuntu-latest
    outputs:
      commit-id: ${{ steps.client-commit.outputs.commit }}
    steps:
      - uses: actions/checkout@v6
        with:
          # Deliberately contains a space to catch path-quoting bugs in build scripts.
          path: 'galaxy root'
          persist-credentials: false
      - name: Read Node.js version
        id: node-version
        run: echo "version=$(cat 'galaxy root/client/.node_version')" >> $GITHUB_OUTPUT
      - uses: actions/setup-node@v6
        with:
          node-version: ${{ steps.node-version.outputs.version }}
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          package_json_file: 'galaxy root/package.json'
      - name: get client commit
        id: client-commit
        shell: bash
        run: echo "commit=$(git rev-parse HEAD 2>/dev/null)" >> $GITHUB_OUTPUT
        working-directory: 'galaxy root'
      - name: cache client build
        uses: actions/cache@v5
        id: cache
        with:
          key: galaxy-static-${{ steps.client-commit.outputs.commit }}
          path: 'galaxy root/static'
      # Only rebuild when the cache missed for this commit.
      - name: Build client
        if: steps.cache.outputs.cache-hit != 'true'
        env:
          GALAXY_PLUGIN_BUILD_FAIL_ON_ERROR: 1
          NODE_OPTIONS: --max-old-space-size=4096
        run: make client
        working-directory: 'galaxy root'
# This diff is collapsed.  (scrape residue from the code-review page, not workflow content)
# Workflow: ensure new test classes follow the Test* naming convention so pytest
# collects them (runs .ci/check_test_class_names.sh, which diffs collection counts).
name: Check test class names
on:
  pull_request:
    paths:
      - '.ci/check_test_class_names.sh'
      - 'lib/galaxy_test/**'
      - 'test/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Install Python dependencies
        run: uv pip install --system -r requirements.txt -r lib/galaxy/dependencies/pinned-test-requirements.txt
      - name: Run tests
        run: .ci/check_test_class_names.sh
# Workflow: run the client's Vitest unit test suite whenever client code changes.
name: Client Unit Testing
on:
  push:
    paths:
      - 'client/**'
      - '.github/workflows/client-unit.yaml'
  pull_request:
    paths:
      - 'client/**'
      - '.github/workflows/client-unit.yaml'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  client-unit-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      # Pin Node to the version the client declares in client/.node_version.
      - name: Read Node.js version
        id: node-version
        run: echo "version=$(cat 'client/.node_version')" >> $GITHUB_OUTPUT
      - name: Setup node
        uses: actions/setup-node@v6
        with:
          node-version: ${{ steps.node-version.outputs.version }}
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
      - run: pnpm install --frozen-lockfile
        working-directory: client
      # Some tests depend on generated assets (icons, plugins) existing first.
      - name: Pre-build (icons and plugins)
        run: node scripts/build.mjs
        working-directory: client
      - name: Run Vitest Unit Tests
        run: pnpm test
        working-directory: client
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
  push:
    branches: [ dev ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ dev ]
  schedule:
    - cron: '16 6 * * 0'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    # Least-privilege token: security-events write is required to upload SARIF results.
    permissions:
      actions: read
      contents: read
      security-events: write
    strategy:
      fail-fast: false
      matrix:
        language: ['javascript-typescript', 'python']
        # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          persist-credentials: false
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v4
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality
          config-file: ./.github/codeql/codeql-config.yml
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v4
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      # If the Autobuild fails above, remove it and uncomment the following three lines
      # modify them (or add more) to build your code if your project
      # uses a compiled language
      #- run: |
      #   make bootstrap
      #   make release
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v4
# Workflow: lint and planemo-test the datatype converter tools shipped with Galaxy,
# skipping those listed in the .tt_skip file.
name: Converter tests
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'packages/**'
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
env:
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      # Scheduled runs exercise the alternative metadata strategy as well.
      - if: github.event_name == 'schedule'
        run: |
          echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
          echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - name: Clone galaxyproject/galaxy-test-data
        uses: actions/checkout@v6
        with:
          repository: galaxyproject/galaxy-test-data
          path: galaxy-test-data
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Move test data
        run: rsync -av --remove-source-files --exclude .git galaxy-test-data/ 'galaxy root/test-data/'
      - name: Install planemo
        run: uv tool install planemo
      # Build tool_list.txt: every converter XML except those matched by .tt_skip.
      - name: Determine converters to check
        run: |
          ls 'galaxy root'/lib/galaxy/datatypes/converters/*xml | grep -v -f 'galaxy root'/lib/galaxy/datatypes/converters/.tt_skip > tool_list.txt
          echo "Skipping checks for the following converters:"
          ls 'galaxy root'/lib/galaxy/datatypes/converters/*xml | grep -f 'galaxy root'/lib/galaxy/datatypes/converters/.tt_skip
          echo "Checking only the following converters:"
          cat tool_list.txt
      - name: Lint converters
        run: |
          mapfile -t TOOL_ARRAY < tool_list.txt
          planemo lint --skip CitationsMissing,HelpEmpty,HelpMissing --report_level warn "${TOOL_ARRAY[@]}"
      - name: Run tests
        run: |
          mapfile -t TOOL_ARRAY < tool_list.txt
          planemo test --biocontainers --galaxy_python_version ${{ matrix.python-version }} --galaxy_root 'galaxy root' "${TOOL_ARRAY[@]}"
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Converter test results (${{ matrix.python-version }})
          path: tool_test_output.html
# Workflow: run the CWL conformance test suites against Galaxy.
# NOTE(review): the job is currently disabled via `if: ${{ false }}` — presumably
# intentional while the suite is unstable; confirm before relying on its coverage.
name: CWL conformance
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
env:
  GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    if: ${{ false }}
    runs-on: ubuntu-latest
    # "red" markers are expected failures; don't fail the build on them.
    continue-on-error: ${{ startsWith(matrix.marker, 'red') }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10']
        marker: ['green', 'red and required', 'red and not required']
        conformance-version: ['cwl_conformance_v1_0'] #, 'cwl_conformance_v1_1', 'cwl_conformance_v1_2']
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-api
      - name: Run tests
        run: ./run_tests.sh --coverage --skip_flakey_fails -cwl lib/galaxy_test/api/cwl -- -m "${{ matrix.marker }} and ${{ matrix.conformance-version }}"
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v5
        with:
          flags: cwl-conformance
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: CWL conformance test results (${{ matrix.python-version }}, ${{ matrix.marker }}, ${{ matrix.conformance-version }})
          path: 'galaxy root/run_cwl_tests.html'
# Workflow: verify the database schema's indexes on both SQLite and PostgreSQL
# (including the oldest supported PostgreSQL, 9.6) via `tox -e check_indexes`.
name: Database indexes
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
# All run steps execute from the checkout directory (uses-steps are unaffected).
defaults:
  run:
    working-directory: 'galaxy root'
jobs:
  check:
    name: Check database indexes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        db: ['postgresql', 'sqlite']
        postgresql-version: ['17']
        python-version: ['3.10']
        include:
          # Also cover the minimum supported PostgreSQL release.
          - db: postgresql
            postgresql-version: '9.6'
            python-version: '3.10'
    services:
      postgres:
        image: postgres:${{ matrix.postgresql-version }}
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Install tox
        run: uv tool install tox --with tox-uv
      # SQLite needs no connection override; PostgreSQL points at the service container.
      - name: Set database connection on PostgreSQL
        if: matrix.db == 'postgresql'
        run: echo 'GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8' >> $GITHUB_ENV
      - name: Check indexes
        run: tox -e check_indexes
# Workflow: weekly automation that regenerates pinned Python dependencies and opens
# a PR with the result (pushed from the galaxybot fork using a PAT).
name: Update dependencies
on:
  schedule:
    - cron: '0 3 * * 6' # Run every saturday at 3 am.
  workflow_dispatch:
jobs:
  update_dependencies:
    name: Update dependencies
    # Only run in the canonical repository, not in forks.
    if: github.repository_owner == 'galaxyproject'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: '3.10'
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Update dependencies
        run: make update-dependencies
      - name: Create pull request
        uses: peter-evans/create-pull-request@v8
        with:
          author: galaxybot <galaxybot@users.noreply.github.com>
          token: ${{ secrets.GALAXYBOT_PAT }}
          commit-message: |
            Update Python dependencies
            by running `make update-dependencies`.
          branch: dev_auto_update_dependencies
          delete-branch: true
          push-to-fork: galaxybot/galaxy
          title: Update Python dependencies
          body: by running `make update-dependencies`.
          labels: |
            area/dependencies
            kind/enhancement
# Workflow: manually-dispatched smoke tests (API and/or Selenium) against a live
# Galaxy deployment chosen via workflow inputs.
name: Deployment Tests
on:
  workflow_dispatch:
    inputs:
      target:
        description: 'Galaxy Deployment to target'
        required: true
        default: 'usegalaxymain'
        type: choice
        options:
          - usegalaxytest
          - usegalaxymain
          - usegalaxyeu
      type:
        description: 'Test type'
        required: true
        default: 'all'
        type: choice
        options:
          - all
          - api
          - selenium
      debug:
        required: true
        description: 'Run deployment tests with debug mode on'
        type: boolean
jobs:
  testdeployment:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      # Chromedriver is required for the Selenium test type.
      - uses: nanasess/setup-chromedriver@v2
      - name: Run tests
        run: bash ./test/deployment/usegalaxystar.bash
        env:
          GALAXY_TEST_DEPLOYMENT_TARGET: ${{ inputs.target }}
          GALAXY_TEST_DEPLOYMENT_DEBUG: ${{ inputs.debug }}
          GALAXY_TEST_DEPLOYMENT_TEST_TYPE: ${{ inputs.type }}
          GALAXY_TEST_USEGALAXYMAIN_USER_EMAIL: "jmchilton+test@gmail.com"
          GALAXY_TEST_USEGALAXYMAIN_USER_PASSWORD: ${{ secrets.USEGALAXYMAIN_USER_PASSWORD }}
          GALAXY_TEST_USEGALAXYMAIN_USER_KEY: ${{ secrets.USEGALAXYMAIN_USER_KEY }}
          GALAXY_TEST_USEGALAXYTEST_USER_EMAIL: "jmchilton+test@gmail.com"
          GALAXY_TEST_USEGALAXYTEST_USER_PASSWORD: ${{ secrets.USEGALAXYTEST_USER_PASSWORD }}
          GALAXY_TEST_USEGALAXYTEST_USER_KEY: ${{ secrets.USEGALAXYTEST_USER_KEY }}
          GALAXY_TEST_USEGALAXYEU_USER_EMAIL: "jmchilton+test@gmail.com"
          GALAXY_TEST_USEGALAXYEU_USER_PASSWORD: ${{ secrets.USEGALAXYEU_USER_PASSWORD }}
          GALAXY_TEST_USEGALAXYEU_USER_KEY: ${{ secrets.USEGALAXYEU_USER_KEY }}
          # Live deployments are slower than CI-local servers.
          GALAXY_TEST_TIMEOUT_MULTIPLIER: 10
      - uses: actions/upload-artifact@v7
        if: always()
        with:
          name: Deployment test results (${{ inputs.target }}, ${{ inputs.type }}, ${{ inputs.debug }}, ${{ matrix.python-version }})
          path: 'deployment_tests.html'
# Workflow: build the Sphinx documentation; on push to release/dev branches in the
# canonical repo, sync the HTML to the docs S3 bucket.
name: Build docs
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    steps:
      # TARGET_BRANCH is the branch being built (push) or the PR's base branch.
      - name: Get target branch name (push)
        if: github.event_name == 'push'
        run: echo "TARGET_BRANCH=${GITHUB_REF##*/}" >> $GITHUB_ENV
      - name: Get target branch name (pull request)
        if: github.event_name == 'pull_request'
        run: echo "TARGET_BRANCH=$GITHUB_BASE_REF" >> $GITHUB_ENV
      - name: Show target branch name
        run: echo $TARGET_BRANCH
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Install Python dependencies
        run: uv pip install --system -r requirements.txt -r lib/galaxy/dependencies/dev-requirements.txt sphinxcontrib-simpleversioning
      - name: Add Google Analytics to doc/source/conf.py
        run: |
          sed -i -e "/html_theme_options = {/a\
          \ 'analytics_id': 'UA-45719423-17'," -e "s#https://docs.galaxyproject.org/en/[^/]*/#https://docs.galaxyproject.org/en/$TARGET_BRANCH/#" doc/source/conf.py
      - name: Checkout the latest doc/source/conf.versioning.py
        if: github.event_name != 'push' || github.ref != 'refs/heads/dev'
        run: |
          # We cannot just download the latest version from dev, because it may be newer in this branch/PR
          git fetch origin dev:dev
          if [ ! -f doc/source/conf.versioning.py ] || [ "$(git log -1 --pretty="format:%ct" dev -- doc/source/conf.versioning.py)" -gt "$(git log -1 --pretty="format:%ct" -- doc/source/conf.versioning.py)" ]; then
              git checkout dev -- doc/source/conf.versioning.py
          fi
      - name: Append doc/source/conf.versioning.py
        run: cat doc/source/conf.versioning.py >> doc/source/conf.py
      - name: Build docs
        run: make docs
      - name: Deploy docs
        if: github.event_name == 'push' && github.repository_owner == 'galaxyproject'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        run: |
          # Map branch name to the S3 upload directory; skip any other branch.
          case "$TARGET_BRANCH" in
              release_[[:digit:]][[:digit:]].[[:digit:]][[:digit:]] | release_[[:digit:]][[:digit:]].[[:digit:]] | master)
                  UPLOAD_DIR=$TARGET_BRANCH
                  ;;
              dev)
                  UPLOAD_DIR=latest
                  ;;
              *)
                  echo "Not deploying documentation for branch $TARGET_BRANCH"
                  exit 0
                  ;;
          esac
          uv tool install awscli
          aws s3 sync doc/build/html/ "s3://galaxy-docs/en/$UPLOAD_DIR" --region us-east-2 --size-only --delete
# Workflow: verify Galaxy starts up cleanly from a fresh checkout, reusing the
# client build produced by the shared build_client workflow.
name: first startup
on:
  push:
    paths-ignore:
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
env:
  YARN_INSTALL_OPTS: --frozen-lockfile
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  build-client:
    uses: ./.github/workflows/build_client.yaml
  test:
    name: Startup test
    needs: build-client
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # Test both the minimum and maximum supported Python versions.
        python-version: ['3.10', '3.14']
    defaults:
      run:
        shell: bash -l {0}
    steps:
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      # The client must already be built; fail fast if the cache entry is missing.
      - name: Restore client cache
        uses: actions/cache@v5
        with:
          fail-on-cache-miss: true
          key: galaxy-static-${{ needs.build-client.outputs.commit-id }}
          path: 'galaxy root/static'
      - name: Install tox
        run: uv tool install tox --with tox-uv
      - name: run tests
        run: tox -e first_startup
        working-directory: 'galaxy root'
# Workflow: run the tool framework test suite against a PostgreSQL-backed Galaxy,
# in both legacy and current tool API modes.
name: Tool framework tests
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
env:
  GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
        use-legacy-api: ['if_needed', 'always']
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      # Scheduled runs exercise the alternative metadata strategy as well.
      - if: github.event_name == 'schedule'
        run: |
          echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
          echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-framework
      - name: Run tests
        run: GALAXY_TEST_USE_LEGACY_TOOL_API="${{ matrix.use-legacy-api }}" ./run_tests.sh --coverage --framework-tools
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v5
        with:
          flags: framework
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Tool framework test results (${{ matrix.python-version }})
          path: 'galaxy root/run_framework_tests.html'
# Workflow: run the workflow framework test suite (including rerun-after tests)
# against a PostgreSQL-backed Galaxy.
name: Workflow framework tests
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
env:
  GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
  GALAXY_TEST_WORKFLOW_AFTER_RERUN: '1'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      # Scheduled runs exercise the alternative metadata strategy as well.
      - if: github.event_name == 'schedule'
        run: |
          echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
          echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-framework
      - name: Run tests
        run: ./run_tests.sh --coverage --framework-workflows
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v5
        with:
          flags: framework-workflows
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Workflow framework test results (${{ matrix.python-version }})
          path: 'galaxy root/run_framework_workflows_tests.html'
# Workflow: run the integration test suite, sharded into 4 chunks, with Minikube
# (Kubernetes runner tests) and Apptainer/Singularity available.
name: Integration
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
env:
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
  GALAXY_CONFIG_SQLALCHEMY_WARN_20: '1'
  GALAXY_DEPENDENCIES_INSTALL_WEASYPRINT: '1'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10']
        chunk: ['0', '1', '2', '3']
    steps:
      - if: github.event_name == 'schedule'
        run: |
          echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
          # Skip outputs_to_working_directory: true in integration tests, doesn't work with pulsar
          # echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
      # Free disk space on the runner; integration tests are storage-hungry.
      - name: Prune unused docker image, volumes and containers
        run: docker system prune -a -f
      - name: Clean dotnet folder for space
        run: rm -Rf /usr/share/dotnet
      - name: Install packages
        # ffmpeg: ffprobe needed by media datatypes
        run: sudo apt-get update && sudo apt-get -y install ffmpeg
      - name: Setup Minikube
        uses: medyagh/setup-minikube@latest
        with:
          driver: none
          kubernetes-version: '1.28.0'
      - name: Check pods
        run: kubectl get pods -A
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-integration
      - name: Install Apptainer's singularity
        uses: eWaterCycle/setup-apptainer@v2
      - name: Run tests
        run: |
          . .ci/minikube-test-setup/start_services.sh
          ./run_tests.sh --coverage -integration test/integration -- --num-shards=4 --shard-id=${{ matrix.chunk }}
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v5
        with:
          flags: integration
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Integration test results (${{ matrix.python-version }}, ${{ matrix.chunk }})
          path: 'galaxy root/run_integration_tests.html'
# Workflow: run browser-driven (Selenium) integration tests, reusing the shared
# Selenium setup and client-build workflows.
name: Integration Selenium
on:
  push:
    paths-ignore:
      - 'doc/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'doc/**'
      - 'packages/**'
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
env:
  GALAXY_SKIP_CLIENT_BUILD: '0'
  GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
  # Retry flaky browser tests once before reporting failure.
  GALAXY_TEST_SELENIUM_RETRIES: 1
  YARN_INSTALL_OPTS: --frozen-lockfile
  GALAXY_CONFIG_SQLALCHEMY_WARN_20: '1'
  GALAXY_DEPENDENCIES_INSTALL_WEASYPRINT: '1'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  setup-selenium:
    uses: ./.github/workflows/setup_selenium.yaml
  build-client:
    uses: ./.github/workflows/build_client.yaml
  test:
    name: Test
    needs: [setup-selenium, build-client]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      - if: github.event_name == 'schedule'
        run: |
          echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
          echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
      - name: Prune unused docker image, volumes and containers
        run: docker system prune -a -f
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-integration-selenium
      # Restore the client built by the build-client job; fail if missing.
      - name: Restore client cache
        uses: actions/cache@v5
        with:
          fail-on-cache-miss: true
          key: galaxy-static-${{ needs.build-client.outputs.commit-id }}
          path: 'galaxy root/static'
      - name: Run tests
        run: ./run_tests.sh --coverage -integration test/integration_selenium
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v5
        with:
          flags: integration-selenium
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Integration Selenium test results (${{ matrix.python-version }})
          path: 'galaxy root/run_integration_tests.html'
      # Screenshots and page dumps collected on test failure.
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Integration Selenium debug info (${{ matrix.python-version }})
          path: 'galaxy root/database/test_errors'
# Workflow: lint the client — ESLint, Prettier format check, and vue-tsc type check.
name: Client linting
on:
  push:
    paths:
      - 'client/**'
      - '.github/workflows/js_lint.yaml'
  pull_request:
    paths:
      - 'client/**'
      - '.github/workflows/js_lint.yaml'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # NOTE(review): job id "client-unit-test" looks copy-pasted from client-unit.yaml;
  # a clearer id would be "client-lint", but renaming may break required status
  # checks referencing this id — confirm before changing.
  client-unit-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - name: Read Node.js version
        id: node-version
        run: echo "version=$(cat 'client/.node_version')" >> $GITHUB_OUTPUT
      - name: Setup node
        uses: actions/setup-node@v6
        with:
          node-version: ${{ steps.node-version.outputs.version }}
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
      - run: pnpm install --frozen-lockfile
        working-directory: client
      - name: Run ESLint
        run: pnpm run eslint
        working-directory: client
      - name: Run prettier checks
        run: pnpm run format-check
        working-directory: client
      - name: Run vue-tsc
        working-directory: client
        run: pnpm type-check
# Workflow: after a PR is merged, comment on it if it lacks a "kind/" label
# (unless it carries a "merge" or "minor" label).
name: Labels Verifier
on:
  pull_request_target:
    types: [closed]
jobs:
  onMerged:
    name: "Check Labels on merge"
    permissions:
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Check Labels on merge
        # Only merged PRs (not just closed) whose label set is missing "kind/*".
        if: |
          github.event.pull_request.merged == true &&
          ! contains(join(github.event.pull_request.labels.*.name, ', '), 'kind/') &&
          ! contains(github.event.pull_request.labels.*.name, 'merge') &&
          ! contains(github.event.pull_request.labels.*.name, 'minor')
        uses: actions/github-script@v8
        with:
          script: |
            github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: 'This PR was merged without a "kind/" label, please correct.',
            })
# Workflow: Python static checks — flake8-style lint, docstring lint, mypy, and
# formatting — via tox environments, on min and max supported Pythons.
name: Python linting
on:
  push:
    paths:
      - '**.py'
      - '.github/workflows/lint.yaml'
      - .flake8
      - .isort.cfg
      - lib/galaxy/dependencies/**
      - mypy.ini
      - pyproject.toml
      - tox.ini
  pull_request:
    paths:
      - '**.py'
      - '.github/workflows/lint.yaml'
      - .flake8
      - .isort.cfg
      - lib/galaxy/dependencies/**
      - mypy.ini
      - pyproject.toml
      - tox.ini
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10', '3.14']
    env:
      LINT_PATH: 'lib/galaxy/dependencies/pinned-lint-requirements.txt'
      TYPE_PATH: 'lib/galaxy/dependencies/pinned-typecheck-requirements.txt'
      CORE_PATH: 'lib/galaxy/dependencies/pinned-requirements.txt'
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Install tox
        run: uv tool install tox --with tox-uv
      - name: Run linting
        run: tox -e lint
      - name: Run docstring linting
        run: tox -e lint_docstring_include_list
      - name: Run mypy checks
        run: tox -e mypy
      - name: Run format checks
        run: tox -e format
# Workflow: lint the generated OpenAPI schema and verify the committed TypeScript
# client schema is up to date (fails if regeneration produces a diff).
name: OpenAPI linting
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  validate-schema:
    name: Validate OpenAPI schema
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10', '3.14']
    steps:
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - name: Read Node.js version
        id: node-version
        run: echo "version=$(cat 'galaxy root/client/.node_version')" >> $GITHUB_OUTPUT
      - uses: actions/setup-node@v6
        with:
          node-version: ${{ steps.node-version.outputs.version }}
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          package_json_file: 'galaxy root/package.json'
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-api
      - name: Install dependencies
        run: ./scripts/common_startup.sh --dev-wheels --skip-client-build
        working-directory: 'galaxy root'
      - name: Lint schema
        run: make lint-api-schema
        working-directory: 'galaxy root'
      - name: Build typescript schema
        run: make update-client-api-schema
        working-directory: 'galaxy root'
      - name: Diff...
        run: git diff
        working-directory: 'galaxy root'
      # A dirty working tree means the committed schema is stale.
      - name: Check for changes
        run: |
          if [[ `git status --porcelain` ]]; then
            echo "Rebuilding client/src/api/schema/schema.ts resulted in changes, run 'make update-client-api-schema' and commit results"
            exit 1
          fi
        working-directory: 'galaxy root'
# Workflow: run the "main tools" test subset against a PostgreSQL-backed Galaxy.
name: Main tool tests
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
      - 'packages/**'
env:
  GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10']
    services:
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v6
        with:
          path: 'galaxy root'
          persist-credentials: false
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      # Shares the "-framework" venv cache key with the framework test workflows.
      - name: Cache galaxy venv
        uses: actions/cache@v5
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-framework
      - name: Run tests
        run: ./run_tests.sh --coverage --main_tools
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v5
        with:
          flags: main-tools
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v7
        if: failure()
        with:
          name: Main tool test results (${{ matrix.python-version }})
          path: 'galaxy root/run_framework_tests.html'
# Workflow: PR triage automation — auto-apply area labels and assign the current
# milestone to non-draft, non-WIP pull requests in the canonical repository.
name: Maintenance Bot
on:
  pull_request_target:
    types: [opened, reopened, edited, ready_for_review, unlabeled]
jobs:
  labeler:
    name: Assign labels and milestone
    if: github.repository_owner == 'galaxyproject'
    permissions:
      contents: read
      issues: write
      pull-requests: write
    runs-on: ubuntu-latest
    env:
      # Milestone to assign; bump when a new release milestone opens.
      MILESTONE_NUMBER: 33
    steps:
      # Re-fetch labels via the API: the event payload can be stale for this check.
      - name: Get latest pull request labels
        id: get_pr_labels
        uses: actions/github-script@v8
        with:
          script: |
            const response = await github.rest.issues.listLabelsOnIssue({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });
            console.log(response);
            return response.data;
      - name: Add area labels
        if: ${{ ! contains(join(fromJSON(steps.get_pr_labels.outputs.result).*.name, ', '), 'area/') }}
        uses: actions/labeler@v6
      - name: Assign milestone
        if: |
          ! github.event.pull_request.milestone &&
          ! contains(github.event.pull_request.labels.*.name, 'merge') &&
          ! contains(github.event.pull_request.labels.*.name, 'status/WIP') &&
          ! contains(github.event.pull_request.title, 'WIP') &&
          ! github.event.pull_request.draft
        uses: actions/github-script@v8
        with:
          script: |
            github.rest.issues.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              milestone: ${{ env.MILESTONE_NUMBER }},
            });
name: Mulled Unit Tests
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Install Apptainer's singularity
uses: eWaterCycle/setup-apptainer@v2
- name: Install tox
run: uv tool install tox --with tox-uv
- name: Run tests
run: tox -e mulled
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Mulled unit test results (${{ matrix.python-version }})
path: 'galaxy root/run_unit_tests.html'
name: macOS startup
on:
push:
paths-ignore:
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-client:
uses: ./.github/workflows/build_client.yaml
test:
name: Startup test
runs-on: macos-latest
needs: build-client
strategy:
fail-fast: false
matrix:
python-version: ['3.10', '3.14']
defaults:
run:
shell: bash -l {0}
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- name: Install miniforge # use this job to test using Python from a conda environment
uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
activate-environment: ''
- name: Restore client cache
uses: actions/cache@v5
with:
fail-on-cache-miss: true
key: galaxy-static-${{ needs.build-client.outputs.commit-id }}
path: 'galaxy root/static'
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Install tox
run: uv tool install tox --with tox-uv
- name: Run tests
run: tox -e first_startup
working-directory: 'galaxy root'
env:
GALAXY_CONDA_PYTHON_VERSION: "${{ matrix.python-version }}"
name: Performance tests
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
schedule:
# Run at midnight UTC every Tuesday
- cron: '0 0 * * 2'
env:
GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10']
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- if: github.event_name == 'schedule'
run: |
echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Cache galaxy venv
uses: actions/cache@v5
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-performance
- name: Run tests
run: ./run_tests.sh --ci_test_metrics --structured_data_html --structured_data_report_file "test.json" --skip_flakey_fails -api lib/galaxy_test/performance
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: API test results (${{ matrix.python-version }})
path: 'galaxy root/run_api_tests.html'
- uses: actions/upload-artifact@v7
with:
name: Performance Metrics (${{ matrix.python-version }})
path: 'galaxy root/test.html'
name: Playwright tests
on:
push:
paths-ignore:
- 'doc/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'doc/**'
- 'packages/**'
schedule:
# Run at midnight UTC every Tuesday
- cron: '0 0 * * 2'
env:
GALAXY_CONFIG_GALAXY_URL_PREFIX: '/galaxypf'
GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
GALAXY_TEST_SELENIUM_RETRIES: 1
GALAXY_TEST_SKIP_FLAKEY_TESTS_ON_ERROR: 1
GALAXY_TEST_SELENIUM_HEADLESS: 1
YARN_INSTALL_OPTS: --frozen-lockfile
GALAXY_CONFIG_SQLALCHEMY_WARN_20: '1'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-client:
uses: ./.github/workflows/build_client.yaml
test:
name: Test
needs: [build-client]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
chunk: [0, 1, 2]
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- if: github.event_name == 'schedule'
run: |
echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Cache galaxy venv
uses: actions/cache@v5
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-playwright
- name: Restore client cache
uses: actions/cache@v5
with:
fail-on-cache-miss: true
key: galaxy-static-${{ needs.build-client.outputs.commit-id }}
path: 'galaxy root/static'
- name: Run tests
run: ./run_tests.sh --coverage -playwright lib/galaxy_test/selenium -- --num-shards=3 --shard-id=${{ matrix.chunk }}
working-directory: 'galaxy root'
- uses: codecov/codecov-action@v5
with:
flags: playwright
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Playwright test results (${{ matrix.python-version }}, ${{ matrix.chunk }})
path: 'galaxy root/run_playwright_tests.html'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Playwright debug info (${{ matrix.python-version }}, ${{ matrix.chunk }})
path: 'galaxy root/database/test_errors'
name: Update PR title
on:
  pull_request_target:
    types: [opened, edited, reopened]
jobs:
  update-title:
    # On 'edited' events only run when the base branch changed,
    # to avoid re-running on every title/body edit.
    if: github.event.action != 'edited' || github.event.changes.base.ref.from != ''
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: Update PR title
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
          TARGET_BRANCH: "${{ github.base_ref }}"
          PR_TITLE: "${{ github.event.pull_request.title }}"
          REPO: "${{ github.repository }}"
        run: |
          # Extract "X.Y" from a base branch named "release_X.Y".
          # The dot must be escaped: an unescaped '.' matches any character,
          # so e.g. "release_24x1" would wrongly be treated as a version.
          VERSION=$(echo "$TARGET_BRANCH" | grep -oP '^release_\K\d+\.\d+$' || true)
          # Strip any existing "[X.Y] " prefix before (re-)adding the correct one.
          NEW_TITLE=$(echo "$PR_TITLE" | sed -E "s/\[[0-9]+\.[0-9]+\] //")
          if [[ -n "$VERSION" ]]; then
            NEW_TITLE="[$VERSION] $NEW_TITLE"
          fi
          # Only call the API when the title actually changes.
          if [[ "$NEW_TITLE" != "$PR_TITLE" ]]; then
            gh pr edit "$PR_NUMBER" --repo "$REPO" --title "$NEW_TITLE"
          fi
name: Publish release artifacts
on:
release:
types: [released, prereleased]
workflow_dispatch:
inputs:
release_type:
description: 'create release or prerelease artifact ?'
required: true
default: 'prerelease'
type: choice
options:
- release
- prerelease
release_tag:
description: 'Specify tag to build for'
required: true
type: string
jobs:
check-permissions:
if: github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- name: Check if user can create releases
run: |
PERMISSION=$(gh api repos/${{ github.repository }}/collaborators/${{ github.actor }}/permission --jq '.permission')
if [[ "$PERMISSION" != "admin" ]]; then
echo "Error: Only repository admins can manually trigger release artifacts"
exit 1
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build-and-publish-pypi:
if: |
github.repository_owner == 'galaxyproject' &&
(github.event_name == 'release' ||
(github.event_name == 'workflow_dispatch' && !cancelled() && !failure()))
needs: [check-permissions]
name: Build and Publish to PyPI
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10']
steps:
- uses: actions/checkout@v6
with:
ref: ${{ github.event_name == 'workflow_dispatch' && inputs.release_tag || '' }}
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Install script dependencies
run: uv tool install galaxy-release-util
- name: Build and publish to PyPI
run: |
galaxy-release-util build-and-upload --no-confirm
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ (github.event_name == 'workflow_dispatch' && inputs.release_type == 'prerelease') || (github.event_name == 'release' && github.event.release.prerelease) && secrets.PYPI_TEST_TOKEN || secrets.PYPI_MAIN_TOKEN }}
TWINE_REPOSITORY_URL: ${{ (github.event_name == 'workflow_dispatch' && inputs.release_type == 'prerelease') || (github.event_name == 'release' && github.event.release.prerelease) && 'https://test.pypi.org/legacy/' || 'https://upload.pypi.org/legacy/' }}
build-and-publish-npm:
if: |
github.repository_owner == 'galaxyproject' &&
(github.event_name == 'release' ||
(github.event_name == 'workflow_dispatch' && !cancelled() && !failure()))
needs: [check-permissions]
name: Build and Publish to NPM
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- uses: actions/checkout@v6
with:
ref: ${{ github.event_name == 'workflow_dispatch' && inputs.release_tag || '' }}
persist-credentials: false
- name: Read Node.js version
id: node-version
run: echo "version=$(cat client/.node_version)" >> $GITHUB_OUTPUT
- uses: actions/setup-node@v6
with:
node-version: ${{ steps.node-version.outputs.version }}
registry-url: 'https://registry.npmjs.org'
- name: Setup pnpm
uses: pnpm/action-setup@v4
- name: build client
run: pnpm install && pnpm build-production
working-directory: 'client'
# Ensure npm 11.5.1 or later for trusted publishing
- run: npm install -g npm@latest
working-directory: 'client'
- name: publish client
if: (github.event_name == 'workflow_dispatch' && inputs.release_type == 'release') || (github.event_name == 'release' && !github.event.release.prerelease)
run: npm publish --provenance --access public
working-directory: 'client'
- name: sync client-api version
run: npm run sync-version
working-directory: 'client-api'
- name: build client-api
run: npm install && npm run build
working-directory: 'client-api'
- name: publish client-api
if: (github.event_name == 'workflow_dispatch' && inputs.release_type == 'release') || (github.event_name == 'release' && !github.event.release.prerelease)
run: npm publish --provenance --access public
working-directory: 'client-api'
name: Selenium tests
on:
push:
paths-ignore:
- 'doc/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'doc/**'
- 'packages/**'
schedule:
# Run at midnight UTC every Tuesday
- cron: '0 0 * * 2'
env:
GALAXY_CONFIG_GALAXY_URL_PREFIX: '/galaxypf'
GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
GALAXY_TEST_SELENIUM_RETRIES: 1
GALAXY_TEST_SKIP_FLAKEY_TESTS_ON_ERROR: 1
YARN_INSTALL_OPTS: --frozen-lockfile
GALAXY_CONFIG_SQLALCHEMY_WARN_20: '1'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
setup-selenium:
uses: ./.github/workflows/setup_selenium.yaml
build-client:
uses: ./.github/workflows/build_client.yaml
test:
name: Test
needs: [setup-selenium, build-client]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
chunk: [0, 1, 2]
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- if: github.event_name == 'schedule'
run: |
echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Cache galaxy venv
uses: actions/cache@v5
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-selenium
- name: Restore client cache
uses: actions/cache@v5
with:
fail-on-cache-miss: true
key: galaxy-static-${{ needs.build-client.outputs.commit-id }}
path: 'galaxy root/static'
- name: Run tests
run: ./run_tests.sh --coverage -selenium lib/galaxy_test/selenium -- --num-shards=3 --shard-id=${{ matrix.chunk }}
working-directory: 'galaxy root'
- uses: codecov/codecov-action@v5
with:
flags: selenium
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Selenium test results (${{ matrix.python-version }}, ${{ matrix.chunk }})
path: 'galaxy root/run_selenium_tests.html'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Selenium debug info (${{ matrix.python-version }}, ${{ matrix.chunk }})
path: 'galaxy root/database/test_errors'
on:
workflow_call:
jobs:
setup_chromedriver:
runs-on: ubuntu-latest
steps:
- name: Install chromedriver
uses: nanasess/setup-chromedriver@v2
name: Test Galaxy packages
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10', '3.14']
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Install ffmpeg
run: sudo apt-get update && sudo apt-get -y install ffmpeg
- name: Install tox
run: uv tool install tox --with tox-uv
- name: Run tests
run: tox -e test_galaxy_packages
working-directory: 'galaxy root'
name: Test Galaxy packages for Pulsar
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.8'] # don't upgrade, see https://github.com/galaxyproject/galaxy/pull/16649
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Install Apptainer's singularity
uses: eWaterCycle/setup-apptainer@v2
- name: Install ffmpeg
run: sudo apt-get update && sudo apt-get -y install ffmpeg
- name: Install tox
run: uv tool install tox --with tox-uv
- name: Run tests
run: tox -e test_galaxy_packages_for_pulsar
working-directory: 'galaxy root'
name: Test Galaxy release script
on:
push:
paths:
- '.github/workflows/test_galaxy_release.yaml'
- lib/galaxy/dependencies/**
- lib/galaxy/version.py
- scripts/release.sh
- test/release.sh
pull_request:
paths:
- '.github/workflows/test_galaxy_release.yaml'
- lib/galaxy/dependencies/**
- lib/galaxy/version.py
- scripts/release.sh
- test/release.sh
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
persist-credentials: false
- name: Run tests
run: ./test/release.sh
name: Toolshed tests
on:
push:
paths-ignore:
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
env:
GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
TOOL_SHED_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/toolshed?client_encoding=utf8'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10', '3.14']
test-install-client: ['galaxy_api', 'standalone']
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Cache galaxy venv
uses: actions/cache@v5
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-toolshed
- name: Install dependencies
run: ./scripts/common_startup.sh --dev-wheels --skip-client-build
working-directory: 'galaxy root'
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: 'galaxy root/lib/tool_shed/webapp/frontend/package.json'
- name: Build Frontend
run: |
. .venv/bin/activate
cd lib/tool_shed/webapp/frontend
pnpm install --frozen-lockfile
make client
working-directory: 'galaxy root'
- name: Install playwright
run: |
. .venv/bin/activate
playwright install
working-directory: 'galaxy root'
- name: Run tests
run: ./run_tests.sh -toolshed
env:
TOOL_SHED_TEST_INSTALL_CLIENT: ${{ matrix.test-install-client }}
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Toolshed test results (${{ matrix.python-version }}, ${{ matrix.test-install-client }})
path: 'galaxy root/run_toolshed_tests.html'
name: Unit w/postgres tests
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
env:
GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/postgres?client_encoding=utf8' # using postgres as the db
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10']
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Cache galaxy venv
uses: actions/cache@v5
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-unit-postgres
- name: Run migration tests
run: ./run_tests.sh -unit test/unit/data/model/migrations/test_migrations.py
working-directory: 'galaxy root'
- name: Run test migrate database
run: ./run_tests.sh -unit test/unit/app/test_migrate_database.py
working-directory: 'galaxy root'
name: Unit tests
on:
push:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
pull_request:
paths-ignore:
- 'client/**'
- 'doc/**'
- 'lib/galaxy_test/selenium/**'
- 'packages/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.10', '3.14']
steps:
- uses: actions/checkout@v6
with:
path: 'galaxy root'
persist-credentials: false
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
- name: Install ffmpeg
run: sudo apt-get update && sudo apt-get -y install ffmpeg
- name: Install tox
run: uv tool install tox --with tox-uv
- name: Run tests
run: tox -e unit-coverage
working-directory: 'galaxy root'
- uses: codecov/codecov-action@v5
with:
flags: py-unit
working-directory: 'galaxy root'
- uses: actions/upload-artifact@v7
if: failure()
with:
name: Unit test results (${{ matrix.python-version }})
path: 'galaxy root/run_unit_tests.html'
.hg*
# Downloaded and locally built eggs
eggs
scripts/scramble/build
scripts/scramble/lib
scripts/scramble/archives
# Python virtualenv
.venv*/
# Python build artifacts
build
dist
lib/galaxy.egg-info
.eggs
# Generated static content
lib/galaxy/web/framework/static/maps
lib/galaxy/web/framework/static/scripts
lib/galaxy/web/framework/static/style
lib/galaxy/web/framework/meta.json
# Misc. data, config, cache files
/database/
# Python bytecode
*.pyc
# Galaxy runtime files
*.lock
*.log
*.pid
celerybeat-schedule
# Celery state files
celerybeat-schedule.*
# Tool Shed runtime files
tool_shed_webapp.lock
tool_shed_webapp.log
tool_shed_webapp.pid
hgweb.config*
lib/tool_shed/scripts/bootstrap_tool_shed/user_info.xml
# Reports runtime files
reports_webapp.lock
reports_webapp.log
reports_webapp.pid
# Jupyter runtime files
.ipynb_checkpoints
# Config files
universe_wsgi.ini
reports_wsgi.ini
reports.ini
tool_shed_wsgi.ini
datatypes_conf.xml
tool_conf.xml
external_service_types_conf.xml
migrated_tools_conf.xml
shed_tool_conf.xml
tool_data_table_conf.xml
tool_sheds_conf.xml
integrated_tool_panel.xml
openid_conf.xml
shed_tool_data_table_conf.xml
job_conf.xml
data_manager_conf.xml
shed_data_manager_conf.xml
object_store_conf.xml
job_metrics_conf.xml
workflow_schedulers_conf.xml
config/*
config/plugins/interactive_environments/**/*.ini
config/plugins/**/.cache
config/plugins/**/.parcel-cache
config/plugins/visualizations
!config/plugins
static/welcome.html.*
static/welcome.html
static/client_build_hash.txt
lib/galaxy_test/selenium/jupyter/galaxy_selenium_context.yml
# Tool data
tool-data/annotation_profiler_options.xml
tool-data/annotation_profiler_valid_builds.txt
tool-data/gatk_annotations.txt
tool-data/gd.restriction_enzymes.txt
tool-data/gd.species.txt
tool-data/shared/igv/igv_build_sites.txt
tool-data/shared/rviewer/rviewer_build_sites.txt
tool-data/shared/ncbi/builds.txt
tool-data/shared/ucsc/builds.txt
tool-data/shared/ucsc/manual_builds.txt
tool-data/shared/ensembl/builds.txt
tool-data/shared/ucsc/publicbuilds.txt
tool-data/shared/ucsc/ucsc_build_sites.txt
tool-data/*.loc
tool-data/genome/*
tool-data/*.sample
tool-data/testtoolshed.g2.bx.psu.edu/
tool-data/toolshed.g2.bx.psu.edu/
tool-data/**/*.fa
# Test output
.pytest_cache/
assets/
test-data-cache
run_api_tests.html
run_cwl_tests.html
run_framework_tests.html
run_functional_tests.html
run_integration_tests.html
run_playwright_tests.html
run_selenium_tests.html
run_toolshed_tests.html
test/tool_shed/tmp/*
.coverage
htmlcov
run_unit_tests.html
test/unit/**.log
.tox
tool_test_output.html
tool_test_output.json
# Project files
*.kpf
.idea
.vscode
client/**/jsconfig.json
vetur.config.js
.pre-commit-config.yaml
galaxy.code-workspace
# Chrom len files
*.len
# JARs
tool-data/shared/jars/
# JS, Local node_modules, and bower_components directories
static/maps
static/plugins
static/scripts
node_modules
bower_components
client/src/libs/*
# Old galaxy client location
client/galaxy
# Documentation build files
doc/build
doc/schema.md
doc/source/admin/config_logging_default_yaml.rst
doc/source/dev/schema.md
doc/source/dev/plantuml.jar
client/docs/dist
# Webpack stats
client/webpack-stats.json
# Packages
packages/*/build
packages/*/dist
packages/*/*.egg-info
packages/meta/requirements.txt
# Standalone script + main, or build into dist
config/plugins/**/static/dist
config/plugins/**/static/script.js
config/plugins/**/static/main.css
config/plugins/**/static/script.css
config/plugins/**/static/plugin_build_hash.txt
config/plugins/**/static/*.map
# Viz-specific build artifacts to ignore (until these are removed from codebase)
config/plugins/visualizations/annotate_image/static/jquery.contextMenu.css
config/plugins/visualizations/nvd3/nvd3_bar/static/nvd3.js
config/plugins/visualizations/scatterplot/static/scatterplot.js
config/plugins/visualizations/tiffviewer/static/
# CWL conformance tests
lib/galaxy_test/api/cwl/test_cwl_conformance_v1_?.py
test/functional/tools/cwl_tools/v1.?/
# Involucro tool
involucro
# Misc
*.orig
.DS_Store
*.rej
*~
github:
prebuilds:
# enable for the master/default branch (defaults to true)
master: true
# enable for all branches in this repo (defaults to false)
branches: true
# enable for pull requests coming from this repo (defaults to true)
pullRequests: true
# enable for pull requests coming from forks (defaults to false)
pullRequestsFromForks: true
# add a "Review in Gitpod" button as a comment to pull requests (defaults to true)
addComment: false
# add a "Review in Gitpod" button to pull requests (defaults to false)
addBadge: false
# add a label once the prebuild is ready to pull requests (defaults to false)
addLabel: false
image: gitpod/workspace-postgres
tasks:
- name: Setup Development Environment
init: >
cp .vscode/settings_gitpod.json .vscode/settings.json &&
cp .vscode/launch_gitpod.json .vscode/launch.json &&
python3 -m venv .venv &&
. .venv/bin/activate &&
pip install psycopg2 &&
pip install -r requirements.txt -r lib/galaxy/dependencies/dev-requirements.txt &&
pip install tox &&
gp sync-done setup
command: createdb galaxy
- name: Setup Galaxy Configuration
command: cp config/galaxy.yml.sample config/galaxy.yml
- name: Watch Client with Webpack
init: gp sync-await setup
command: make client-watch
# Ports to expose on workspace startup (optional)
ports:
- port: 8000
vscode:
extensions:
- ms-python.python
# No longer seems to work.
# - ms-python.vscode-pylance
[settings]
combine_as_imports=true
force_alphabetical_sort_within_sections=true
# Override force_grid_wrap value from profile=black, but black is still happy
force_grid_wrap=2
# Same line length as for black
line_length=120
no_lines_before=LOCALFOLDER
profile=black
reverse_relative=true
skip_gitignore=true
# Make isort run faster by skipping database
skip_glob=database/*,lib/tool_shed/test/test_data/repos/*
src_paths=lib
# Stage 1:
# - base: python slim image (default) OR a prebuilt image
# - install build tools
# - clone playbook
# - run playbook
# - remove build artifacts + files not needed in container
# Stage 2:
# - create galaxy user + group + directory
# - copy galaxy files from stage 1
# - finalize container (set path, user...)

# Init ARGs
ARG ROOT_DIR=/galaxy
ARG SERVER_DIR=$ROOT_DIR/server
ARG STAGE1_BASE=python:3.12-slim
ARG FINAL_STAGE_BASE=$STAGE1_BASE
ARG GALAXY_USER=galaxy
ARG GALAXY_PLAYBOOK_REPO=https://github.com/galaxyproject/galaxy-docker-k8s
ARG GALAXY_PLAYBOOK_BRANCH=v4.2.0
ARG GIT_COMMIT=unspecified
ARG BUILD_DATE=unspecified
ARG IMAGE_TAG=unspecified

#======================================================
# Stage 1 - Run playbook
#======================================================
FROM $STAGE1_BASE AS stage1
ARG DEBIAN_FRONTEND=noninteractive
ARG SERVER_DIR
ARG GALAXY_PLAYBOOK_REPO
ARG GALAXY_PLAYBOOK_BRANCH

# Init Env
ENV LC_ALL=en_US.UTF-8
ENV LANG=en_US.UTF-8

# Install build dependencies + ansible
# NOTE: use the full option name --no-cache-dir; the former "--no-cache" only
# worked via pip's option-prefix abbreviation and could become ambiguous.
RUN set -xe; \
    echo "Acquire::http {No-Cache=True;};" > /etc/apt/apt.conf.d/no-cache \
    && apt-get -qq update && apt-get install -y --no-install-recommends \
        locales locales-all \
        git \
        make \
        libc-dev \
        bzip2 \
        gcc \
    && pip install --no-cache-dir virtualenv ansible==11.11.0 \
    && apt-get autoremove -y && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /tmp/*

# Remove context from previous build; copy current context; run playbook
WORKDIR /tmp/ansible
RUN rm -rf *
# Add Galaxy source code
COPY . $SERVER_DIR/
RUN git clone --depth 1 --branch $GALAXY_PLAYBOOK_BRANCH $GALAXY_PLAYBOOK_REPO galaxy-docker
WORKDIR /tmp/ansible/galaxy-docker
RUN ansible-galaxy install -r requirements.yml -p roles --force-with-deps
RUN ansible-playbook -i localhost, playbook.yml -v -e galaxy_virtualenv_command=virtualenv

# Remove build artifacts + files not needed in container
WORKDIR $SERVER_DIR
# Save commit hash of HEAD before zapping git folder
RUN git rev-parse HEAD > GITREVISION
RUN rm -rf \
    .ci \
    .git \
    .venv/include/node \
    .venv/src/node* \
    client/dist \
    doc \
    test \
    test-data
# Clean up *all* node_modules, including plugins. Everything is already built+staged.
RUN find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
# Remove pre-built visualization plugin static files (not present in base image, ~220MB)
RUN find config/plugins/visualizations -mindepth 2 -maxdepth 2 -name "static" -type d -exec rm -rf '{}' +

#======================================================
# Stage 2 - Build final image based on previous stage
#======================================================
FROM $FINAL_STAGE_BASE
ARG DEBIAN_FRONTEND=noninteractive
ARG ROOT_DIR
ARG SERVER_DIR
ARG GALAXY_USER
ARG GIT_COMMIT
ARG BUILD_DATE
ARG IMAGE_TAG

LABEL org.opencontainers.image.title="Galaxy Minimal Image" \
      org.opencontainers.image.description="A size optimized image for Galaxy targeting k8s and ci applications" \
      org.opencontainers.image.authors="galaxyproject.org" \
      org.opencontainers.image.vendor="Galaxy Project" \
      org.opencontainers.image.documentation="https://github.com/galaxyproject/galaxy-docker-k8s" \
      org.opencontainers.image.licenses="MIT" \
      org.opencontainers.image.version="$IMAGE_TAG" \
      org.opencontainers.image.url="https://github.com/galaxyproject/galaxy-docker-k8s" \
      org.opencontainers.image.source="https://github.com/galaxyproject/galaxy.git" \
      org.opencontainers.image.revision=$GIT_COMMIT \
      org.opencontainers.image.created=$BUILD_DATE

# Init Env
ENV LC_ALL=en_US.UTF-8
ENV LANG=en_US.UTF-8

# Install procps (contains kill, ps etc.), less, curl, vim-tiny and nano-tiny
# for convenience and debugging purposes. Nano and vim commands are aliased
# to their tiny variants using the debian alternatives system.
# Bzip2 and virtualenv are installed for backwards compatibility with older
# versions of this image which was based on Ubuntu and contained these
# utilities.
# NOTE: curl uses -f (--fail) so an HTTP error aborts the build instead of
# silently installing an error page as /usr/bin/gxadmin; printf is used
# instead of echo because echo's backslash-escape handling is shell-specific.
RUN set -xe; \
    echo "Acquire::http {No-Cache=True;};" > /etc/apt/apt.conf.d/no-cache \
    && apt-get -qq update && apt-get install -y --no-install-recommends \
        locales \
        vim-tiny \
        nano-tiny \
        netcat-openbsd \
        curl \
        procps \
        less \
        bzip2 \
        tini \
        wget \
    && update-alternatives --install /usr/bin/nano nano /bin/nano-tiny 0 \
    && update-alternatives --install /usr/bin/vim vim /usr/bin/vim.tiny 0 \
    && printf 'set nocompatible\nset backspace=indent,eol,start\n' >> /usr/share/vim/vimrc.tiny \
    && echo "$LANG UTF-8" > /etc/locale.gen \
    && locale-gen $LANG && update-locale LANG=$LANG \
    && curl -fL https://github.com/galaxyproject/gxadmin/releases/latest/download/gxadmin > /usr/bin/gxadmin \
    && chmod +x /usr/bin/gxadmin \
    && apt-get autoremove -y && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /tmp/*

# Create Galaxy user, group, directory; chown
RUN set -xe; \
    adduser --system --group --uid 10001 $GALAXY_USER \
    && mkdir -p $SERVER_DIR \
    && chown $GALAXY_USER:$GALAXY_USER $ROOT_DIR -R
WORKDIR $ROOT_DIR

# Copy galaxy files to final image
# The chown value MUST be hardcoded (see https://github.com/moby/moby/issues/35018)
COPY --chown=$GALAXY_USER:$GALAXY_USER --from=stage1 $ROOT_DIR .
WORKDIR $SERVER_DIR

# The data in version.json will be displayed in Galaxy's /api/version endpoint
RUN printf "{\n  \"git_commit\": \"$(cat GITREVISION)\",\n  \"build_date\": \"$BUILD_DATE\",\n  \"image_tag\": \"$IMAGE_TAG\"\n}\n" > version.json \
    && chown $GALAXY_USER:$GALAXY_USER version.json

EXPOSE 8080
USER $GALAXY_USER
ENV PATH="$SERVER_DIR/.venv/bin:${PATH}"
ENV GALAXY_CONFIG_CONDA_AUTO_INIT=False
ENTRYPOINT ["tini", "--"]
# [optional] to run:
CMD ["galaxy"]
repos:
- repo: https://github.com/psf/black
rev: 26.1.0
hooks:
- id: black
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.8
hooks:
- id: ruff
args: [--fix]
- repo: https://github.com/pycqa/flake8
rev: 7.0.0
hooks:
- id: flake8
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
hooks:
- id: prettier
additional_dependencies:
- prettier@3.6.2 # SEE: https://github.com/pre-commit/pre-commit/issues/3133
exclude_types: # .prettierignore ignored apparently ...
- yaml
- json
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0 # Use the ref you want to point at
hooks:
- id: trailing-whitespace
- id: check-merge-conflict
- id: check-symlinks
- id: destroyed-symlinks
- id: end-of-file-fixer
- id: name-tests-test
- repo: https://github.com/detailyang/pre-commit-shell
rev: 1.0.5
hooks:
- id: shell-lint
args: [--format=json]
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.27.4
hooks:
- id: check-github-workflows
- repo: local
hooks:
- id: eslint
name: client eslint
language: system
files: ^client/
entry: .ci/eslint_wrapper.sh
types: [file]
types_or: [javascript, jsx, ts, tsx, vue]
# - repo: https://github.com/pycqa/isort
# rev: 5.10.1
# hooks:
# - id: isort
# name: isort (python)
# This file instructs Redocly's linter to ignore the rules contained for specific parts of your API.
# See https://redoc.ly/docs/cli/ for more information.
_schema.yaml:
no-empty-servers:
- '#/openapi'
no-ambiguous-paths:
- >-
#/paths/~1api~1histories~1{history_id}~1contents~1{dataset_id}~1permissions
- >-
#/paths/~1api~1histories~1{history_id}~1contents~1{history_content_id}~1display
- >-
#/paths/~1api~1histories~1{history_id}~1contents~1{history_content_id}~1extra_files
- >-
#/paths/~1api~1histories~1{history_id}~1contents~1{history_content_id}~1metadata_file
- >-
#/paths/~1api~1histories~1{history_id}~1contents~1{history_content_id}~1tags
- '#/paths/~1api~1histories~1{history_id}~1contents~1{id}~1validate'
- '#/paths/~1api~1histories~1{history_id}~1contents~1{type}s~1{id}'
- '#/paths/~1api~1invocations~1{invocation_id}~1biocompute'
- '#/paths/~1api~1invocations~1{invocation_id}~1jobs_summary'
- '#/paths/~1api~1invocations~1{invocation_id}~1prepare_store_download'
- '#/paths/~1api~1invocations~1{invocation_id}~1report'
- '#/paths/~1api~1invocations~1{invocation_id}~1report.pdf'
- '#/paths/~1api~1invocations~1{invocation_id}~1step_jobs_summary'
- '#/paths/~1api~1invocations~1{invocation_id}~1write_store'
- '#/paths/~1api~1quotas~1{id}~1purge'
- '#/paths/~1api~1users~1{user_id}~1api_key'
- '#/paths/~1api~1users~1{user_id}~1beacon'
- '#/paths/~1api~1users~1{user_id}~1custom_builds'
- '#/paths/~1api~1users~1{user_id}~1recalculate_disk_usage'
- '#/paths/~1api~1users~1{user_id}~1send_activation_email'
- '#/paths/~1api~1users~1{user_id}~1usage'
no-unused-components:
- '#/components/schemas/FetchDataPayload'
security-defined:
- '#/paths/~1api~1authenticate~1baseauth/get'
_shed_schema.yaml:
no-empty-servers:
- '#/openapi'
operation-2xx-response:
- '#/paths/~1repository~1status_for_installed_repository/get'
security-defined:
- '#/paths/~1api~1authenticate~1baseauth/get'
- '#/paths/~1api~1categories~1{encoded_category_id}/get'
- '#/paths/~1api~1ga4gh~1trs~1v2~1service-info/get'
- '#/paths/~1api~1ga4gh~1trs~1v2~1toolClasses/get'
- '#/paths/~1api~1ga4gh~1trs~1v2~1tools/get'
- '#/paths/~1api~1repositories~1get_ordered_installable_revisions/get'
- '#/paths/~1api~1repositories~1updates/get'
- '#/paths/~1api~1repositories~1{encoded_repository_id}/get'
- '#/paths/~1api~1repositories~1{encoded_repository_id}~1metadata/get'
- >-
#/paths/~1api~1repositories~1{encoded_repository_id}~1revisions~1{changeset_revision}~1readmes/get
- '#/paths/~1api~1version/get'
- >-
#/paths/~1api_internal~1repositories~1{encoded_repository_id}~1metadata/get
- '#/paths/~1repository~1get_changeset_revision_and_ctx_rev/get'
- '#/paths/~1repository~1get_ctx_rev/get'
- '#/paths/~1repository~1get_repository_type/get'
- '#/paths/~1repository~1get_required_repo_info_dict/get'
- '#/paths/~1repository~1get_tool_dependencies/get'
- '#/paths/~1repository~1next_installable_changeset_revision/get'
- '#/paths/~1repository~1previous_changeset_revisions/get'
- '#/paths/~1repository~1static~1images~1{repository_id}~1{image_file}/get'
- '#/paths/~1repository~1status_for_installed_repository/get'
- '#/paths/~1repository~1updated_changeset_revisions/get'
organization: galaxyproject.org
extends:
- recommended
rules:
operation-4xx-response: off
GALAXY_TEST_TOOL_CONF="lib/galaxy/config/sample/tool_conf.xml.sample,test/functional/tools/sample_tool_conf.xml"
{
"version": "0.2.0",
"configurations": [
{
"name": "GalaxyFastAPI uvicorn",
"type": "python",
"request": "launch",
"module": "uvicorn",
"args": ["--app-dir", "lib", "--factory", "galaxy.webapps.galaxy.fast_factory:factory"],
"env": {
"GALAXY_CONFIG_FILE": "${workspaceFolder}/config/galaxy.yml",
"GALAXY_CONFIG_CONDA_AUTO_INIT": "false",
"GALAXY_CONFIG_TOOL_CONFIG_FILE": "lib/galaxy/config/sample/tool_conf.xml.sample,test/functional/tools/sample_tool_conf.xml",
"GALAXY_CONFIG_DATABASE_CONNECTION": "postgresql://localhost/galaxy"
}
},
{
"name": "Tool test framework",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/.venv/bin/pytest",
"args": [
"test/functional/test_toolbox_pytest.py",
"-m",
"tool",
"-k",
"job_properties_test_2"
]
},
{
"type": "node",
"name": "debug selected jest unit test",
"request": "launch",
"program": "${workspaceFolder}/client/node_modules/jest/bin/jest",
"args": [
"--runInBand",
"--config",
"${workspaceFolder}/client/tests/jest/jest.config.js",
"${file}"
],
"cwd": "${workspaceFolder}/client",
"console": "integratedTerminal",
"disableOptimisticBPs": true
}
]
}
{
"python.testing.pytestArgs": [
"--doctest-modules",
"lib/galaxy_test/api/",
"lib/galaxy/datatypes",
"test/unit",
"test/integration",
"lib/galaxy_test/selenium/"
],
"python.testing.pytestEnabled": true,
"python.envFile": "${workspaceFolder}/.vscode/.test.env"
}
{
"shedcomp": {
"prefix": "shed_component",
"body": [
"<script setup lang=\"ts\">",
"\t$0",
"</script>",
"<template>",
"</template>"
],
"description": "outline of a tool shed component"
},
"shedpage": {
"prefix": "shed_page",
"body": [
"<script setup lang=\"ts\">",
"import PageContainer from \"@/components/PageContainer.vue\"",
"</script>",
"<template>",
" <page-container>",
" $0",
" </page-container>",
"</template>"
],
"description": "outline of a tool shed page"
},
"shedfetcher": {
"prefix": "shed_fetcher",
"body": [
"import { fetcher } from \"@/schema\"",
"const ${2:name}Fetcher = fetcher.path(\"$1\").method(\"get\").create()"
],
"description": "Import shed fetcher and instantiate with a path"
},
"shedrouter": {
"prefix": "shed_router",
"body": [
"import router from \"@/router\""
]
}
}
\ No newline at end of file
If you use or extend Galaxy in your published work, please cite this
publication:
- The Galaxy Community. "The Galaxy platform for accessible, reproducible, and
collaborative data analyses: 2024 update"
Nucleic Acids Res. (2024) 52(W1):W83-W94 doi:10.1093/nar/gkae410
BibTeX format:
@article{10.1093/nar/gkae410,
title = {The {Galaxy} platform for accessible, reproducible, and collaborative data analyses: 2024 update},
author = {The Galaxy Community},
journal = {Nucleic Acids Res.},
doi = {10.1093/nar/gkae410},
volume = {52},
number = {W1},
pages = {W83-W94},
year = {2024},
url = {https://doi.org/10.1093/nar/gkae410},
publisher = {Oxford University Press}
}
More publications
-----------------
To cite specific Galaxy components/features or other Galaxy project
publications, see
https://galaxyproject.org/citing-galaxy
# Galaxy Project Code of Conduct
This project is committed to providing a welcoming and harassment-free
experience for everyone. We therefore expect participants to abide by our Code
of Conduct, which can be found at:
https://galaxyproject.org/community/coc/
# Contributing
Galaxy welcomes new development! This document briefly describes how to
contribute to the [core galaxy repository](https://github.com/galaxyproject/galaxy).
For general information on the Galaxy ecosystem, please see the
[Galaxy Community Hub](https://galaxyproject.org).
For a description of how the Galaxy code is structured, see the
[Galaxy Code Architecture slides](https://training.galaxyproject.org/training-material/topics/dev/tutorials/architecture/slides.html)
that are part of the [Galaxy Training Materials](https://training.galaxyproject.org/).
## Before you Begin
If you have an idea for a feature to add or an approach for a bugfix, it is
best to communicate with Galaxy developers early. The primary venue for this is
the [GitHub issue tracker](https://github.com/galaxyproject/galaxy/issues).
Browse through existing GitHub issues and if one seems related, comment on it.
For more direct communication, Galaxy developers are generally available on
the [Galaxy Matrix space](https://matrix.to/#/#galaxyproject:matrix.org), in
particular on the [galaxyproject/dev channel](https://matrix.to/#/#galaxyproject_dev:gitter.im)
and in the various [Working Group](https://galaxyproject.org/community/wg/)
channels.
If you're looking to help but aren't sure where to start, we also maintain a
[tag](https://github.com/galaxyproject/galaxy/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22)
on GitHub for smaller issues we believe would make the best entry points for
new developers.
## Reporting a new issue
If no existing Galaxy issue seems appropriate, a new issue can be opened using
[this form](https://github.com/galaxyproject/galaxy/issues/new).
## How to Contribute
All changes to the [core galaxy
repository](https://github.com/galaxyproject/galaxy) should be made through pull
requests (with just two exceptions outlined below).
If you are new to Git, the Software Carpentry's [Version Control with
Git](https://swcarpentry.github.io/git-novice/) tutorial is a good place to
start. More learning resources are listed at
https://help.github.com/en/github/getting-started-with-github/git-and-github-learning-resources
1. Make sure you have a free [GitHub](https://github.com/) account. To increase
the security of your account, we strongly recommend that you configure
[two-factor authentication](https://docs.github.com/en/github/authenticating-to-github/securing-your-account-with-two-factor-authentication-2fa).
Additionally, you may want to [sign your commits](https://docs.github.com/en/github/authenticating-to-github/managing-commit-signature-verification).
2. Fork the [galaxy repository](https://github.com/galaxyproject/galaxy) on
GitHub to make your changes. To keep your copy up to date with respect to
the main repository, you need to frequently [sync your
fork](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork):
```
$ git remote add upstream https://github.com/galaxyproject/galaxy
$ git fetch upstream
$ git checkout dev
$ git merge upstream/dev
```
3. Choose the correct branch to develop your changes against.
* The `master` branch is kept in sync with the latest tagged release, but
should **not** be used as the base (i.e. target) branch of a pull request.
* Additions of new features to the codebase should be based off the `dev`
branch (`git checkout -b feature_branch dev`), with few
[exceptions](doc/source/project/organization.rst#handling-pull-requests).
* Most bug fixes should target the oldest supported release exhibiting the
issue (`git checkout -b bugfix_branch release_XX.XX`).
* Serious security problems should not be fixed via pull request - please see
[the Galaxy security policies](SECURITY.md) for information about
responsibly disclosing security issues.
4. If your changes modify code please ensure the resulting files conform to
the [style guidelines](#style-guidelines) below.
If you are working on the Galaxy user interface (i.e. JavaScript,
styles, etc.), see more information in the [client README](client/README.md).
5. Galaxy contains hundreds of tests of different types and complexity and
running each is difficult and probably not reasonable on your workstation. So
please review the [running tests documentation](test/TESTING.md) and run any
that seem relevant.
If possible, also try to add new tests for the features added or bugs fixed
by your pull request.
Developers reviewing your pull request will be happy to help you add or run
the relevant tests as part of the pull request review process.
6. Write a useful and properly formatted commit message.
Follow [these guidelines and template](https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project#_commit_guidelines),
in particular start your message with a short imperative sentence on a single
line, possibly followed by a blank line and a more detailed explanation.
In the detailed explanation it's good to include relevant references (e.g.
any GitHub issue being fixed) using full URLs, and errors or tracebacks the
commit is meant to fix.
You can use the Markdown syntax for lists and code highlighting, wrapping the
explanation text at 72 characters when possible.
Example of a good commit message: https://github.com/galaxyproject/galaxy/commit/0429c4d515536f9cca6b70b2abeb019de807c955
7. Commit and push your changes to your
[fork](https://help.github.com/en/github/using-git/pushing-commits-to-a-remote-repository).
8. Open a [pull
request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request)
with these changes. Your pull request message ideally should include:
* Why you made the changes (e.g. references to GitHub issues being fixed).
* A description of the implementation of the changes.
* How to test the changes, if you haven't included specific tests already.
9. The pull request should pass all the continuous integration tests which are
automatically started by GitHub.
10. Your pull request will be handled according to [some
rules](doc/source/project/organization.rst#handling-pull-requests).
11. If, before your pull request is merged, conflicts arise between your branch
and the target branch (because other commits were pushed to the target
branch), you need to either:
1) [rebase your branch](https://git-scm.com/docs/git-rebase) on top of the
target branch, or
2) merge the target branch into your branch.
We recommend the first approach (i.e. rebasing) because it produces cleaner
git histories, which are easier to bisect. If your branch is called
`feature_branch` and your target branch is `dev`, you can rebase your branch
with the following commands:
```
$ git checkout feature_branch
$ git pull
$ git fetch upstream
$ git rebase upstream/dev
```
Once you have resolved the conflicts in all commits of your branch, you can
force-push the rebased branch to update the pull request:
```
$ git push --force
```
## Style guidelines
### Python
- Galaxy follows [PEP-8](https://www.python.org/dev/peps/pep-0008/), with
particular emphasis on readability being the ultimate goal:
- 4 spaces (not tabs!) per indentation level
- divergences from PEP-8 are listed in the `[flake8]` section of the `.flake8`
file and in the `[tool.ruff]` section of the `pyproject.toml` file.
- The Python code base is automatically formatted using
[isort](https://pycqa.github.io/isort/) (for imports) and
[black](https://black.readthedocs.io). To easily format your Python code
before submitting your contribution, please either use `make diff-format`
or run `isort FILE; black FILE` for each FILE you modify.
- Python [docstrings](http://www.python.org/dev/peps/pep-0257/) need to be in
[reStructured Text (RST)](https://docutils.sourceforge.io/rst.html) format and
compatible with [Sphinx](https://www.sphinx-doc.org).
- String formatting should normally be done using
[formatted string literals (f-strings)](https://docs.python.org/3/tutorial/inputoutput.html#formatted-string-literals),
except:
- when the format string is kept in a separate variable, in which case the
[string ``format()`` method](https://docs.python.org/3/tutorial/inputoutput.html#the-string-format-method)
should be used;
- when [formatting a log message](https://docs.python.org/3/library/logging.html#logging.Logger.debug),
in which case it's better to use a
[`printf`-style](https://docs.python.org/3/library/stdtypes.html#old-string-formatting)
message format string and pass its arguments to the logging method
separately. This is a bit more efficient than using f-strings and allows for
better log aggregation. For more information, see this
[blog post](https://dev.to/izabelakowal/what-is-the-best-string-formatting-technique-for-logging-in-python-d1d).
## Documentation
General documentation (e.g. admin, development, release notes) is found in the
``doc/source/`` directory.
The documentation source files need to be written in one of these markup
languages:
- [reStructuredText](https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html)
(with Sphinx extensions)
- [Markdown](https://myst-parser.readthedocs.io/en/latest/syntax/typography.html)
(with MyST-Parser extensions).
These source files are then built into HTML documentation with
[Sphinx](https://www.sphinx-doc.org/) by running ``make docs`` and published on
the [Galaxy Documentation website](https://docs.galaxyproject.org/).
## A Quick Note about Tools
For the most part, Galaxy tools should be published to a [Tool
Shed](https://galaxyproject.org/toolshed) and not in this repository directly.
More information about tool development can be found [on the community
hub](https://galaxyproject.org/develop).
# Contributors
The following individuals have contributed code to Galaxy:
* Enis Afgan <afgane@gmail.com>
* Istvan Albert <istvan.albert@gmail.com>
* Renato Alves <alves.rjc@gmail.com> <rjalves@igc.gulbenkian.pt>
* Guruprasad Ananda <gua110@bx.psu.edu>
* Evgeny Anatskiy <evgeny.anatskiy@gmail.com>
* Florent Angly <florent.angly@gmail.com>
* Patrick Austin <patrick.austin@stfc.ac.uk>
* Raj Ayyampalayam <raj76@uga.edu>
* Abdulrahman Azab <eng.azab@gmail.com>
* Finn Bacall <finn.bacall@manchester.ac.uk>
* Dannon Baker <dannon.baker@gmail.com>
* balto <balto_59@hotmail.fr>
* Christopher Bare <christopherbare@gmail.com>
* Bérénice Batut <berenice.batut@gmail.com>
* Marius van den Beek <m.vandenbeek@gmail.com>
* Maria Bernard <maria.bernard@jouy.inra.fr>
* Jean-Frédéric Berthelot <jean-frederic.berthelot@lifl.fr>
* Léo Biscassi <leo.biscassi@gmail.com>
* Dan Blanchard <dan.blanchard@gmail.com>
* Clemens Blank <blankclemens@gmail.com>
* Daniel Blankenberg <dan.blankenberg@gmail.com> <dan@bx.psu.edu>
* Jorrit Boekel <jorrit.boekel@scilifelab.se>
* James Boocock <sfk2001@gmail.com>
* Carlos Borroto <carlos.borroto@gmail.com>
* Daniel Bouchard <dbouchard@corefacility.ca> <daniel.bouchard@phac-aspc.gc.ca>
* Dave Bouvier <dave@bx.psu.edu>
* Adam Brenner <aebrenne@uci.edu>
* Anthony Bretaudeau <anthony.bretaudeau@rennes.inra.fr> <abretaud@irisa.fr>
* Christian Y. Brenninkmeijer <christian.brenninkmeijer@manchester.ac.uk>
* Freek de Bruijn <freek.de.bruijn@nbic.nl>
* Richard Burhans <burhans@bx.psu.edu>
* Jennifer Cabral <jencabral@gmail.com>
* Martin Čech <marten@bx.psu.edu>
* Eli Chadwick <eli.chadwick@manchester.ac.uk>
* Ramkrishna Chakrabarty <rc@bx.psu.edu>
* Matt Chambers <matt.chambers42@gmail.com>
* Brad Chapman <chapmanb@50mail.com>
* John Chilton <jmchilton@gmail.com>
* Saket Choudhary <saketkc@gmail.com>
* Wen-Yu Chung <wychung@bx.psu.edu>
* Dave Clements <clements@galaxyproject.org>
* Peter Cock <p.j.a.cock@googlemail.com> <peter.cock@hutton.ac.uk>
* Ira Cooke <iracooke@gmail.com>
* Nate Coraor <nate@bx.psu.edu>
* Michael Cotterell <mepcotterell@gmail.com>
* Michael R. Crusoe <crusoe@ucdavis.edu>
* Gianmauro Cuccuru <gmauro@crs4.it>
* Frederik Delaere <frederik.delaere@gmail.com>
* Matthias Desmet <matthias.desmet@ugent.be>
* Matthew Ryan Dillon <matthewrdillon@gmail.com>
* Olivia Doppelt <olivia.doppelt@pasteur.fr>
* Shane Dowling <shane@shanedowling.com>
* John Duddy <jduddy@illumina.com>
* Carl Eberhard <carlfeberhard@gmail.com>
* Ignacio Eguinoa <ignacio.eguinoa@gmail.com>
* Mark Einon <mark.einon@gmail.com>
* Kyle Ellrott <kellrott@gmail.com> <kellrott@soe.ucsc.edu>
* Eric Enns <eric.enns@gmail.com>
* fescudie <fescudie@toulouse.inra.fr>
* Anne Fouilloux <annefou@uio.no>
* Dorine Francheteau <dorine@bx.psu.edu>
* Jean-Frédéric (@JeanFred on Github)
* Maximilian Friedersdorff <max@friedersdorff.com>
* Jaime Frey <jfrey@cs.wisc.edu>
* Ben Fulton <benmarkfulton@gmail.com>
* Carrie Ganote <cganote@iu.edu>
* Ryan Golhar <ngsbioinformatics@gmail.com>
* Jeremy Goecks <jeremy.goecks@moffitt.org>
* Nuwan Goonasekera <nuwan.goonasekera@gmail.com>
* Björn Grüning <bjoern.gruening@gmail.com> <bjoern@gruenings.eu>
* Aysam Guerler <aysam.guerler@gmail.com>
* Simon Guest <simon.guest@agresearch.co.nz>
* Nalin Gupta <nalin.gupta@stfc.ac.uk>
* Jianbin He <jbhe@bx.psu.edu>
* Peter van Heusden <pvh@sanbi.ac.za>
* Morita Hideyuki <h-morita@esm.co.jp>
* Saskia Hiltemann <zazkia@gmail.com>
* Rob Hooft <rob.hooft@nbic.nl>
* Y. Hoogstrate <y.hoogstrate@erasmusmc.nl>
* Hans-Rudolf Hotz <hrhotz@gmail.com>
* Jian-Long Huang <jlh@pyhub.org>
* Gert Hulselmans <gert.hulselmans@med.kuleuven.be>
* Manabu Ishii <manabu.ishii.rb@gmail.com>
* Jennifer Jackson <jen@bx.psu.edu>
* Joachim Jacob <joachim.jacob@gmail.com>
* Xiaoqian Jiang <jxq198409@hotmail.com>
* Jim Johnson <jj@umn.edu> <jj@msi.umn.edu>
* Kaivan Kamali <kxk302@gmail.com>
* Radhesh Kamath <radhesh@bx.psu.edu>
* Iyad Kandalaft <ik@iyadk.com>
* Jan Kanis <jan.code@jankanis.nl>
* David King <dcking@bx.psu.edu>
* Rory Kirchner <roryk@mit.edu>
* Edward Kirton <eskirton@lbl.gov>
* Jasper Koehorst <jasperkoehorst@gmail.com>
* Anup Kumar <anup.rulez@gmail.com>
* Brad Langhorst <langhorst@neb.com>
* Delphine Lariviere <lariviere.delphine@gmail.com>
* Ross Lazarus <ross.lazarus@gmail.com> <rossl@bx.psu.edu>
* Yvan Le Bras <yvan.le_bras@irisa.fr>
* Gildas Le Corguillé @lecorguille
* Alexander Lenail <alexander.lenail@tufts.edu>
* Simone Leo <simone.leo@gmail.com>
* Kanwei Li <kanwei@gmail.com>
* Michael Li <michael.li@uwaterloo.ca>
* Pierre Lindenbaum <plindenbaum@yahoo.fr>
* Mikael Loaec <mikael.loaec@versailles.inra.fr>
* Thoba Lose <lose.thoba@gmail.com>
* Philip Mabon <philipmabon@gmail.com>
* Remi Marenco <remi.marenco@gmail.com> <remimarenco@gmail.com>
* Zipho Mashologu <zipho@trustpay.biz>
* Thomas McGowan <mcgo0092@msi.umn.edu>
* Scott McManus <scottmcmanus@emory.edu> <scottmcmanus@gatech.edu>
* Hervé Ménager <herve.menager@pasteur.fr>
* Pablo Moreno <pablo.a.moreno@gmail.com>
* Hunter Moseley <hunter.moseley@louisville.edu>
* Takao Nakaguchi <takao.nakaguchi@gmail.com>
* Arjun Nath <arjun@bx.psu.edu>
* Anton Nekrutenko <anton@bx.psu.edu> <anton@nekrut.org>
* Eric Paniagua <paniagua.cshl@gmail.com>
* Richard Park <rpark@bu.edu>
* Lance Parsons <lparsons@princeton.edu>
* Balthazar Pavot <balthazar.pavot@hotmail.fr>
* Chinmay Rao <chinmay@bx.psu.edu>
* Matt Ralston <mrals89@gmail.com>
* ramezrawas <ramezrawas@gmail.com>
* Helena Rasche <helena.rasche@gmail.com>
* Athos Ribeiro <athoscr@fedoraproject.org>
* Andrew Robinson <Andrew.Robinson@latrobe.edu.au>
* Devon Ryan <dpryan79@gmail.com>
* Michael Sauria <crockopotamus@gmail.com>
* Andrea Sbardellati <andrea.sbardellati@crs4.it>
* Ian Schenck <ian@bx.psu.edu>
* Jelle Scholtalbers <j.scholtalbers@gmail.com>
* Nick Semenkovich <semenko@alum.mit.edu>
* Varun Shankar <varunshankar55@gmail.com>
* Matthew Shirley <mdshw5@gmail.com>
* Timur Shtatland <shtatland@neb.com>
* Sourav Singh <ssouravsingh12@gmail.com>
* Clare Sloggett <sloc@unimelb.edu.au>
* Eteri Sokhoyan @Sokhoyan
* Nicola Soranzo <nicola.soranzo@earlham.ac.uk> <nicola.soranzo@tgac.ac.uk> <nsoranzo@tiscali.it> <soranzo@crs4.it>
* Nick Stoler <nick@nstoler.com>
* Roy Storey <kiwiroy@gmail.com>
* Hanfei Sun <ad9075@gmail.com>
* Ilya Sytchev <hackdna@gmail.com>
* Scott Szakonyi <sszakony@nd.edu>
* James Taylor <james@jamestaylor.org>
* Tomithy Too <tomithy.too@gmail.com>
* David Trudgian <dave@trudgian.net> <david.trudgian@utsouthwestern.edu>
* Nitesh Turaga <nitesh.turaga@gmail.com>
* Clayton Turner <clayclay911@gmail.com>
* Jesse c j van Dam <jesse.vandam@wur.nl>
* Ashok Varadharajan <ashvark@gmail.com>
* Marek Vavruša <marek@vavrusa.com>
* Martijn Vermaat <m.vermaat.hg@lumc.nl>
* Kelly Vincent <kpvincent@bx.psu.edu>
* Jeremy Volkening <jdv@base2bio.com>
* Greg Von Kuster <greg@bx.psu.edu>
* Pavan Videm <videmp@informatik.uni-freiburg.de>
* Hiral Vora <hvora1@uncc.edu>
* Junzhou Wang <junzhouwang@gmail.com>
* Andrew Warren <anwarren@vbi.vt.edu>
* Trevor Wennblom <trevor@well.com>
* Joachim Wolff <wolffj@informatik.uni-freiburg.de>
* Thomas Wollmann <thomas.s.wollmann@gmail.com> <thomas.wollmann@bioquant.uni-heidelberg.de>
* Jay Young <xiaojay@gmail.com>
* Yi Zhang <yizhang@bx.psu.edu>
# Institutional sponsors
Galaxy development began at The Pennsylvania State University in 2006.
In 2009 all contributions to that point were licensed by The
Pennsylvania State University under the terms of the Academic Free
License 3.0 (see LICENSE.txt). This license applies to all subsequent
contributions - including but not limited to development at The
Pennsylvania State University, Emory University, Johns Hopkins
University, and George Washington University as part of the following
NIH and NSF grants:
* NSF DBI 0543285, “Tailoring genomic data to the needs of experimental
biologists and educators”
* NIH R21 HG005133, “A turnkey solution for next generation sequence
data analysis”
* NIH R01 HG004909, “An efficient lightweight environment for biomedical
computation”
* NSF DBI 0850103, “Cyberinfrastructure for accessible and reproducible
research in life sciences”
* NIH RC2 HG005542, “Dynamically scalable accessible analysis for next
generation sequence data”
* NIH U41 HG006620, “Democratization of data analysis in life sciences
through Galaxy”
Copyright (c) 2005-2026 Galaxy Contributors (see CONTRIBUTORS.md)
Galaxy is provided from 2026-02-25 onwards entirely under the MIT License.
Some icons found in Galaxy are from the Silk Icons set, available under
the Creative Commons Attribution 2.5 License, from:
http://www.famfamfam.com/lab/icons/silk/
Other images and documentation are licensed under the Creative Commons
Attribution 3.0 (CC BY 3.0) License. See:
http://creativecommons.org/licenses/by/3.0/
--------------------------------------------------------------------------------
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
This diff is collapsed.
.. figure:: https://galaxyproject.org/images/galaxy-logos/galaxy_project_logo.jpg
:alt: Galaxy Logo
The latest information about Galaxy can be found on the `Galaxy Community Hub <https://galaxyproject.org/>`__.
Community support is available at `Galaxy Help <https://help.galaxyproject.org/>`__.
.. image:: https://img.shields.io/badge/chat-gitter-blue.svg
:target: https://gitter.im/galaxyproject/Lobby
:alt: Chat on gitter
.. image:: https://img.shields.io/badge/chat-irc.freenode.net%23galaxyproject-blue.svg
:target: https://webchat.freenode.net/?channels=galaxyproject
:alt: Chat on irc
.. image:: https://img.shields.io/badge/release-documentation-blue.svg
:target: https://docs.galaxyproject.org/en/master/
:alt: Release Documentation
.. image:: https://travis-ci.org/galaxyproject/galaxy.svg?branch=dev
:target: https://travis-ci.org/galaxyproject/galaxy
:alt: Inspect the test results
Galaxy Quickstart
=================
Galaxy requires Python 3.10 or higher. To check your Python version, run:
.. code:: console
$ python -V
Python 3.10.12
Start Galaxy:
.. code:: console
$ sh run.sh
Once Galaxy completes startup, you should be able to view Galaxy in your
browser at: http://localhost:8080
For more installation details please see: https://getgalaxy.org/
Documentation is available at: https://docs.galaxyproject.org/
Tutorials on how to use Galaxy, perform scientific analyses with it, develop Galaxy and its tools, and admin a Galaxy server are at: https://training.galaxyproject.org/
Tools
=====
Tools can be either installed from the Tool Shed or added manually.
For details please see the `tutorial <https://galaxyproject.org/admin/tools/add-tool-from-toolshed-tutorial/>`__.
Note that not all dependencies for the tools provided in the
``tool_conf.xml.sample`` are included. To install them please visit
"Manage dependencies" in the admin interface.
Issues and Galaxy Development
=============================
Please see `CONTRIBUTING.md <CONTRIBUTING.md>`_ .
.. figure:: https://galaxyproject.org/images/galaxy-logos/galaxy_project_logo.jpg
:alt: Galaxy Logo
The latest information about Galaxy can be found on the `Galaxy Community Hub <https://galaxyproject.org/>`__.
Community support is available at `Galaxy Help <https://help.galaxyproject.org/>`__.
.. image:: https://img.shields.io/badge/chat-gitter-blue.svg
:target: https://gitter.im/galaxyproject/Lobby
:alt: Chat on gitter
.. image:: https://img.shields.io/badge/chat-irc.freenode.net%23galaxyproject-blue.svg
:target: https://webchat.freenode.net/?channels=galaxyproject
:alt: Chat on irc
.. image:: https://img.shields.io/badge/release-documentation-blue.svg
:target: https://docs.galaxyproject.org/en/master/
:alt: Release Documentation
.. image:: https://travis-ci.org/galaxyproject/galaxy.svg?branch=dev
:target: https://travis-ci.org/galaxyproject/galaxy
:alt: Inspect the test results
Galaxy Quickstart
=================
Galaxy requires Python 3.10 or higher. To check your Python version, run:
.. code:: console
$ python -V
Python 3.10.12
Start Galaxy:
.. code:: console
$ sh run.sh
Once Galaxy completes startup, you should be able to view Galaxy in your
browser at: http://localhost:8080
For more installation details please see: https://getgalaxy.org/
Documentation is available at: https://docs.galaxyproject.org/
Tutorials on how to use Galaxy, perform scientific analyses with it, develop Galaxy and its tools, and admin a Galaxy server are at: https://training.galaxyproject.org/
Tools
=====
Tools can be either installed from the Tool Shed or added manually.
For details please see the `tutorial <https://galaxyproject.org/admin/tools/add-tool-from-toolshed-tutorial/>`__.
Note that not all dependencies for the tools provided in the
``tool_conf.xml.sample`` are included. To install them please visit
"Manage dependencies" in the admin interface.
Issues and Galaxy Development
=============================
Please see `CONTRIBUTING.md <CONTRIBUTING.md>`_ .
# Security
The Galaxy project is strongly committed to security and responsible disclosure. We have adopted and published a set of policies specifying how we will act in response to reported security issues, in order to ensure timely updates are made available to all affected parties.
## Reporting Security Issues
If you believe you have discovered a security issue, please email [galaxy-committers@lists.galaxyproject.org](mailto:galaxy-committers@lists.galaxyproject.org). Please use `[SECURITY]` in the email title. Alternatively you can report a security vulnerability using GitHub [private reporting](https://github.com/galaxyproject/galaxy/security/advisories/new). In either case one of the maintainers will acknowledge your report within 2 US business days.
We ask that you not disclose the issues publicly. We will provide you credit for the discovery when publicly disclosing the issue.
Security issues which *only* affect a pre-release version of Galaxy (i.e. the `dev` branch in GitHub) do not need to go through this process, so you may open issues and pull requests publicly.
## Supported versions
The following branches or releases receive security support:
- Development on the `dev` branch, hosted on GitHub, which will become the next release of Galaxy
- Releases within the past 12 months.
- E.g. 24.0 will receive support for a full year, at which point 25.0 will be available.
For unsupported branches:
- Older versions of Galaxy may be affected by security issues.
- Security patches *may* still apply cleanly to older versions, but this is not guaranteed.
- The security team does not commit to investigating issues that pertain to unsupported releases.
- The security team does not commit to issuing patches or new releases of unsupported versions.
## Issue Severity
Galaxy takes a very conservative stance on issue severity as individual Galaxy instances often install tools and make customizations that might increase their risk in the face of otherwise less-serious vulnerabilities. As a result, issues that would be considered less-severe in other projects may be treated as higher risk here.
### Issue Classification
Severity | Examples
------------ | ---------
High | Remote code execution (RCE), SQL Injection, Cross-site scripting (XSS), and *any issue allowing user impersonation*.
Medium / Low | Unvalidated redirects/forwards, Issues due to uncommon configuration options.
These are only examples. The security team will provide a severity classification based on its impact on the average Galaxy instance. However, Galaxy administrators should take it upon themselves to evaluate the impact for their instance(s).
## Notification of Vulnerabilities
For high severity issues, we will notify [the list of public Galaxy owners](https://lists.galaxyproject.org/lists/galaxy-public-servers.lists.galaxyproject.org/) with:
- A description of the issue
- List of supported versions that are affected
- Steps to update or patch your Galaxy
The issue will then be embargoed for three (3) days. For medium and low
severity issues, or for publicly announcing high severity issues after the
embargo, we will:
- Patch the oldest release within the 12 month support window, and merge that fix forward.
- Updates will be available on the `release_XX.YY` branches.
- Update each release branch
- Publish a repository security advisory on GitHub containing:
- A description of the issue
- List of supported versions that are affected
- Steps to update or patch your Galaxy
- A CVE identifier (if applicable)
If an issue is deemed to be time-sensitive – e.g. due to active and ongoing exploits in the wild – the embargo may be shortened considerably.
If we believe that the reported issue affects other Galaxy Project components or projects outside of the Galaxy ecosystem, we may discuss the issue with those projects and coordinate disclosure and resolution with them.
#!/bin/sh
# Run the Galaxy model consistency checker (scripts/check_model.py),
# passing any command-line arguments straight through to it.

# Work from the script's own directory so relative paths below resolve;
# abort immediately if the cd fails instead of sourcing/running from the
# wrong location.
cd "$(dirname "$0")" || exit 1

. ./scripts/common_startup_functions.sh

setup_python

# Let the checker's own exit status propagate as this script's status.
# (The previous `[ $? -ne 0 ] && exit 1; exit 0` tail collapsed every
# failure to 1 and hid the real exit code.)
python ./scripts/check_model.py "$@"
# Development files
node_modules/
coverage/
.vscode/
.idea/
.git/
.github/
.DS_Store
# Source and test files (only dist is published)
src/examples/
src/**/*.test.ts
vitest.config.ts
tsconfig.json
tsconfig.*.json
tsup.config.ts
# CI/CD
*.yml
*.yaml
# Logs
*.log
npm-debug.log*
pnpm-debug.log*
yarn-debug.log*
yarn-error.log*
# Lock files
package-lock.json
pnpm-lock.yaml
yarn.lock
# Not ignoring src directory since we're including it for source maps and debugging
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
../../client/src/api
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment