Merge pull request #22080 from NousResearch/fix/faster-docker

ci: split docker-publish per-arch runners + cache-friendly dockerfile layers
This commit is contained in:
ethernet 2026-05-08 19:12:14 -04:00 committed by GitHub
commit d10d19ebb7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 514 additions and 90 deletions

View file

@@ -0,0 +1,47 @@
name: Hermes smoke test
description: >
Run the image's built-in entrypoint against `--help` and `dashboard --help`
to catch basic runtime regressions before publishing. Requires the image
to already be loaded into the local Docker daemon under `image`.
Works identically on amd64 and arm64 runners.
inputs:
image:
description: Fully-qualified image tag (e.g. nousresearch/hermes-agent:test)
required: true
runs:
using: composite
steps:
- name: Ensure /tmp/hermes-test is hermes-writable
shell: bash
run: |
# The image runs as the hermes user (UID 10000). GitHub Actions
# creates /tmp/hermes-test root-owned by default, which hermes
# can't write to — chown it to match the in-container UID before
# bind-mounting. Real users doing `docker run -v ~/.hermes:...`
# with their own UID hit the same issue and have their own
# remediations (HERMES_UID env var, or chown locally).
mkdir -p /tmp/hermes-test
sudo chown -R 10000:10000 /tmp/hermes-test
- name: hermes --help
shell: bash
run: |
docker run --rm \
-v /tmp/hermes-test:/opt/data \
--entrypoint /opt/hermes/docker/entrypoint.sh \
"${{ inputs.image }}" --help
- name: hermes dashboard --help
shell: bash
run: |
# Regression guard for #9153: dashboard was present in source but
# missing from the published image. If this fails, something in
# the Dockerfile is excluding the dashboard subcommand from the
# installed package.
docker run --rm \
-v /tmp/hermes-test:/opt/data \
--entrypoint /opt/hermes/docker/entrypoint.sh \
"${{ inputs.image }}" dashboard --help

View file

@@ -10,48 +10,59 @@ on:
- 'Dockerfile'
- 'docker/**'
- '.github/workflows/docker-publish.yml'
- '.github/actions/hermes-smoke-test/**'
pull_request:
branches: [main]
paths:
- '**/*.py'
- 'pyproject.toml'
- 'uv.lock'
- 'Dockerfile'
- 'docker/**'
- '.github/workflows/docker-publish.yml'
- '.github/actions/hermes-smoke-test/**'
release:
types: [published]
permissions:
contents: read
# Top-level concurrency: do NOT cancel in-flight builds when a new push lands.
# Every commit deserves its own SHA-tagged image in the registry, and we guard
# the :latest tag in a separate job below (with its own concurrency group) so
# a slow run can't clobber :latest with older bits.
# Concurrency: push/release runs are NEVER cancelled so every merge gets its
# own SHA-tagged image; :latest is guarded separately by the move-latest job.
# PR runs reuse a PR-scoped group with cancel-in-progress: true so rapid
# pushes to the same PR collapse to the latest commit.
concurrency:
group: docker-${{ github.ref }}
cancel-in-progress: false
group: docker-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
env:
IMAGE_NAME: nousresearch/hermes-agent
jobs:
build-and-push:
# ---------------------------------------------------------------------------
# Build amd64 natively. This job also runs the smoke tests (basic --help
# and the dashboard subcommand regression guard from #9153), because amd64
# is the only arch we can `load` into the local daemon on an amd64 runner.
# ---------------------------------------------------------------------------
build-amd64:
# Only run on the upstream repository, not on forks
if: github.repository == 'NousResearch/hermes-agent'
runs-on: ubuntu-latest
timeout-minutes: 60
timeout-minutes: 45
outputs:
pushed_sha_tag: ${{ steps.mark_pushed.outputs.pushed }}
digest: ${{ steps.push.outputs.digest }}
steps:
- name: Checkout code
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
submodules: recursive
# Fetch enough history to run `git merge-base --is-ancestor` in the
# move-latest job. That job reuses this checkout via its own
# actions/checkout call, but commits reachable from main up to ~1000
# back are plenty for any realistic race window.
fetch-depth: 1000
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
# Build amd64 only so we can `load` the image for smoke testing.
# `load: true` cannot export a multi-arch manifest to the local daemon.
# The multi-arch build follows on push to main / release.
# Build once, load into the local daemon for smoke testing. Cached
# to gha with a per-arch scope; the push step below reuses every
# layer from this build.
- name: Build image (amd64, smoke test)
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
@@ -59,36 +70,14 @@ jobs:
file: Dockerfile
load: true
platforms: linux/amd64
tags: nousresearch/hermes-agent:test
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ env.IMAGE_NAME }}:test
cache-from: type=gha,scope=docker-amd64
cache-to: type=gha,mode=max,scope=docker-amd64
- name: Test image starts
run: |
mkdir -p /tmp/hermes-test
sudo chown -R 10000:10000 /tmp/hermes-test
# The image runs as the hermes user (UID 10000). GitHub Actions
# creates /tmp/hermes-test root-owned by default, which hermes
# can't write to — chown it to match the in-container UID before
# bind-mounting. Real users doing `docker run -v ~/.hermes:...`
# with their own UID hit the same issue and have their own
# remediations (HERMES_UID env var, or chown locally).
docker run --rm \
-v /tmp/hermes-test:/opt/data \
--entrypoint /opt/hermes/docker/entrypoint.sh \
nousresearch/hermes-agent:test --help
- name: Test dashboard subcommand
run: |
mkdir -p /tmp/hermes-test
sudo chown -R 10000:10000 /tmp/hermes-test
# Verify the dashboard subcommand is included in the Docker image.
# This prevents regressions like #9153 where the dashboard command
# was present in source but missing from the published image.
docker run --rm \
-v /tmp/hermes-test:/opt/data \
--entrypoint /opt/hermes/docker/entrypoint.sh \
nousresearch/hermes-agent:test dashboard --help
- name: Smoke test image
uses: ./.github/actions/hermes-smoke-test
with:
image: ${{ env.IMAGE_NAME }}:test
- name: Log in to Docker Hub
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
@@ -97,61 +86,229 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Always push a per-commit SHA tag on main. This is race-free because
# every commit has a unique SHA — concurrent runs can't clobber each
# other here. We also embed the git SHA as an OCI label so the
# move-latest job (below) can read it back off the registry's `:latest`.
- name: Push multi-arch image with SHA tag (main branch)
id: push_sha
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
# Push amd64 by digest only (no tag). The merge job assembles the
# tagged manifest list. `push-by-digest=true` is docker's recommended
# pattern for multi-runner multi-platform builds.
#
# We apply the OCI revision label here (and again on arm64) because
# the move-latest job reads it off the linux/amd64 sub-manifest config
# of `:latest` to decide whether it's safe to advance. The label must
# be on each per-arch image — manifest lists themselves don't carry
# image config labels.
- name: Push amd64 by digest
id: push
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
context: .
file: Dockerfile
push: true
platforms: linux/amd64,linux/arm64
tags: nousresearch/hermes-agent:sha-${{ github.sha }}
platforms: linux/amd64
labels: |
org.opencontainers.image.revision=${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
outputs: type=image,name=${{ env.IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=gha,scope=docker-amd64
cache-to: type=gha,mode=max,scope=docker-amd64
# Write the digest to a file and upload it as an artifact so the
# merge job can stitch both per-arch digests into a manifest list.
- name: Export digest
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
run: |
mkdir -p /tmp/digests
digest="${{ steps.push.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest artifact
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: digest-amd64
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
# ---------------------------------------------------------------------------
# Build arm64 natively on GitHub's free arm64 runner. This replaces the
# previous QEMU-emulated arm64 build, which was ~5-10x slower and shared
# a cache scope with amd64. Matches the amd64 job's shape: build+load,
# smoke test, then on push/release push by digest.
# ---------------------------------------------------------------------------
build-arm64:
if: github.repository == 'NousResearch/hermes-agent'
runs-on: ubuntu-24.04-arm
timeout-minutes: 45
outputs:
digest: ${{ steps.push.outputs.digest }}
steps:
- name: Checkout code
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
submodules: recursive
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
# Build once, load into the local daemon for smoke testing. Cached
# to gha with a per-arch scope; the push step below reuses every
# layer from this build.
- name: Build image (arm64, smoke test)
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
context: .
file: Dockerfile
load: true
platforms: linux/arm64
tags: ${{ env.IMAGE_NAME }}:test
cache-from: type=gha,scope=docker-arm64
cache-to: type=gha,mode=max,scope=docker-arm64
- name: Smoke test image
uses: ./.github/actions/hermes-smoke-test
with:
image: ${{ env.IMAGE_NAME }}:test
- name: Log in to Docker Hub
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Push arm64 by digest
id: push
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
context: .
file: Dockerfile
platforms: linux/arm64
labels: |
org.opencontainers.image.revision=${{ github.sha }}
outputs: type=image,name=${{ env.IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=gha,scope=docker-arm64
cache-to: type=gha,mode=max,scope=docker-arm64
- name: Export digest
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
run: |
mkdir -p /tmp/digests
digest="${{ steps.push.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest artifact
if: github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: digest-arm64
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
# ---------------------------------------------------------------------------
# Stitch both per-arch digests into a single tagged multi-arch manifest.
# This is a registry-side operation — no building, no layer re-push —
# so it runs in ~30 seconds. On main pushes it produces :sha-<sha>.
# On releases it produces :<release_tag_name>.
# ---------------------------------------------------------------------------
merge:
if: github.repository == 'NousResearch/hermes-agent' && (github.event_name == 'push' && github.ref == 'refs/heads/main' || github.event_name == 'release')
runs-on: ubuntu-latest
needs: [build-amd64, build-arm64]
timeout-minutes: 10
outputs:
pushed_sha_tag: ${{ steps.mark_pushed.outputs.pushed }}
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
with:
path: /tmp/digests
pattern: digest-*
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: Log in to Docker Hub
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Compute the tag for this run. Main pushes use sha-<sha> (so every
# commit gets its own immutable tag); releases use the release tag name.
- name: Compute tag
id: tag
run: |
if [ "${{ github.event_name }}" = "release" ]; then
echo "tag=${{ github.event.release.tag_name }}" >> "$GITHUB_OUTPUT"
else
echo "tag=sha-${{ github.sha }}" >> "$GITHUB_OUTPUT"
fi
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
set -euo pipefail
# Build the arg array from each digest file (filename = the digest
# hex, with no sha256: prefix; empty file content, only the name
# matters). Using an array avoids shellcheck SC2046 and keeps
# every digest a single argv token even under pathological names.
args=()
for digest_file in *; do
args+=("${IMAGE_NAME}@sha256:${digest_file}")
done
docker buildx imagetools create \
-t "${IMAGE_NAME}:${TAG}" \
"${args[@]}"
env:
IMAGE_NAME: ${{ env.IMAGE_NAME }}
TAG: ${{ steps.tag.outputs.tag }}
- name: Inspect image
run: |
docker buildx imagetools inspect "${IMAGE_NAME}:${TAG}"
env:
IMAGE_NAME: ${{ env.IMAGE_NAME }}
TAG: ${{ steps.tag.outputs.tag }}
# Signal to move-latest that the SHA tag is live. Only on main pushes;
# releases don't trigger move-latest (they use their own release tag).
- name: Mark SHA tag pushed
id: mark_pushed
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: echo "pushed=true" >> "$GITHUB_OUTPUT"
- name: Push multi-arch image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
context: .
file: Dockerfile
push: true
platforms: linux/amd64,linux/arm64
tags: nousresearch/hermes-agent:${{ github.event.release.tag_name }}
cache-from: type=gha
cache-to: type=gha,mode=max
# Second job: moves `:latest` to point at the SHA tag the first job pushed.
# ---------------------------------------------------------------------------
# Move :latest to point at the SHA tag the merge job pushed.
#
# Has its own concurrency group with `cancel-in-progress: true`, which
# gives us the serialization we need: if a newer push arrives while an
# older run is mid-way through this job, the older run is cancelled
# before it can clobber `:latest`. Combined with the ancestor check
# below, this means `:latest` only ever moves forward in git history.
# The real serialization guarantee comes from the top-level concurrency
# group (`docker-${{ github.ref }}` with `cancel-in-progress: false`),
# which ensures at most one workflow run for this ref executes at a time.
# That means two move-latest steps for the same ref cannot overlap.
#
# This job has its own concurrency group as defense-in-depth: if the
# top-level group is ever loosened, queued move-latests will run serially
# in arrival order, each one running the ancestor check below and either
# advancing :latest or skipping. `cancel-in-progress: false` matches the
# top-level setting — we don't want rapid pushes to cancel a queued
# move-latest, because the ancestor check is the real safety mechanism
# and queueing is cheap (move-latest is a ~30s registry op).
#
# Combined with the ancestor check, this means :latest only ever moves
# forward in git history.
# ---------------------------------------------------------------------------
move-latest:
if: |
github.repository == 'NousResearch/hermes-agent'
&& github.event_name == 'push'
&& github.ref == 'refs/heads/main'
&& needs.build-and-push.outputs.pushed_sha_tag == 'true'
needs: build-and-push
&& needs.merge.outputs.pushed_sha_tag == 'true'
needs: merge
runs-on: ubuntu-latest
timeout-minutes: 10
concurrency:
group: docker-move-latest-${{ github.ref }}
cancel-in-progress: true
cancel-in-progress: false
steps:
- name: Checkout code
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
@@ -167,11 +324,11 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Read the git revision label off the current `:latest` manifest, then
# Read the git revision label off the current :latest manifest, then
# use `git merge-base --is-ancestor` to check whether our commit is a
# descendant of it. If `:latest` doesn't exist yet, or its label is
# descendant of it. If :latest doesn't exist yet, or its label is
# missing, we treat that as "safe to publish". If another run already
# advanced `:latest` past us (or diverged), we skip and leave it alone.
# advanced :latest past us (or diverged), we skip and leave it alone.
- name: Decide whether to move :latest
id: latest_check
run: |

119
.github/workflows/uv-lockfile-check.yml vendored Normal file
View file

@@ -0,0 +1,119 @@
name: uv.lock check
# Verify uv.lock is in sync with pyproject.toml. Blocking check — PRs
# that modify pyproject.toml without regenerating uv.lock (or vice versa)
# must not merge, because the Docker build's `uv sync --frozen` step will
# fail on a stale lockfile and we'd rather catch it here than in the
# docker-publish workflow on main.
#
# ─────────────────────────────────────────────────────────────────────────
# IMPORTANT: this check runs against the MERGED state, not just your branch
# ─────────────────────────────────────────────────────────────────────────
#
# For `pull_request` events, GitHub checks out `refs/pull/<N>/merge` by
# default — a synthetic commit that merges your PR branch into the CURRENT
# state of `main`. That means the pyproject.toml evaluated here is
# `main's pyproject.toml + your PR's changes to pyproject.toml`, not just
# what's on your branch.
#
# Failure mode this creates: if `main` has advanced since you branched
# (e.g. someone merged a PR that added a dep to pyproject.toml + its
# corresponding uv.lock entries), your branch's uv.lock is missing those
# new entries. `uv lock --check` resolves against the merged pyproject
# and sees a lockfile that doesn't cover all the current deps → fails
# with "The lockfile at uv.lock needs to be updated."
#
# This can be confusing: `uv lock --check` passes locally (your branch
# is internally consistent) but fails in CI (merged state isn't).
#
# Fix is to sync your branch with main and regenerate the lockfile:
#
# git fetch origin main
# git rebase origin/main # or merge, whatever the repo prefers
# uv lock # regenerates uv.lock against new pyproject.toml
# git add uv.lock
# git commit -m "chore: refresh uv.lock after rebase onto main"
# git push --force-with-lease # if you rebased
#
# If you also changed pyproject.toml in your PR, `uv lock` handles that
# at the same time — one regeneration covers both your changes and the
# drift from main.
#
# This is the correct behavior! The check is protecting main's Docker
# build: a post-merge build would see the same merged state and fail
# the same way. Better to catch it here than after merge.
on:
push:
branches: [main]
paths:
- 'pyproject.toml'
- 'uv.lock'
- '.github/workflows/uv-lockfile-check.yml'
pull_request:
branches: [main]
paths:
- 'pyproject.toml'
- 'uv.lock'
- '.github/workflows/uv-lockfile-check.yml'
permissions:
contents: read
concurrency:
group: uv-lockfile-check-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
jobs:
check:
name: uv lock --check
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Checkout code
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5
# `uv lock --check` re-resolves the project from pyproject.toml and
# compares the result to uv.lock, exiting non-zero if they disagree.
# No network writes, no file modifications.
#
# On PRs this runs against the merge commit (see comment at the top
# of this file) — failures often mean "your branch is behind main,
# rebase and regenerate uv.lock."
- name: Verify uv.lock is up-to-date
run: |
if ! uv lock --check; then
cat <<'EOF' >> "$GITHUB_STEP_SUMMARY"
## ❌ uv.lock is out of sync with pyproject.toml
**If this is a PR:** this check runs against the merged state
(your branch + current `main`), not just your branch. If
`uv lock --check` passes locally, your branch is likely behind
`main` — recent changes to `pyproject.toml` on `main` aren't
reflected in your branch's `uv.lock` yet.
To fix, sync with main and regenerate the lockfile:
```bash
git fetch origin main
git rebase origin/main # or `git merge origin/main`
uv lock # regenerate against new pyproject.toml
git add uv.lock
git commit -m "chore: refresh uv.lock after syncing with main"
git push --force-with-lease # drop --force-with-lease if you merged
```
**If you only changed pyproject.toml:** run `uv lock` locally
and commit the result.
This check is blocking because the Docker image build uses
`uv sync --frozen --extra all`, which rejects stale lockfiles
— catching it here avoids a ~15 min failed docker-publish run
on `main` post-merge.
EOF
echo "::error title=uv.lock out of sync::Run \`uv lock\` locally and commit the result. If on a PR, sync with main first."
exit 1
fi

View file

@@ -55,6 +55,29 @@ RUN npm install --prefer-offline --no-audit && \
(cd ui-tui && npm install --prefer-offline --no-audit) && \
npm cache clean --force
# ---------- Layer-cached Python dependency install ----------
# Copy only pyproject.toml + uv.lock so the Python dep resolve + wheel
# download + native-extension compile layer is cached unless those inputs
# change. Before this split the Python install sat after `COPY . .`, so
# every source-only commit re-did ~4-5 min of dep work on cold builds.
#
# README.md is referenced by pyproject.toml's `readme =` field, but it's
# excluded from the build context by .dockerignore's `*.md`. uv's build
# frontend stats the readme path during dep resolution, so we `touch` an
# empty placeholder — the real README is restored by `COPY . .` below.
#
# `uv sync --frozen --no-install-project --extra all` installs only the
# deps reachable through the composite `[all]` extra (handpicked set
# intended for the production image). We do NOT use `--all-extras`:
# that would pull in `[rl]` (atroposlib + tinker + torch + wandb from
# git), `[yc-bench]` (another git dep), and `[termux-all]` (Android
# redundancy), none of which belong in the published container.
#
# The editable link is created after the source copy below.
COPY pyproject.toml uv.lock ./
RUN touch ./README.md
RUN uv sync --frozen --no-install-project --extra all
# ---------- Source code ----------
# .dockerignore excludes node_modules, so the installs above survive.
COPY --chown=hermes:hermes . .
@@ -77,9 +100,10 @@ RUN chmod -R a+rX /opt/hermes && \
# Start as root so the entrypoint can usermod/groupmod + gosu.
# If HERMES_UID is unset, the entrypoint drops to the default hermes user (10000).
# ---------- Python virtualenv ----------
RUN uv venv && \
uv pip install --no-cache-dir -e ".[all]"
# ---------- Link hermes-agent itself (editable) ----------
# Deps are already installed in the cached layer above; `--no-deps` makes
# this a fast (~1s) egg-link creation with no resolution or downloads.
RUN uv pip install --no-cache-dir --no-deps -e "."
# ---------- Runtime ----------
ENV HERMES_WEB_DIST=/opt/hermes/hermes_cli/web_dist

81
uv.lock generated
View file

@@ -8,6 +8,10 @@ resolution-markers = [
"python_full_version < '3.12'",
]
[options]
exclude-newer = "2026-05-01T22:46:56.926194148Z"
exclude-newer-span = "P7D"
[[package]]
name = "agent-client-protocol"
version = "0.9.0"
@@ -1950,7 +1954,7 @@ wheels = [
[[package]]
name = "hermes-agent"
version = "0.12.0"
version = "0.13.0"
source = { editable = "." }
dependencies = [
{ name = "anthropic" },
@@ -1965,6 +1969,7 @@ dependencies = [
{ name = "openai" },
{ name = "parallel-web" },
{ name = "prompt-toolkit" },
{ name = "psutil" },
{ name = "pydantic" },
{ name = "pyjwt", extra = ["crypto"] },
{ name = "python-dotenv" },
@@ -1972,6 +1977,7 @@ dependencies = [
{ name = "requests" },
{ name = "rich" },
{ name = "tenacity" },
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
[package.optional-dependencies]
@@ -2026,6 +2032,9 @@ bedrock = [
cli = [
{ name = "simple-term-menu" },
]
computer-use = [
{ name = "mcp" },
]
daytona = [
{ name = "daytona" },
]
@@ -2109,6 +2118,31 @@ termux = [
{ name = "pywinpty", marker = "sys_platform == 'win32'" },
{ name = "simple-term-menu" },
]
termux-all = [
{ name = "agent-client-protocol" },
{ name = "aiohttp" },
{ name = "alibabacloud-dingtalk" },
{ name = "boto3" },
{ name = "dingtalk-stream" },
{ name = "discord-py", extra = ["voice"] },
{ name = "elevenlabs" },
{ name = "fastapi" },
{ name = "google-api-python-client" },
{ name = "google-auth-httplib2" },
{ name = "google-auth-oauthlib" },
{ name = "honcho-ai" },
{ name = "lark-oapi" },
{ name = "mcp" },
{ name = "mistralai" },
{ name = "ptyprocess", marker = "sys_platform != 'win32'" },
{ name = "python-telegram-bot", extra = ["webhooks"] },
{ name = "pywinpty", marker = "sys_platform == 'win32'" },
{ name = "qrcode" },
{ name = "simple-term-menu" },
{ name = "slack-bolt" },
{ name = "slack-sdk" },
{ name = "uvicorn", extra = ["standard"] },
]
tts-premium = [
{ name = "elevenlabs" },
]
@@ -2161,6 +2195,7 @@ requires-dist = [
{ name = "hermes-agent", extras = ["acp"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["acp"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["bedrock"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["bedrock"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["cli"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["cli"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["cron"], marker = "extra == 'all'" },
@@ -2168,31 +2203,43 @@ requires-dist = [
{ name = "hermes-agent", extras = ["daytona"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["dev"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["dingtalk"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["dingtalk"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["feishu"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["feishu"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["google"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["google"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["homeassistant"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["homeassistant"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["honcho"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["honcho"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["matrix"], marker = "sys_platform == 'linux' and extra == 'all'" },
{ name = "hermes-agent", extras = ["mcp"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["mcp"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["messaging"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["messaging"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["mistral"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["mistral"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["modal"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["pty"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["pty"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["slack"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["slack"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["sms"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["sms"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["termux"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["tts-premium"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["tts-premium"], marker = "extra == 'termux-all'" },
{ name = "hermes-agent", extras = ["vercel"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["voice"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["web"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["web"], marker = "extra == 'termux-all'" },
{ name = "honcho-ai", marker = "extra == 'honcho'", specifier = ">=2.0.1,<3" },
{ name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<1" },
{ name = "jinja2", specifier = ">=3.1.5,<4" },
{ name = "lark-oapi", marker = "extra == 'feishu'", specifier = ">=1.5.3,<2" },
{ name = "markdown", marker = "extra == 'matrix'", specifier = ">=3.6,<4" },
{ name = "mautrix", extras = ["encryption"], marker = "extra == 'matrix'", specifier = ">=0.20,<1" },
{ name = "mcp", marker = "extra == 'computer-use'", specifier = ">=1.2.0,<2" },
{ name = "mcp", marker = "extra == 'dev'", specifier = ">=1.2.0,<2" },
{ name = "mcp", marker = "extra == 'mcp'", specifier = ">=1.2.0,<2" },
{ name = "mistralai", marker = "extra == 'mistral'", specifier = ">=2.3.0,<3" },
@@ -2201,6 +2248,7 @@ requires-dist = [
{ name = "openai", specifier = ">=2.21.0,<3" },
{ name = "parallel-web", specifier = ">=0.4.2,<1" },
{ name = "prompt-toolkit", specifier = ">=3.0.52,<4" },
{ name = "psutil", specifier = ">=5.9.0,<8" },
{ name = "ptyprocess", marker = "sys_platform != 'win32' and extra == 'pty'", specifier = ">=0.7.0,<1" },
{ name = "pydantic", specifier = ">=2.12.5,<3" },
{ name = "pyjwt", extras = ["crypto"], specifier = ">=2.12.0,<3" },
@@ -2227,13 +2275,14 @@ requires-dist = [
{ name = "tenacity", specifier = ">=9.1.4,<10" },
{ name = "tinker", marker = "extra == 'rl'", git = "https://github.com/thinking-machines-lab/tinker.git?rev=30517b667f18a3dfb7ef33fb56cf686d5820ba2b" },
{ name = "ty", marker = "extra == 'dev'", specifier = ">=0.0.1a29,<0.0.22" },
{ name = "tzdata", marker = "sys_platform == 'win32'", specifier = ">=2023.3" },
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'rl'", specifier = ">=0.24.0,<1" },
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'web'", specifier = ">=0.24.0,<1" },
{ name = "vercel", marker = "extra == 'vercel'", specifier = ">=0.5.7,<0.6.0" },
{ name = "wandb", marker = "extra == 'rl'", specifier = ">=0.15.0,<1" },
{ name = "yc-bench", marker = "python_full_version >= '3.12' and extra == 'yc-bench'", git = "https://github.com/collinear-ai/yc-bench.git?rev=bfb0c88062450f46341bd9a5298903fc2e952a5c" },
]
provides-extras = ["modal", "daytona", "vercel", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "mistral", "bedrock", "termux", "dingtalk", "feishu", "google", "web", "rl", "yc-bench", "all"]
provides-extras = ["modal", "daytona", "vercel", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "computer-use", "acp", "mistral", "bedrock", "termux", "termux-all", "dingtalk", "feishu", "google", "web", "rl", "yc-bench", "all"]
[[package]]
name = "hf-transfer"
@ -4000,6 +4049,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" },
]
[[package]]
name = "psutil"
version = "7.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" },
{ url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" },
{ url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" },
{ url = "https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" },
{ url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" },
{ url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" },
{ url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" },
{ url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" },
{ url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" },
{ url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" },
{ url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" },
{ url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" },
{ url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" },
{ url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" },
{ url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" },
{ url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" },
{ url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" },
{ url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" },
{ url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" },
{ url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" },
]
[[package]]
name = "ptyprocess"
version = "0.7.0"