mirror of
https://github.com/fish-shell/fish-shell.git
synced 2026-05-11 02:51:15 -03:00
Compare commits
1 Commits
4.5.0
...
release-mo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a590e1ee1b |
@@ -1,6 +1,7 @@
|
||||
image: alpine/edge
|
||||
packages:
|
||||
- cargo
|
||||
- clang17-libclang
|
||||
- cmake
|
||||
- ninja
|
||||
- pcre2-dev
|
||||
@@ -23,4 +24,4 @@ tasks:
|
||||
ninja
|
||||
- test: |
|
||||
cd fish-shell/build
|
||||
ninja fish_run_tests
|
||||
env ninja test
|
||||
|
||||
@@ -20,4 +20,4 @@ tasks:
|
||||
ninja
|
||||
- test: |
|
||||
cd fish/build
|
||||
ninja fish_run_tests
|
||||
env ninja test
|
||||
|
||||
@@ -5,10 +5,10 @@ packages:
|
||||
- gettext
|
||||
- gmake
|
||||
- llvm
|
||||
- terminfo-db
|
||||
- ninja
|
||||
- pcre2
|
||||
- py311-pexpect
|
||||
- py311-sphinx
|
||||
- python
|
||||
- rust
|
||||
- tmux
|
||||
@@ -27,4 +27,4 @@ tasks:
|
||||
ninja
|
||||
- test: |
|
||||
cd fish-shell/build
|
||||
ninja fish_run_tests
|
||||
ninja test
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
image: openbsd/latest
|
||||
packages:
|
||||
- cmake
|
||||
- gcc
|
||||
- gettext
|
||||
- gmake
|
||||
- llvm
|
||||
- ninja
|
||||
- pcre2
|
||||
- py3-pexpect
|
||||
- py3-sphinx
|
||||
- python
|
||||
- rust
|
||||
- tmux
|
||||
sources:
|
||||
- https://github.com/fish-shell/fish-shell
|
||||
tasks:
|
||||
- build: |
|
||||
cd fish-shell
|
||||
mkdir build
|
||||
cd build
|
||||
cmake -GNinja .. \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DCMAKE_INSTALL_DATADIR=share \
|
||||
-DCMAKE_INSTALL_DOCDIR=share/doc/fish \
|
||||
-DCMAKE_INSTALL_SYSCONFDIR=/etc
|
||||
ninja
|
||||
- test: |
|
||||
cd fish-shell/build
|
||||
ninja fish_run_tests
|
||||
@@ -1,2 +1,8 @@
|
||||
[alias]
|
||||
xtask = "run --package xtask --"
|
||||
# For macOS, support 10.9 on x86-64, and 11.0 on aarch64, which
|
||||
# is the minimum supported version for Apple Silicon.
|
||||
[target.x86_64-apple-darwin]
|
||||
rustflags = ["-C", "link-arg=-mmacosx-version-min=10.9"]
|
||||
|
||||
[target.aarch64-apple-darwin]
|
||||
rustflags = ["-C", "link-arg=-mmacosx-version-min=11.0"]
|
||||
|
||||
|
||||
67
.cirrus.yml
67
.cirrus.yml
@@ -8,29 +8,67 @@ linux_task:
|
||||
container: &step
|
||||
image: ghcr.io/krobelus/fish-ci/alpine:latest
|
||||
memory: 4GB
|
||||
- name: ubuntu-oldest-supported
|
||||
- name: jammy
|
||||
container:
|
||||
<<: *step
|
||||
image: ghcr.io/krobelus/fish-ci/ubuntu-oldest-supported:latest
|
||||
image: ghcr.io/krobelus/fish-ci/jammy:latest
|
||||
# - name: jammy-asan
|
||||
# container:
|
||||
# <<: *step
|
||||
# image: ghcr.io/krobelus/fish-ci/jammy-asan:latest
|
||||
# - name: focal-32bit
|
||||
# container:
|
||||
# <<: *step
|
||||
# image: ghcr.io/krobelus/fish-ci/focal-32bit:latest
|
||||
|
||||
tests_script:
|
||||
# cirrus at times gives us 32 procs and 2 GB of RAM
|
||||
# Unrestriced parallelism results in OOM
|
||||
- lscpu || true
|
||||
- (cat /proc/meminfo | grep MemTotal) || true
|
||||
- mkdir build && cd build
|
||||
- FISH_TEST_MAX_CONCURRENCY=6 cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug ..
|
||||
- cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCTEST_PARALLEL_LEVEL=6 ..
|
||||
- ninja -j 6 fish
|
||||
- ninja fish_run_tests
|
||||
|
||||
only_if: $CIRRUS_REPO_OWNER == 'fish-shell'
|
||||
|
||||
linux_arm_task:
|
||||
matrix:
|
||||
- name: focal-arm64
|
||||
arm_container:
|
||||
image: ghcr.io/fish-shell/fish-ci/focal-arm64
|
||||
- name: jammy-armv7-32bit
|
||||
arm_container:
|
||||
image: ghcr.io/fish-shell/fish-ci/jammy-armv7-32bit
|
||||
|
||||
tests_script:
|
||||
# cirrus at times gives us 32 procs and 2 GB of RAM
|
||||
# Unrestriced parallelism results in OOM
|
||||
- lscpu || true
|
||||
- (cat /proc/meminfo | grep MemTotal) || true
|
||||
- mkdir build && cd build
|
||||
- cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCTEST_PARALLEL_LEVEL=6 ..
|
||||
- ninja -j 6 fish
|
||||
- file ./fish
|
||||
- ninja fish_run_tests
|
||||
|
||||
# CI task disabled during RIIR transition
|
||||
only_if: false && $CIRRUS_REPO_OWNER == 'fish-shell'
|
||||
|
||||
freebsd_task:
|
||||
matrix:
|
||||
- name: FreeBSD Stable
|
||||
# - name: FreeBSD 14
|
||||
# freebsd_instance:
|
||||
# image_family: freebsd-14-0-snap
|
||||
- name: FreeBSD 13
|
||||
freebsd_instance:
|
||||
image: freebsd-15-0-release-amd64-ufs # updatecli.d/cirrus-freebsd.yml
|
||||
image: freebsd-13-2-release-amd64
|
||||
# - name: FreeBSD 12.3
|
||||
# freebsd_instance:
|
||||
# image: freebsd-12-3-release-amd64
|
||||
tests_script:
|
||||
- pkg update
|
||||
- pkg install -y cmake-core devel/pcre2 devel/ninja gettext git-lite lang/rust misc/py-pexpect
|
||||
- pkg install -y cmake-core devel/pcre2 devel/ninja misc/py-pexpect git-lite terminfo-db
|
||||
# libclang.so is a required build dependency for rust-c++ ffi bridge
|
||||
- pkg install -y llvm
|
||||
# BSDs have the following behavior: root may open or access files even if
|
||||
@@ -43,7 +81,16 @@ freebsd_task:
|
||||
- mkdir build && cd build
|
||||
- chown -R fish-user ..
|
||||
- sudo -u fish-user -s whoami
|
||||
- sudo -u fish-user -s FISH_TEST_MAX_CONCURRENCY=1 cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug ..
|
||||
- sudo -u fish-user -s ninja -j 6 fish
|
||||
- sudo -u fish-user -s ninja fish_run_tests
|
||||
# FreeBSD's pkg currently has rust 1.66.0 while we need rust 1.70.0+. Use rustup to install
|
||||
# the latest, but note that it only installs rust per-user.
|
||||
- sudo -u fish-user -s fetch -qo - https://sh.rustup.rs > rustup.sh
|
||||
- sudo -u fish-user -s sh ./rustup.sh -y --profile=minimal
|
||||
# `sudo -s ...` does not invoke a login shell so we need a workaround to make sure the
|
||||
# rustup environment is configured for subsequent `sudo -s ...` commands.
|
||||
# For some reason, this doesn't do the job:
|
||||
# - sudo -u fish-user sh -c 'echo source \$HOME/.cargo/env >> $HOME/.cshrc'
|
||||
- sudo -u fish-user -s cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCTEST_PARALLEL_LEVEL=1 ..
|
||||
- sudo -u fish-user sh -c '. $HOME/.cargo/env; ninja -j 6 fish'
|
||||
- sudo -u fish-user sh -c '. $HOME/.cargo/env; ninja fish_run_tests'
|
||||
|
||||
only_if: $CIRRUS_REPO_OWNER == 'fish-shell'
|
||||
|
||||
@@ -9,26 +9,20 @@ trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
max_line_length = 100
|
||||
|
||||
[{Makefile,{BSD,GNU}makefile}]
|
||||
[{Makefile,*.in}]
|
||||
indent_style = tab
|
||||
|
||||
[*.{md,rst}]
|
||||
max_line_length = unset
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.sh]
|
||||
indent_size = 4
|
||||
[*.{sh,ac}]
|
||||
indent_size = 2
|
||||
|
||||
[build_tools/release.sh]
|
||||
max_line_length = 72
|
||||
|
||||
[{Dockerfile,Vagrantfile}]
|
||||
[Dockerfile]
|
||||
indent_size = 2
|
||||
|
||||
[share/{completions,functions}/**.fish]
|
||||
max_line_length = unset
|
||||
max_line_length = off
|
||||
|
||||
[{COMMIT_EDITMSG,git-revise-todo,*.jjdescription}]
|
||||
max_line_length = 72
|
||||
|
||||
[*.yml]
|
||||
indent_size = 2
|
||||
[{COMMIT_EDITMSG,git-revise-todo}]
|
||||
max_line_length = 80
|
||||
|
||||
15
.gitattributes
vendored
15
.gitattributes
vendored
@@ -1,12 +1,13 @@
|
||||
# normalize newlines
|
||||
* text=auto eol=lf
|
||||
* text=auto
|
||||
*.fish text
|
||||
*.bat eol=crlf
|
||||
|
||||
# let git show off diff hunk headers, help git diff -L:
|
||||
# https://git-scm.com/docs/gitattributes
|
||||
*.c diff=cpp
|
||||
*.cpp diff=cpp
|
||||
*.h diff=cpp
|
||||
*.py diff=py
|
||||
*.rs diff=rust
|
||||
# add a [diff "fish"] to git config with pattern
|
||||
*.fish diff=fish
|
||||
|
||||
@@ -14,14 +15,18 @@
|
||||
.gitattributes export-ignore
|
||||
.gitignore export-ignore
|
||||
/build_tools/make_tarball.sh export-ignore
|
||||
/debian export-ignore
|
||||
/debian/* export-ignore
|
||||
/.github export-ignore
|
||||
/.github/* export-ignore
|
||||
/.builds export-ignore
|
||||
/.builds/* export-ignore
|
||||
|
||||
# for linguist, which drives GitHub's language statistics
|
||||
# for linguist; let github identify our project as C++ instead of C due to pcre2
|
||||
pcre2/** linguist-vendored
|
||||
alpine.js linguist-vendored
|
||||
doc_src/** linguist-documentation
|
||||
*.fish linguist-language=fish
|
||||
# see 70f2899fcd which attempts to "rig the count"
|
||||
src/*.h linguist-language=c++
|
||||
src/builtins/*.h linguist-language=c++
|
||||
share/completions/*.fish linguist-documentation
|
||||
|
||||
2
.github/FUNDING.yml
vendored
2
.github/FUNDING.yml
vendored
@@ -1,2 +0,0 @@
|
||||
github:
|
||||
- krobelus
|
||||
@@ -1,18 +1,10 @@
|
||||
---
|
||||
name: "Bug Report"
|
||||
about: "Simple template for bug reports"
|
||||
title: ""
|
||||
labels: []
|
||||
assignees: []
|
||||
---
|
||||
|
||||
<!--
|
||||
Please tell us which fish version you are using by executing the following:
|
||||
|
||||
fish --version
|
||||
echo $version
|
||||
|
||||
Please tell us which operating system (output of `uname`) and terminal you are using.
|
||||
Please tell us which operating system and terminal you are using. The output of `uname -a` and `echo $TERM` may be helpful in this regard although other commands might be relevant in your specific situation.
|
||||
|
||||
Please tell us if you tried fish without third-party customizations by executing this command and whether it affected the behavior you are reporting:
|
||||
|
||||
9
.github/PULL_REQUEST_TEMPLATE.md
vendored
9
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,8 +1,11 @@
|
||||
## Description
|
||||
|
||||
Talk about your changes here.
|
||||
|
||||
Fixes issue #
|
||||
|
||||
## TODOs:
|
||||
<!-- Check off what what has been done so far. -->
|
||||
- [ ] If addressing an issue, a commit message mentions `Fixes issue #<issue-number>`
|
||||
<!-- Just check off what what we know been done so far. We can help you with this stuff. -->
|
||||
- [ ] Changes to fish usage are reflected in user documentation/manpages.
|
||||
- [ ] Tests have been added for regressions fixed
|
||||
- [ ] User-visible changes noted in CHANGELOG.rst <!-- Usually skipped for changes to completions -->
|
||||
- [ ] User-visible changes noted in CHANGELOG.rst <!-- Don't document changes for completions inside CHANGELOG.rst, there are lot of such edits -->
|
||||
|
||||
41
.github/actions/install-dependencies/action.yml
vendored
41
.github/actions/install-dependencies/action.yml
vendored
@@ -1,41 +0,0 @@
|
||||
name: Install dependencies for system tests
|
||||
|
||||
inputs:
|
||||
include_sphinx:
|
||||
description: Whether to install Sphinx
|
||||
required: true
|
||||
default: false
|
||||
include_pcre:
|
||||
description: Whether to install the PCRE library
|
||||
required: false
|
||||
default: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- shell: bash
|
||||
env:
|
||||
include_pcre: ${{ inputs.include_pcre }}
|
||||
run: |
|
||||
set -x
|
||||
: "optional dependencies"
|
||||
sudo apt install \
|
||||
gettext \
|
||||
$(if $include_pcre; then echo libpcre2-dev; fi) \
|
||||
;
|
||||
: "system test dependencies"
|
||||
sudo apt install \
|
||||
diffutils $(: "for diff") \
|
||||
git \
|
||||
gettext \
|
||||
less \
|
||||
$(if ${{ inputs.include_pcre }}; then echo libpcre2-dev; fi) \
|
||||
python3-pexpect \
|
||||
tmux \
|
||||
wget \
|
||||
;
|
||||
- uses: ./.github/actions/install-sphinx
|
||||
if: ${{ inputs.include_sphinx == 'true' }}
|
||||
23
.github/actions/install-sphinx/action.yml
vendored
23
.github/actions/install-sphinx/action.yml
vendored
@@ -1,23 +0,0 @@
|
||||
name: Install sphinx
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- shell: bash
|
||||
run: |
|
||||
set -x
|
||||
sudo pip install uv --break-system-packages
|
||||
command -v uv
|
||||
command -v uvx
|
||||
# Check that pyproject.toml and the lock file are in sync.
|
||||
# TODO Use "uv" to install Python as well.
|
||||
: 'Note that --no-managed-python below would be implied but be explicit'
|
||||
uv='env UV_PYTHON=python uv --no-managed-python'
|
||||
$uv lock --check --exclude-newer="$(awk -F'"' <uv.lock '/^exclude-newer[[:space:]]*=/ {print $2}')"
|
||||
# Install globally.
|
||||
sudo $uv pip install --group=dev --system --break-system-packages
|
||||
# Smoke test.
|
||||
python -c 'import sphinx; import sphinx_markdown_builder'
|
||||
41
.github/actions/rust-toolchain/action.yml
vendored
41
.github/actions/rust-toolchain/action.yml
vendored
@@ -1,41 +0,0 @@
|
||||
name: Rust Toolchain
|
||||
|
||||
inputs:
|
||||
toolchain_channel:
|
||||
description: Either "stable" or "msrv"
|
||||
required: true
|
||||
targets:
|
||||
description: Comma-separated list of target triples to install for this toolchain
|
||||
required: false
|
||||
components:
|
||||
description: Comma-separated list of components to be additionally installed
|
||||
required: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Set toolchain
|
||||
env:
|
||||
toolchain_channel: ${{ inputs.toolchain_channel }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -x
|
||||
toolchain=$(
|
||||
case "$toolchain_channel" in
|
||||
(stable) echo 1.93 ;; # updatecli.d/rust.yml
|
||||
(msrv) echo 1.85 ;; # updatecli.d/rust.yml
|
||||
(*)
|
||||
printf >&2 "error: unsupported toolchain channel %s" "$toolchain_channel"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
)
|
||||
printf 'TOOLCHAIN=%s\n' "$toolchain" >>"$GITHUB_ENV"
|
||||
- uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.TOOLCHAIN }}
|
||||
targets: ${{ inputs.targets }}
|
||||
components: ${{ inputs.components }}
|
||||
@@ -1,21 +0,0 @@
|
||||
name: Oldest Supported Rust Toolchain
|
||||
|
||||
inputs:
|
||||
targets:
|
||||
description: Comma-separated list of target triples to install for this toolchain
|
||||
required: false
|
||||
components:
|
||||
description: Comma-separated list of components to be additionally installed
|
||||
required: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- uses: ./.github/actions/rust-toolchain
|
||||
with:
|
||||
toolchain_channel: "msrv"
|
||||
targets: ${{ inputs.targets }}
|
||||
components: ${{ inputs.components }}
|
||||
21
.github/actions/rust-toolchain@stable/action.yml
vendored
21
.github/actions/rust-toolchain@stable/action.yml
vendored
@@ -1,21 +0,0 @@
|
||||
name: Stable Rust Toolchain
|
||||
|
||||
inputs:
|
||||
targets:
|
||||
description: Comma-separated list of target triples to install for this toolchain
|
||||
required: false
|
||||
components:
|
||||
description: Comma-separated list of components to be additionally installed
|
||||
required: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- uses: ./.github/actions/rust-toolchain
|
||||
with:
|
||||
toolchain_channel: "stable"
|
||||
targets: ${{ inputs.targets }}
|
||||
components: ${{ inputs.components }}
|
||||
28
.github/workflows/autolabel_prs.yml
vendored
28
.github/workflows/autolabel_prs.yml
vendored
@@ -8,12 +8,16 @@ jobs:
|
||||
label-and-milestone:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# - name: Checkout repository
|
||||
# uses: actions/checkout@v2
|
||||
|
||||
- name: Set label and milestone
|
||||
id: set-label-milestone
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8, build_tools/update-dependencies.sh
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const completionsLabel = 'completions';
|
||||
const completionsMilestone = 'fish next-3.x';
|
||||
|
||||
// Get changed files in the pull request
|
||||
const prNumber = context.payload.pull_request.number;
|
||||
@@ -37,4 +41,26 @@ jobs:
|
||||
labels: [completionsLabel],
|
||||
});
|
||||
console.log(`PR ${prNumber} assigned label "${completionsLabel}"`);
|
||||
|
||||
// Get the list of milestones
|
||||
const { data: milestones } = await github.rest.issues.listMilestones({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
});
|
||||
|
||||
// Find the milestone id
|
||||
const milestone = milestones.find(milestone => milestone.title === completionsMilestone);
|
||||
|
||||
if (milestone) {
|
||||
// Set the milestone for the PR
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: prNumber,
|
||||
milestone: milestone.number
|
||||
});
|
||||
console.log(`PR ${prNumber} assigned milestone "${completionsMilestone}"`);
|
||||
} else {
|
||||
console.error(`Milestone "${completionsMilestone}" not found`);
|
||||
}
|
||||
}
|
||||
|
||||
64
.github/workflows/build_docker_images.yml
vendored
64
.github/workflows/build_docker_images.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: Build Docker test images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'docker/**'
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: docker-builds
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
NAMESPACE: fish-ci
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
if: github.repository_owner == 'fish-shell'
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
target: alpine
|
||||
- os: ubuntu-latest
|
||||
target: ubuntu-oldest-supported
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
-
|
||||
name: Login to Container registry
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
-
|
||||
name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ github.repository_owner }}/${{ env.NAMESPACE }}/${{ matrix.target }}
|
||||
flavor: |
|
||||
latest=true
|
||||
-
|
||||
name: Build and push
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
context: docker/context
|
||||
push: true
|
||||
file: docker/${{ matrix.target }}.Dockerfile
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
24
.github/workflows/lint-dependencies.yml
vendored
24
.github/workflows/lint-dependencies.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: Lint Dependencies
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- '.github/workflows/lint-dependencies.yml'
|
||||
- 'Cargo.lock'
|
||||
- '**/Cargo.toml'
|
||||
- 'deny.toml'
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/lint-dependencies.yml'
|
||||
- 'Cargo.lock'
|
||||
- '**/Cargo.toml'
|
||||
- 'deny.toml'
|
||||
jobs:
|
||||
cargo-deny:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: EmbarkStudios/cargo-deny-action@44db170f6a7d12a6e90340e9e0fca1f650d34b14 # v2.0.15, build_tools/update-dependencies.sh
|
||||
with:
|
||||
command: check licenses
|
||||
arguments: --all-features --locked --exclude-dev
|
||||
rust-version: 1.93 # updatecli.d/rust.yml
|
||||
66
.github/workflows/lint.yml
vendored
66
.github/workflows/lint.yml
vendored
@@ -1,66 +0,0 @@
|
||||
name: Lint
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
format:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: ./.github/actions/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt
|
||||
- name: install dependencies
|
||||
run: pip install ruff
|
||||
- name: build fish
|
||||
run: cargo build
|
||||
- name: check format
|
||||
run: PATH="target/debug:$PATH" build_tools/style.fish --all --check
|
||||
- name: check rustfmt
|
||||
run: find build.rs crates src -type f -name '*.rs' | xargs rustfmt --check
|
||||
|
||||
|
||||
clippy:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- rust_version: "stable"
|
||||
features: ""
|
||||
- rust_version: "stable"
|
||||
features: "--no-default-features"
|
||||
- rust_version: "msrv"
|
||||
features: ""
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: ./.github/actions/rust-toolchain
|
||||
with:
|
||||
toolchain_channel: ${{ matrix.rust_version }}
|
||||
components: clippy
|
||||
- name: Update package database
|
||||
run: sudo apt-get update
|
||||
- name: Install deps
|
||||
run: |
|
||||
sudo apt install gettext
|
||||
- name: cargo clippy
|
||||
run: cargo clippy --workspace --all-targets ${{ matrix.features }} -- --deny=warnings
|
||||
|
||||
rustdoc:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: ./.github/actions/rust-toolchain@stable
|
||||
- name: Update package database
|
||||
run: sudo apt-get update
|
||||
- name: Install deps
|
||||
run: |
|
||||
sudo apt install gettext
|
||||
- name: cargo doc
|
||||
run: |
|
||||
RUSTDOCFLAGS='-D warnings' cargo doc --workspace
|
||||
- name: cargo doctest
|
||||
run: |
|
||||
cargo test --doc --workspace
|
||||
3
.github/workflows/lockthreads.yml
vendored
3
.github/workflows/lockthreads.yml
vendored
@@ -12,13 +12,12 @@ permissions:
|
||||
|
||||
jobs:
|
||||
lock:
|
||||
if: github.repository_owner == 'fish-shell'
|
||||
permissions:
|
||||
issues: write # for dessant/lock-threads to lock issues
|
||||
pull-requests: write # for dessant/lock-threads to lock PRs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@f5f995c727ac99a91dec92781a8e34e7c839a65e # v6.0.0, build_tools/update-dependencies.sh
|
||||
- uses: dessant/lock-threads@v4
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
issue-inactive-days: '365'
|
||||
|
||||
42
.github/workflows/mac_codesign.yml
vendored
Normal file
42
.github/workflows/mac_codesign.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
name: macOS build and codesign
|
||||
|
||||
on:
|
||||
workflow_dispatch: # Enables manual trigger from GitHub UI
|
||||
|
||||
jobs:
|
||||
build-and-code-sign:
|
||||
runs-on: macos-latest
|
||||
environment: macos-codesign
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install Rust 1.73.0
|
||||
uses: dtolnay/rust-toolchain@1.73.0
|
||||
with:
|
||||
targets: x86_64-apple-darwin
|
||||
- name: Install Rust 1.79
|
||||
uses: dtolnay/rust-toolchain@1.79
|
||||
with:
|
||||
targets: aarch64-apple-darwin
|
||||
- name: build-and-codesign
|
||||
run: |
|
||||
cargo install apple-codesign
|
||||
mkdir -p "$FISH_ARTEFACT_PATH"
|
||||
echo "$MAC_CODESIGN_APP_P12_BASE64" | base64 --decode > /tmp/app.p12
|
||||
echo "$MAC_CODESIGN_INSTALLER_P12_BASE64" | base64 --decode > /tmp/installer.p12
|
||||
echo "$MACOS_NOTARIZE_JSON" > /tmp/notarize.json
|
||||
./build_tools/make_pkg.sh -s -f /tmp/app.p12 -i /tmp/installer.p12 -p "$MAC_CODESIGN_PASSWORD" -n -j /tmp/notarize.json
|
||||
rm /tmp/installer.p12 /tmp/app.p12 /tmp/notarize.json
|
||||
env:
|
||||
MAC_CODESIGN_APP_P12_BASE64: ${{ secrets.MAC_CODESIGN_APP_P12_BASE64 }}
|
||||
MAC_CODESIGN_INSTALLER_P12_BASE64: ${{ secrets.MAC_CODESIGN_INSTALLER_P12_BASE64 }}
|
||||
MAC_CODESIGN_PASSWORD: ${{ secrets.MAC_CODESIGN_PASSWORD }}
|
||||
MACOS_NOTARIZE_JSON: ${{ secrets.MACOS_NOTARIZE_JSON }}
|
||||
# macOS runners keep having issues loading Cargo.toml dependencies from git (GitHub) instead
|
||||
# of crates.io, so give this a try. It's also sometimes significantly faster on all platforms.
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||
FISH_ARTEFACT_PATH: /tmp/fish-built
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: macOS Artefacts
|
||||
path: /tmp/fish-built/*
|
||||
if-no-files-found: error
|
||||
166
.github/workflows/main.yml
vendored
Normal file
166
.github/workflows/main.yml
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
name: make test
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
env:
|
||||
CTEST_PARALLEL_LEVEL: "1"
|
||||
CMAKE_BUILD_PARALLEL_LEVEL: "4"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
ubuntu:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@1.70
|
||||
- name: Install deps
|
||||
run: |
|
||||
sudo apt install gettext libpcre2-dev python3-pexpect tmux
|
||||
# Generate a locale that uses a comma as decimal separator.
|
||||
sudo locale-gen fr_FR.UTF-8
|
||||
- name: cmake
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo
|
||||
- name: make
|
||||
run: |
|
||||
make VERBOSE=1
|
||||
- name: make test
|
||||
run: |
|
||||
make VERBOSE=1 test
|
||||
|
||||
ubuntu-32bit-static-pcre2:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@1.70
|
||||
with:
|
||||
targets: "i586-unknown-linux-gnu" # rust-toolchain wants this comma-separated
|
||||
- name: Install deps
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install gettext python3-pexpect g++-multilib tmux
|
||||
- name: cmake
|
||||
env:
|
||||
CFLAGS: "-m32"
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
cmake -DFISH_USE_SYSTEM_PCRE2=OFF -DRust_CARGO_TARGET=i586-unknown-linux-gnu ..
|
||||
- name: make
|
||||
run: |
|
||||
make VERBOSE=1
|
||||
- name: make test
|
||||
run: |
|
||||
make VERBOSE=1 test
|
||||
|
||||
ubuntu-asan:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Rust has two different memory sanitizers of interest; they can't be used at the same time:
|
||||
# * AddressSanitizer detects out-of-bound access, use-after-free, use-after-return,
|
||||
# use-after-scope, double-free, invalid-free, and memory leaks.
|
||||
# * MemorySanitizer detects uninitialized reads.
|
||||
#
|
||||
RUSTFLAGS: "-Zsanitizer=address"
|
||||
# RUSTFLAGS: "-Zsanitizer=memory -Zsanitizer-memory-track-origins"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# All -Z options require running nightly
|
||||
- uses: dtolnay/rust-toolchain@nightly
|
||||
with:
|
||||
# ASAN uses `cargo build -Zbuild-std` which requires the rust-src component
|
||||
# this is comma-separated
|
||||
components: rust-src
|
||||
- name: Install deps
|
||||
run: |
|
||||
sudo apt install gettext libpcre2-dev python3-pexpect tmux
|
||||
- name: cmake
|
||||
env:
|
||||
CC: clang
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
# Rust's ASAN requires the build system to explicitly pass a --target triple. We read that
|
||||
# value from CMake variable Rust_CARGO_TARGET (shared with corrosion).
|
||||
cmake .. -DASAN=1 -DRust_CARGO_TARGET=x86_64-unknown-linux-gnu -DCMAKE_BUILD_TYPE=Debug
|
||||
- name: make
|
||||
run: |
|
||||
make VERBOSE=1
|
||||
- name: make test
|
||||
env:
|
||||
FISH_CI_SAN: 1
|
||||
ASAN_OPTIONS: check_initialization_order=1:detect_stack_use_after_return=1:detect_leaks=1:fast_unwind_on_malloc=0
|
||||
# use_tls=0 is a workaround for LSAN crashing with "Tracer caught signal 11" (SIGSEGV),
|
||||
# which seems to be an issue with TLS support in newer glibc versions under virtualized
|
||||
# environments. Follow https://github.com/google/sanitizers/issues/1342 and
|
||||
# https://github.com/google/sanitizers/issues/1409 to track this issue.
|
||||
# UPDATE: this can cause spurious leak reports for __cxa_thread_atexit_impl() under glibc.
|
||||
LSAN_OPTIONS: verbosity=0:log_threads=0:use_tls=1:print_suppressions=0
|
||||
run: |
|
||||
llvm_version=$(clang --version | awk 'NR==1 { split($NF, version, "."); print version[1] }')
|
||||
export ASAN_SYMBOLIZER_PATH=/usr/bin/llvm-symbolizer-$llvm_version
|
||||
export LSAN_OPTIONS="$LSAN_OPTIONS:suppressions=$PWD/build_tools/lsan_suppressions.txt"
|
||||
make VERBOSE=1 test
|
||||
|
||||
# Our clang++ tsan builds are not recognizing safe rust patterns (such as the fact that Drop
|
||||
# cannot be called while a thread is using the object in question). Rust has its own way of
|
||||
# running TSAN, but for the duration of the port from C++ to Rust, we'll keep this disabled.
|
||||
|
||||
# ubuntu-threadsan:
|
||||
#
|
||||
# runs-on: ubuntu-latest
|
||||
#
|
||||
# steps:
|
||||
# - uses: actions/checkout@v4
|
||||
# - uses: dtolnay/rust-toolchain@1.70
|
||||
# - name: Install deps
|
||||
# run: |
|
||||
# sudo apt install gettext libpcre2-dev python3-pexpect tmux
|
||||
# - name: cmake
|
||||
# env:
|
||||
# FISH_CI_SAN: 1
|
||||
# CC: clang
|
||||
# run: |
|
||||
# mkdir build && cd build
|
||||
# cmake ..
|
||||
# - name: make
|
||||
# run: |
|
||||
# make
|
||||
# - name: make test
|
||||
# run: |
|
||||
# make test
|
||||
|
||||
macos:
|
||||
|
||||
runs-on: macos-latest
|
||||
|
||||
env:
|
||||
# macOS runners keep having issues loading Cargo.toml dependencies from git (GitHub) instead
|
||||
# of crates.io, so give this a try. It's also sometimes significantly faster on all platforms.
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@1.70
|
||||
- name: Install deps
|
||||
run: |
|
||||
# --break-system-packages because homebrew has now declared itself "externally managed".
|
||||
# this is CI so we don't actually care.
|
||||
sudo pip3 install --break-system-packages pexpect
|
||||
brew install tmux
|
||||
- name: cmake
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
cmake -DWITH_GETTEXT=NO -DCMAKE_BUILD_TYPE=Debug ..
|
||||
- name: make
|
||||
run: |
|
||||
make VERBOSE=1
|
||||
- name: make test
|
||||
run: |
|
||||
make VERBOSE=1 test
|
||||
196
.github/workflows/release.yml
vendored
196
.github/workflows/release.yml
vendored
@@ -1,196 +0,0 @@
|
||||
name: Create a new release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version to release (tag name)'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
is-release-tag:
|
||||
name: Pre-release checks
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
with:
|
||||
# Workaround for https://github.com/actions/checkout/issues/882
|
||||
ref: ${{ inputs.version }}
|
||||
- name: Check if the pushed tag looks like a release
|
||||
run: |
|
||||
set -x
|
||||
commit_subject=$(git log -1 --format=%s)
|
||||
tag=$(git describe)
|
||||
[ "$commit_subject" = "Release $tag" ]
|
||||
|
||||
|
||||
source-tarball:
|
||||
needs: [is-release-tag]
|
||||
name: Create the source tarball
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
tarball-name: ${{ steps.version.outputs.tarball-name }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
with:
|
||||
# Workaround for https://github.com/actions/checkout/issues/882
|
||||
ref: ${{ inputs.version }}
|
||||
- name: Update package database
|
||||
run: sudo apt-get update
|
||||
- name: Install dependencies
|
||||
run: sudo apt install cmake gettext ninja-build python3-pip
|
||||
- uses: ./.github/actions/install-sphinx
|
||||
- name: Create tarball
|
||||
run: |
|
||||
set -x
|
||||
mkdir /tmp/fish-built
|
||||
FISH_ARTEFACT_PATH=/tmp/fish-built ./build_tools/make_tarball.sh
|
||||
relnotes=/tmp/fish-built/release-notes.md
|
||||
# Need history since the last release (i.e. tag) for stats.
|
||||
git fetch --tags
|
||||
git fetch --unshallow
|
||||
gpg_public_key_url=https://github.com/${{ github.actor }}.gpg
|
||||
curl -sS "$gpg_public_key_url" | grep 'PGP PUBLIC KEY BLOCK' -A5
|
||||
FISH_GPG_PUBLIC_KEY_URL=$gpg_public_key_url \
|
||||
sh -x ./build_tools/release-notes.sh >"$relnotes"
|
||||
# Delete title
|
||||
sed -n 1p "$relnotes" | grep -q "^## fish .*"
|
||||
sed -n 2p "$relnotes" | grep -q '^$'
|
||||
sed -i 1,2d "$relnotes"
|
||||
- name: Upload tarball artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
name: source-tarball
|
||||
path: |
|
||||
/tmp/fish-built/fish-${{ inputs.version }}.tar.xz
|
||||
/tmp/fish-built/release-notes.md
|
||||
if-no-files-found: error
|
||||
|
||||
packages-for-linux:
|
||||
needs: [is-release-tag]
|
||||
name: Build single-file fish for Linux
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
with:
|
||||
# Workaround for https://github.com/actions/checkout/issues/882
|
||||
ref: ${{ inputs.version }}
|
||||
- name: Install Rust Stable
|
||||
uses: ./.github/actions/rust-toolchain@stable
|
||||
with:
|
||||
targets: x86_64-unknown-linux-musl,aarch64-unknown-linux-musl
|
||||
- name: Update package database
|
||||
run: sudo apt-get update
|
||||
- name: Install dependencies
|
||||
run: sudo apt install crossbuild-essential-arm64 gettext musl-tools
|
||||
- uses: ./.github/actions/install-sphinx
|
||||
- name: Build statically-linked executables
|
||||
run: |
|
||||
set -x
|
||||
cargo build --release --target x86_64-unknown-linux-musl --bin fish
|
||||
CFLAGS="-D_FORTIFY_SOURCE=2" \
|
||||
CC=aarch64-linux-gnu-gcc \
|
||||
RUSTFLAGS="-C linker=aarch64-linux-gnu-gcc -C link-arg=-lgcc -C link-arg=-D_FORTIFY_SOURCE=0" \
|
||||
cargo build --release --target aarch64-unknown-linux-musl --bin fish
|
||||
- name: Compress
|
||||
run: |
|
||||
set -x
|
||||
for arch in x86_64 aarch64; do
|
||||
tar -cazf fish-$(git describe)-linux-$arch.tar.xz \
|
||||
-C target/$arch-unknown-linux-musl/release fish
|
||||
done
|
||||
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
name: Static builds for Linux
|
||||
path: fish-${{ inputs.version }}-linux-*.tar.xz
|
||||
if-no-files-found: error
|
||||
|
||||
create-draft-release:
|
||||
needs:
|
||||
- is-release-tag
|
||||
- source-tarball
|
||||
- packages-for-linux
|
||||
name: Create release draft
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
with:
|
||||
# Workaround for https://github.com/actions/checkout/issues/882
|
||||
ref: ${{ inputs.version }}
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
merge-multiple: true
|
||||
path: /tmp/artifacts
|
||||
- name: List artifacts
|
||||
run: find /tmp/artifacts -type f
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
tag_name: ${{ inputs.version }}
|
||||
name: fish ${{ inputs.version }}
|
||||
body_path: /tmp/artifacts/release-notes.md
|
||||
draft: true
|
||||
files: |
|
||||
/tmp/artifacts/fish-${{ inputs.version }}.tar.xz
|
||||
/tmp/artifacts/fish-${{ inputs.version }}-linux-*.tar.xz
|
||||
|
||||
packages-for-macos:
|
||||
needs: [is-release-tag, create-draft-release]
|
||||
name: Build packages for macOS
|
||||
runs-on: macos-latest
|
||||
environment: macos-codesign
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
with:
|
||||
# Workaround for https://github.com/actions/checkout/issues/882
|
||||
ref: ${{ inputs.version }}
|
||||
- name: Install Rust
|
||||
uses: ./.github/actions/rust-toolchain@stable
|
||||
with:
|
||||
targets: aarch64-apple-darwin,x86_64-apple-darwin
|
||||
- name: Install dependencies
|
||||
run: brew install gettext
|
||||
- uses: ./.github/actions/install-sphinx
|
||||
- name: Build and codesign
|
||||
run: |
|
||||
die() { echo >&2 "$*"; exit 1; }
|
||||
[ -n "$MAC_CODESIGN_APP_P12_BASE64" ] || die "Missing MAC_CODESIGN_APP_P12_BASE64"
|
||||
[ -n "$MAC_CODESIGN_INSTALLER_P12_BASE64" ] || die "Missing MAC_CODESIGN_INSTALLER_P12_BASE64"
|
||||
[ -n "$MAC_CODESIGN_PASSWORD" ] || die "Missing MAC_CODESIGN_PASSWORD"
|
||||
[ -n "$MACOS_NOTARIZE_JSON" ] || die "Missing MACOS_NOTARIZE_JSON"
|
||||
set -x
|
||||
export FISH_ARTEFACT_PATH=/tmp/fish-built
|
||||
# macOS runners keep having issues loading Cargo.toml dependencies from git (GitHub) instead
|
||||
# of crates.io, so give this a try. It's also sometimes significantly faster on all platforms.
|
||||
export CARGO_NET_GIT_FETCH_WITH_CLI=true
|
||||
cargo install apple-codesign
|
||||
mkdir -p "$FISH_ARTEFACT_PATH"
|
||||
echo "$MAC_CODESIGN_APP_P12_BASE64" | base64 --decode >/tmp/app.p12
|
||||
echo "$MAC_CODESIGN_INSTALLER_P12_BASE64" | base64 --decode >/tmp/installer.p12
|
||||
echo "$MACOS_NOTARIZE_JSON" >/tmp/notarize.json
|
||||
./build_tools/make_macos_pkg.sh -s -f /tmp/app.p12 \
|
||||
-i /tmp/installer.p12 -p "$MAC_CODESIGN_PASSWORD" \
|
||||
-n -j /tmp/notarize.json -- -c "-DWITH_DOCS=ON"
|
||||
version=$(git describe)
|
||||
[ -f "${FISH_ARTEFACT_PATH}/fish-$version.app.zip" ]
|
||||
[ -f "${FISH_ARTEFACT_PATH}/fish-$version.pkg" ]
|
||||
rm /tmp/installer.p12 /tmp/app.p12 /tmp/notarize.json
|
||||
env:
|
||||
MAC_CODESIGN_APP_P12_BASE64: ${{ secrets.MAC_CODESIGN_APP_P12_BASE64 }}
|
||||
MAC_CODESIGN_INSTALLER_P12_BASE64: ${{ secrets.MAC_CODESIGN_INSTALLER_P12_BASE64 }}
|
||||
MAC_CODESIGN_PASSWORD: ${{ secrets.MAC_CODESIGN_PASSWORD }}
|
||||
MACOS_NOTARIZE_JSON: ${{ secrets.MACOS_NOTARIZE_JSON }}
|
||||
- name: Add macOS packages to the release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
version=$(git describe)
|
||||
gh release upload $version \
|
||||
/tmp/fish-built/fish-$version.app.zip \
|
||||
/tmp/fish-built/fish-$version.pkg
|
||||
33
.github/workflows/rust_checks.yml
vendored
Normal file
33
.github/workflows/rust_checks.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
name: Rust checks
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
rustfmt:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: cargo fmt
|
||||
run: cargo fmt --check --all
|
||||
|
||||
clippy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Install deps
|
||||
run: |
|
||||
sudo apt install gettext libpcre2-dev
|
||||
- name: cmake
|
||||
run: |
|
||||
cmake -B build
|
||||
- name: cargo clippy
|
||||
# This used to have --deny=warnings, but that turns rust release day
|
||||
# into automatic CI failure day, so we don't do that.
|
||||
run: cargo clippy --workspace --all-targets
|
||||
179
.github/workflows/test.yml
vendored
179
.github/workflows/test.yml
vendored
@@ -1,179 +0,0 @@
|
||||
name: Test
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
env:
|
||||
FISH_TEST_MAX_CONCURRENCY: "4"
|
||||
CMAKE_BUILD_PARALLEL_LEVEL: "4"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
ubuntu:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: ./.github/actions/rust-toolchain@oldest-supported
|
||||
- name: Install deps
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
include_sphinx: true
|
||||
- name: Generate a locale that uses a comma as decimal separator.
|
||||
run: |
|
||||
sudo locale-gen fr_FR.UTF-8
|
||||
- name: cmake
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo
|
||||
- name: make
|
||||
run: |
|
||||
make -C build VERBOSE=1
|
||||
- name: make fish_run_tests
|
||||
run: |
|
||||
make -C build VERBOSE=1 fish_run_tests
|
||||
- name: translation updates
|
||||
run: |
|
||||
# Generate PO files. This should not result it a change in the repo if all translations are
|
||||
# up to date.
|
||||
# Ensure that fish is available as an executable.
|
||||
PATH="$PWD/build:$PATH" build_tools/update_translations.fish
|
||||
# Show diff output. Fail if there is any.
|
||||
git --no-pager diff --exit-code || { echo 'There are uncommitted changes after regenerating the gettext PO files. Make sure to update them via `build_tools/update_translations.fish` after changing source files.'; exit 1; }
|
||||
|
||||
ubuntu-32bit-static-pcre2:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: ./.github/actions/rust-toolchain@oldest-supported
|
||||
with:
|
||||
targets: "i586-unknown-linux-gnu"
|
||||
- name: Update package database
|
||||
run: sudo apt-get update
|
||||
- name: Install deps
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
include_pcre: false
|
||||
include_sphinx: false
|
||||
- name: Install g++-multilib
|
||||
run: sudo apt install g++-multilib
|
||||
- name: cmake
|
||||
env:
|
||||
CFLAGS: "-m32"
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
cmake -DFISH_USE_SYSTEM_PCRE2=OFF -DRust_CARGO_TARGET=i586-unknown-linux-gnu ..
|
||||
- name: make
|
||||
run: |
|
||||
make -C build VERBOSE=1
|
||||
- name: make fish_run_tests
|
||||
run: |
|
||||
make -C build VERBOSE=1 fish_run_tests
|
||||
|
||||
ubuntu-asan:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Rust has two different memory sanitizers of interest; they can't be used at the same time:
|
||||
# * AddressSanitizer detects out-of-bound access, use-after-free, use-after-return,
|
||||
# use-after-scope, double-free, invalid-free, and memory leaks.
|
||||
# * MemorySanitizer detects uninitialized reads.
|
||||
#
|
||||
RUSTFLAGS: "-Zsanitizer=address"
|
||||
# RUSTFLAGS: "-Zsanitizer=memory -Zsanitizer-memory-track-origins"
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
# All -Z options require running nightly
|
||||
- uses: dtolnay/rust-toolchain@nightly
|
||||
with:
|
||||
# ASAN uses `cargo build -Zbuild-std` which requires the rust-src component
|
||||
# this is comma-separated
|
||||
components: rust-src
|
||||
- name: Update package database
|
||||
run: sudo apt-get update
|
||||
- name: Install deps
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
include_sphinx: false
|
||||
- name: Install llvm
|
||||
run: |
|
||||
sudo apt install llvm # for llvm-symbolizer
|
||||
- name: cmake
|
||||
env:
|
||||
CC: clang
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
# Rust's ASAN requires the build system to explicitly pass a --target triple. We read that
|
||||
# value from CMake variable Rust_CARGO_TARGET.
|
||||
cmake .. -DASAN=1 -DRust_CARGO_TARGET=x86_64-unknown-linux-gnu -DCMAKE_BUILD_TYPE=Debug
|
||||
- name: make
|
||||
run: |
|
||||
make -C build VERBOSE=1
|
||||
- name: make fish_run_tests
|
||||
env:
|
||||
FISH_CI_SAN: 1
|
||||
ASAN_OPTIONS: check_initialization_order=1:detect_stack_use_after_return=1:detect_leaks=1:fast_unwind_on_malloc=0
|
||||
# use_tls=0 is a workaround for LSAN crashing with "Tracer caught signal 11" (SIGSEGV),
|
||||
# which seems to be an issue with TLS support in newer glibc versions under virtualized
|
||||
# environments. Follow https://github.com/google/sanitizers/issues/1342 and
|
||||
# https://github.com/google/sanitizers/issues/1409 to track this issue.
|
||||
# UPDATE: this can cause spurious leak reports for __cxa_thread_atexit_impl() under glibc.
|
||||
LSAN_OPTIONS: verbosity=0:log_threads=0:use_tls=1:print_suppressions=0
|
||||
run: |
|
||||
set -x
|
||||
export ASAN_SYMBOLIZER_PATH=$(command -v /usr/bin/llvm-symbolizer* | sort -n | head -1)
|
||||
export LSAN_OPTIONS="$LSAN_OPTIONS:suppressions=$PWD/build_tools/lsan_suppressions.txt"
|
||||
make -C build VERBOSE=1 fish_run_tests
|
||||
|
||||
macos:
|
||||
runs-on: macos-latest
|
||||
env:
|
||||
# macOS runners keep having issues loading Cargo.toml dependencies from git (GitHub) instead
|
||||
# of crates.io, so give this a try. It's also sometimes significantly faster on all platforms.
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: ./.github/actions/rust-toolchain@oldest-supported
|
||||
- name: Install deps
|
||||
run: |
|
||||
# --break-system-packages because homebrew has now declared itself "externally managed".
|
||||
# this is CI so we don't actually care.
|
||||
sudo pip3 install --break-system-packages pexpect
|
||||
brew install gettext tmux
|
||||
- uses: ./.github/actions/install-sphinx
|
||||
- name: cmake
|
||||
run: |
|
||||
mkdir build && cd build
|
||||
FISH_TEST_MAX_CONCURRENCY=1 \
|
||||
cmake -DCMAKE_BUILD_TYPE=Debug ..
|
||||
- name: make
|
||||
run: |
|
||||
make -C build VERBOSE=1
|
||||
- name: make fish_run_tests
|
||||
run: |
|
||||
make -C build VERBOSE=1 fish_run_tests
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: msys2 {0}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2, build_tools/update-dependencies.sh
|
||||
- uses: msys2/setup-msys2@4f806de0a5a7294ffabaff804b38a9b435a73bda # v2.30.0, build_tools/update-dependencies.sh
|
||||
with:
|
||||
update: true
|
||||
msystem: MSYS
|
||||
- name: Install deps
|
||||
# Not using setup-msys2 `install` option to make it easier to copy/paste
|
||||
run: |
|
||||
pacman --noconfirm -S --needed git rust
|
||||
- name: cargo build
|
||||
run: |
|
||||
cargo build
|
||||
- name: smoketest
|
||||
# We can't run `build_tools/check.sh` yet, there are just too many failures
|
||||
# so this is just a quick check to make sure that fish can swim
|
||||
run: |
|
||||
set -x
|
||||
[ "$(target/debug/fish.exe -c 'echo (math 1 + 1)')" = 2 ]
|
||||
cargo test
|
||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -7,6 +7,7 @@
|
||||
*.DS_Store
|
||||
*.a
|
||||
*.app
|
||||
*.d
|
||||
*.dll
|
||||
*.dylib
|
||||
*.exe
|
||||
@@ -37,6 +38,7 @@ Desktop.ini
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
|
||||
messages.pot
|
||||
.directory
|
||||
.fuse_hidden*
|
||||
|
||||
@@ -75,7 +77,6 @@ __pycache__
|
||||
/share/__fish_build_paths.fish
|
||||
/share/pkgconfig
|
||||
/tests/*.tmp.*
|
||||
/tests/.last-check-all-files
|
||||
|
||||
# xcode
|
||||
## Build generated
|
||||
@@ -101,9 +102,3 @@ target/
|
||||
|
||||
# Generated by clangd
|
||||
/.cache
|
||||
|
||||
# JetBrains editors.
|
||||
.idea/
|
||||
|
||||
# AI slop
|
||||
.claude/
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
edition = "2024"
|
||||
@@ -1,3 +0,0 @@
|
||||
# Coding style
|
||||
|
||||
- Use comments sparingly. Don't explain what the code is doing, rather explain why.
|
||||
@@ -31,7 +31,7 @@ PREFIX?=/usr/local
|
||||
build/fish: build/$(BUILDFILE)
|
||||
$(CMAKE) --build build
|
||||
|
||||
# Don't split the mkdir into its own rule because that would cause CMake to regenerate the build
|
||||
# Don't split the mkdir into its own rule because that would cause CMake to regenerate the build
|
||||
# files after each build (because it adds the mdate of the build directory into the out-of-date
|
||||
# calculation tree). GNUmake supports order-only dependencies, BSDmake does not seem to.
|
||||
build/$(BUILDFILE):
|
||||
@@ -48,11 +48,7 @@ clean:
|
||||
|
||||
.PHONY: test
|
||||
test: build/fish
|
||||
$(CMAKE) --build build --target fish_run_tests
|
||||
|
||||
.PHONY: fish_run_tests
|
||||
fish_run_tests: build/fish
|
||||
$(CMAKE) --build build --target fish_run_tests
|
||||
$(CMAKE) --build build --target test
|
||||
|
||||
.PHONY: run
|
||||
run: build/fish
|
||||
|
||||
832
CHANGELOG.rst
832
CHANGELOG.rst
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,14 @@
|
||||
cmake_minimum_required(VERSION 3.15)
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake")
|
||||
|
||||
project(fish LANGUAGES C)
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
|
||||
|
||||
set(DEFAULT_BUILD_TYPE "RelWithDebInfo")
|
||||
set(DEFAULT_BUILD_TYPE "Debug")
|
||||
|
||||
# Generate Xcode schemas (but not for tests).
|
||||
set(CMAKE_XCODE_GENERATE_SCHEME 1)
|
||||
|
||||
if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
|
||||
message(STATUS "Setting build type to default '${DEFAULT_BUILD_TYPE}'")
|
||||
@@ -14,6 +17,9 @@ endif()
|
||||
|
||||
# Set up standard directories.
|
||||
include(GNUInstallDirs)
|
||||
add_definitions(-D_UNICODE=1)
|
||||
|
||||
include(cmake/gettext.cmake)
|
||||
|
||||
# Set up PCRE2
|
||||
# This sets an environment variable that needs to be available before the Rust stanzas
|
||||
@@ -24,26 +30,15 @@ include(cmake/Rust.cmake)
|
||||
# Work around issue where archive-built libs go in the wrong place.
|
||||
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR})
|
||||
|
||||
find_program(SPHINX_EXECUTABLE NAMES sphinx-build
|
||||
HINTS
|
||||
$ENV{SPHINX_DIR}
|
||||
PATH_SUFFIXES bin
|
||||
DOC "Sphinx documentation generator")
|
||||
if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR)
|
||||
set(FISH_IN_TREE_BUILD TRUE)
|
||||
else()
|
||||
set(FISH_IN_TREE_BUILD FALSE)
|
||||
endif()
|
||||
|
||||
|
||||
# Tell Cargo where our build directory is so it can find Cargo.toml.
|
||||
set(VARS_FOR_CARGO
|
||||
"FISH_CMAKE_BINARY_DIR=${CMAKE_BINARY_DIR}"
|
||||
"PREFIX=${CMAKE_INSTALL_PREFIX}"
|
||||
"DOCDIR=${CMAKE_INSTALL_FULL_DOCDIR}"
|
||||
"DATADIR=${CMAKE_INSTALL_FULL_DATADIR}"
|
||||
"SYSCONFDIR=${CMAKE_INSTALL_FULL_SYSCONFDIR}"
|
||||
"BINDIR=${CMAKE_INSTALL_FULL_BINDIR}"
|
||||
"CARGO_TARGET_DIR=${FISH_RUST_BUILD_DIR}"
|
||||
"CARGO_BUILD_RUSTC=${Rust_COMPILER}"
|
||||
"${FISH_PCRE2_BUILDFLAG}"
|
||||
"FISH_SPHINX=${SPHINX_EXECUTABLE}"
|
||||
)
|
||||
# Set up the machinery around FISH-BUILD-VERSION-FILE
|
||||
# This defines the FBVF variable.
|
||||
include(Version)
|
||||
|
||||
# Let fish pick up when we're running out of the build directory without installing
|
||||
get_filename_component(REAL_CMAKE_BINARY_DIR "${CMAKE_BINARY_DIR}" REALPATH)
|
||||
@@ -51,49 +46,42 @@ get_filename_component(REAL_CMAKE_SOURCE_DIR "${CMAKE_SOURCE_DIR}" REALPATH)
|
||||
add_definitions(-DCMAKE_BINARY_DIR="${REAL_CMAKE_BINARY_DIR}")
|
||||
add_definitions(-DCMAKE_SOURCE_DIR="${REAL_CMAKE_SOURCE_DIR}")
|
||||
|
||||
set(build_types Release RelWithDebInfo Debug "")
|
||||
if(NOT "${CMAKE_BUILD_TYPE}" IN_LIST build_types)
|
||||
message(WARNING "Unsupported build type ${CMAKE_BUILD_TYPE}. If this doesn't build, try one of Release, RelWithDebInfo or Debug")
|
||||
endif()
|
||||
|
||||
add_custom_target(
|
||||
fish ALL
|
||||
COMMAND
|
||||
"${CMAKE_COMMAND}" -E
|
||||
env ${VARS_FOR_CARGO}
|
||||
${Rust_CARGO}
|
||||
build --bin fish
|
||||
$<$<CONFIG:Release>:--release>
|
||||
$<$<CONFIG:RelWithDebInfo>:--profile=release-with-debug>
|
||||
--target ${Rust_CARGO_TARGET}
|
||||
--no-default-features
|
||||
--features=${FISH_CARGO_FEATURES}
|
||||
${CARGO_FLAGS}
|
||||
&&
|
||||
"${CMAKE_COMMAND}" -E
|
||||
copy "${rust_target_dir}/${rust_profile}/fish" "${CMAKE_CURRENT_BINARY_DIR}"
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
USES_TERMINAL
|
||||
)
|
||||
|
||||
function(CREATE_LINK target)
|
||||
# Define a function to build and link dependencies.
|
||||
function(CREATE_TARGET target)
|
||||
add_custom_target(
|
||||
${target} ALL
|
||||
DEPENDS fish
|
||||
COMMAND ln -f fish ${target}
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
|
||||
COMMAND
|
||||
"${CMAKE_COMMAND}" -E
|
||||
env ${VARS_FOR_CARGO}
|
||||
${Rust_CARGO}
|
||||
build --bin ${target}
|
||||
$<$<CONFIG:Release>:--release>
|
||||
$<$<CONFIG:RelWithDebInfo>:--release>
|
||||
--target ${Rust_CARGO_TARGET}
|
||||
${CARGO_FLAGS}
|
||||
${FEATURES_ARG}
|
||||
&&
|
||||
"${CMAKE_COMMAND}" -E
|
||||
copy "${rust_target_dir}/${rust_profile}/${target}" "${CMAKE_CURRENT_BINARY_DIR}"
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
USES_TERMINAL
|
||||
)
|
||||
endfunction(CREATE_LINK)
|
||||
endfunction(CREATE_TARGET)
|
||||
|
||||
# Define fish.
|
||||
create_target(fish)
|
||||
|
||||
# Define fish_indent.
|
||||
create_link(fish_indent)
|
||||
create_target(fish_indent)
|
||||
|
||||
# Define fish_key_reader.
|
||||
create_link(fish_key_reader)
|
||||
create_target(fish_key_reader)
|
||||
|
||||
# Set up the docs.
|
||||
include(cmake/Docs.cmake)
|
||||
|
||||
# A helper for running tests.
|
||||
add_executable(fish_test_helper src/fish_test_helper.c)
|
||||
# Set up tests.
|
||||
include(cmake/Tests.cmake)
|
||||
|
||||
|
||||
@@ -126,3 +126,4 @@ enforcement ladder](https://github.com/mozilla/diversity).
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
|
||||
|
||||
312
CONTRIBUTING.rst
312
CONTRIBUTING.rst
@@ -11,20 +11,13 @@ Contributions are welcome, and there are many ways to contribute!
|
||||
Whether you want to change some of the core Rust source, enhance or add a completion script or function,
|
||||
improve the documentation or translate something, this document will tell you how.
|
||||
|
||||
Getting Set Up
|
||||
==============
|
||||
|
||||
Mailing List
|
||||
============
|
||||
|
||||
Send patches to the public mailing list: mailto:~krobelus/fish-shell@lists.sr.ht.
|
||||
Archives are available at https://lists.sr.ht/~krobelus/fish-shell/.
|
||||
|
||||
GitHub
|
||||
======
|
||||
|
||||
Fish is available on GitHub, at https://github.com/fish-shell/fish-shell.
|
||||
Fish is developed on Github, at https://github.com/fish-shell/fish-shell.
|
||||
|
||||
First, you'll need an account there, and you'll need a git clone of fish.
|
||||
Fork it on GitHub and then run::
|
||||
Fork it on Github and then run::
|
||||
|
||||
git clone https://github.com/<USERNAME>/fish-shell.git
|
||||
|
||||
@@ -36,7 +29,7 @@ For that, you'll require:
|
||||
- Rust - when in doubt, try rustup
|
||||
- CMake
|
||||
- PCRE2 (headers and libraries) - optional, this will be downloaded if missing
|
||||
- gettext (only the msgfmt tool) - optional, for translation support
|
||||
- gettext (headers and libraries) - optional, for translation support
|
||||
- Sphinx - optional, to build the documentation
|
||||
|
||||
Of course not everything is required always - if you just want to contribute something to the documentation you'll just need Sphinx,
|
||||
@@ -49,32 +42,15 @@ Guidelines
|
||||
|
||||
In short:
|
||||
|
||||
- Be conservative in what you need (keep to the agreed minimum supported Rust version, limit new dependencies)
|
||||
- Use automated tools to help you (``cargo xtask check``)
|
||||
|
||||
Commit History
|
||||
==============
|
||||
|
||||
We use a linear, `recipe-style <https://www.bitsnbites.eu/git-history-work-log-vs-recipe/>`__ history.
|
||||
Every commit should pass our checks.
|
||||
We do not want "fixup" commits in our history.
|
||||
If you notice an issue with a commit in a pull request, or get feedback suggesting changes,
|
||||
you should rewrite the commit history and fix the relevant commits directly,
|
||||
instead of adding new "fixup" commits.
|
||||
When a pull request is ready, we rebase it on top of the current master branch,
|
||||
so don't be shy about rewriting the history of commits which are not on master yet.
|
||||
Rebasing (not merging) your pull request on the latest version of master is also welcome, especially if it resolves conflicts.
|
||||
|
||||
If you're using Git, consider using `jj <https://www.jj-vcs.dev/>`__ to make this easier.
|
||||
|
||||
If a commit should close an issue, add a ``Fixes #<issue-number>`` line at the end of the commit description.
|
||||
- Be conservative in what you need (``C++11``, few dependencies)
|
||||
- Use automated tools to help you (including ``make test``, ``build_tools/style.fish`` and ``make lint``)
|
||||
|
||||
Contributing completions
|
||||
========================
|
||||
|
||||
Completion scripts are the most common contribution to fish, and they are very welcome.
|
||||
|
||||
In general, we'll take all well-written completion scripts for a command that is publicly available.
|
||||
In general, we'll take all well-written completion scripts for a command that is publically available.
|
||||
This means no private tools or personal scripts, and we do reserve the right to reject for other reasons.
|
||||
|
||||
Before you try to contribute them to fish, consider if the authors of the tool you are completing want to maintain the script instead.
|
||||
@@ -88,7 +64,7 @@ Completion scripts should
|
||||
|
||||
1. Use as few dependencies as possible - try to use fish's builtins like ``string`` instead of ``grep`` and ``awk``,
|
||||
use ``python`` to read json instead of ``jq`` (because it's already a soft dependency for fish's tools)
|
||||
2. If it uses a common unix tool, use POSIX-compatible invocations - ideally it would work on GNU/Linux, macOS, the BSDs and other systems
|
||||
2. If it uses a common unix tool, use posix-compatible invocations - ideally it would work on GNU/Linux, macOS, the BSDs and other systems
|
||||
3. Option and argument descriptions should be kept short.
|
||||
The shorter the description, the more likely it is that fish can use more columns.
|
||||
4. Function names should start with ``__fish``, and functions should be kept in the completion file unless they're used elsewhere.
|
||||
@@ -105,40 +81,45 @@ Contributing documentation
|
||||
==========================
|
||||
|
||||
The documentation is stored in ``doc_src/``, and written in ReStructured Text and built with Sphinx.
|
||||
The builtins and various functions shipped with fish are documented in ``doc_src/cmds/``.
|
||||
|
||||
To build an HTML version of the docs locally, run::
|
||||
To build it locally, run from the main fish-shell directory::
|
||||
|
||||
cargo xtask html-docs
|
||||
sphinx-build -j 8 -b html -n doc_src/ /tmp/fish-doc/
|
||||
|
||||
will output to ``target/fish-docs/html`` or, if you use CMake::
|
||||
|
||||
cmake --build build -t sphinx-docs
|
||||
|
||||
will output to ``build/cargo/fish-docs/html/``. You can also run ``sphinx-build`` directly, which allows choosing the output directory::
|
||||
|
||||
sphinx-build -j auto -b html doc_src/ /tmp/fish-doc/
|
||||
|
||||
will output HTML docs to ``/tmp/fish-doc``.
|
||||
|
||||
After building them, you can open the HTML docs in a browser and see that it looks okay.
|
||||
which will build the docs as html in /tmp/fish-doc. You can open it in a browser and see that it looks okay.
|
||||
|
||||
The builtins and various functions shipped with fish are documented in doc_src/cmds/.
|
||||
|
||||
Code Style
|
||||
==========
|
||||
|
||||
For formatting, we use:
|
||||
To ensure your changes conform to the style rules run
|
||||
|
||||
- ``rustfmt`` for Rust
|
||||
::
|
||||
|
||||
build_tools/style.fish
|
||||
|
||||
before committing your change. That will run our autoformatters:
|
||||
|
||||
- ``git-clang-format`` for c++
|
||||
- ``fish_indent`` (shipped with fish) for fish script
|
||||
- ``ruff format`` for Python
|
||||
- ``black`` for python
|
||||
|
||||
To reformat files, there is a script
|
||||
If you’ve already committed your changes that’s okay since it will then
|
||||
check the files in the most recent commit. This can be useful after
|
||||
you’ve merged another person’s change and want to check that it’s style
|
||||
is acceptable. However, in that case it will run ``clang-format`` to
|
||||
ensure the entire file, not just the lines modified by the commit,
|
||||
conform to the style.
|
||||
|
||||
If you want to check the style of the entire code base run
|
||||
|
||||
::
|
||||
|
||||
build_tools/style.fish --all
|
||||
build_tools/style.fish somefile.rs some.fish
|
||||
|
||||
That command will refuse to restyle any files if you have uncommitted
|
||||
changes.
|
||||
|
||||
Fish Script Style Guide
|
||||
-----------------------
|
||||
@@ -190,10 +171,10 @@ made to run fish_indent via e.g.
|
||||
(add-hook 'fish-mode-hook (lambda ()
|
||||
(add-hook 'before-save-hook 'fish_indent-before-save)))
|
||||
|
||||
Minimum Supported Rust Version (MSRV) Policy
|
||||
--------------------------------------------
|
||||
Rust Style Guide
|
||||
----------------
|
||||
|
||||
We support at least the version of ``rustc`` available in Debian Stable.
|
||||
Use ``cargo fmt`` and ``cargo clippy``. Clippy warnings can be turned off if there's a good reason to.
|
||||
|
||||
Testing
|
||||
=======
|
||||
@@ -202,166 +183,151 @@ The source code for fish includes a large collection of tests. If you
|
||||
are making any changes to fish, running these tests is a good way to make
|
||||
sure the behaviour remains consistent and regressions are not
|
||||
introduced. Even if you don’t run the tests on your machine, they will
|
||||
still be run via GitHub Actions.
|
||||
still be run via Github Actions.
|
||||
|
||||
You are strongly encouraged to add tests when changing the functionality
|
||||
of fish, especially if you are fixing a bug to help ensure there are no
|
||||
regressions in the future (i.e., we don’t reintroduce the bug).
|
||||
|
||||
Unit tests live next to the implementation in Rust source files, in inline submodules (``mod tests {}``).
|
||||
The tests can be found in three places:
|
||||
|
||||
System tests live in ``tests/``:
|
||||
- src/tests for unit tests.
|
||||
- tests/checks for script tests, run by `littlecheck <https://github.com/ridiculousfish/littlecheck>`__
|
||||
- tests/pexpects for interactive tests using `pexpect <https://pexpect.readthedocs.io/en/stable/>`__
|
||||
|
||||
- ``tests/checks`` are run by `littlecheck <https://github.com/ridiculousfish/littlecheck>`__
|
||||
and test noninteractive (script) behavior,
|
||||
except for ``tests/checks/tmux-*`` which test interactive scenarios.
|
||||
- ``tests/pexpects`` tests interactive scenarios using `pexpect <https://pexpect.readthedocs.io/en/stable/>`__
|
||||
When in doubt, the bulk of the tests should be added as a littlecheck test in tests/checks, as they are the easiest to modify and run, and much faster and more dependable than pexpect tests. The syntax is fairly self-explanatory. It's a fish script with the expected output in ``# CHECK:`` or ``# CHECKERR:`` (for stderr) comments.
|
||||
|
||||
When in doubt, the bulk of the tests should be added as a littlecheck test in tests/checks, as they are the easiest to modify and run, and much faster and more dependable than pexpect tests.
|
||||
The syntax is fairly self-explanatory.
|
||||
It's a fish script with the expected output in ``# CHECK:`` or ``# CHECKERR:`` (for stderr) comments.
|
||||
If your littlecheck test has a specific dependency, use ``# REQUIRE: ...`` with a POSIX sh script.
|
||||
|
||||
The pexpect tests are written in Python and can simulate input and output to/from a terminal, so they are needed for anything that needs actual interactivity.
|
||||
The runner is in tests/pexpect_helper.py, in case you need to modify something there.
|
||||
|
||||
These tests can be run via the tests/test_driver.py Python script, which will set up the environment.
|
||||
It sets up a temporary $HOME and also uses it as the current directory, so you do not need to create a temporary directory in them.
|
||||
|
||||
If you need a command to do something weird to test something, maybe add it to the ``fish_test_helper`` binary (in ``tests/fish_test_helper.c``).
|
||||
The pexpects are written in python and can simulate input and output to/from a terminal, so they are needed for anything that needs actual interactivity. The runner is in build_tools/pexpect_helper.py, in case you need to modify something there.
|
||||
|
||||
Local testing
|
||||
-------------
|
||||
|
||||
The tests can be run on your local system::
|
||||
The tests can be run on your local computer on all operating systems.
|
||||
|
||||
cargo build
|
||||
# Run unit tests
|
||||
cargo test
|
||||
# Run system tests
|
||||
tests/test_driver.py target/debug
|
||||
# Run a specific system test.
|
||||
tests/test_driver.py target/debug tests/checks/abbr.fish
|
||||
::
|
||||
|
||||
Here, the first argument to test_driver.py refers to a directory with ``fish``, ``fish_indent`` and ``fish_key_reader`` in it.
|
||||
In this example we're in the root of the workspace and have run ``cargo build`` without ``--release``, so it's a debug build.
|
||||
cmake path/to/fish-shell
|
||||
make test
|
||||
|
||||
To run all tests and linters, use::
|
||||
Git hooks
|
||||
---------
|
||||
|
||||
cargo xtask check
|
||||
Since developers sometimes forget to run the tests, it can be helpful to
|
||||
use git hooks (see githooks(5)) to automate it.
|
||||
|
||||
One possibility is a pre-push hook script like this one:
|
||||
|
||||
.. code:: sh
|
||||
|
||||
#!/bin/sh
|
||||
#### A pre-push hook for the fish-shell project
|
||||
# This will run the tests when a push to master is detected, and will stop that if the tests fail
|
||||
# Save this as .git/hooks/pre-push and make it executable
|
||||
|
||||
protected_branch='master'
|
||||
|
||||
# Git gives us lines like "refs/heads/frombranch SOMESHA1 refs/heads/tobranch SOMESHA1"
|
||||
# We're only interested in the branches
|
||||
while read from _ to _; do
|
||||
if [ "x$to" = "xrefs/heads/$protected_branch" ]; then
|
||||
isprotected=1
|
||||
fi
|
||||
done
|
||||
if [ "x$isprotected" = x1 ]; then
|
||||
echo "Running tests before push to master"
|
||||
make test
|
||||
RESULT=$?
|
||||
if [ $RESULT -ne 0 ]; then
|
||||
echo "Tests failed for a push to master, we can't let you do that" >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
exit 0
|
||||
|
||||
This will check if the push is to the master branch and, if it is, only
|
||||
allow the push if running ``make test`` succeeds. In some circumstances
|
||||
it may be advisable to circumvent this check with
|
||||
``git push --no-verify``, but usually that isn’t necessary.
|
||||
|
||||
To install the hook, place the code in a new file
|
||||
``.git/hooks/pre-push`` and make it executable.
|
||||
|
||||
Coverity Scan
|
||||
-------------
|
||||
|
||||
We use Coverity’s static analysis tool which offers free access to open
|
||||
source projects. While access to the tool itself is restricted,
|
||||
fish-shell organization members should know that they can login
|
||||
`here <https://scan.coverity.com/projects/fish-shell-fish-shell?tab=overview>`__
|
||||
with their GitHub account. Currently, tests are triggered upon merging
|
||||
the ``master`` branch into ``coverity_scan_master``. Even if you are not
|
||||
a fish developer, you can keep an eye on our statistics there.
|
||||
|
||||
Contributing Translations
|
||||
=========================
|
||||
|
||||
Fish uses GNU gettext to translate messages from English to other languages.
|
||||
We use custom tools for extracting messages from source files and to localize at runtime.
|
||||
This means that we do not have a runtime dependency on the gettext library.
|
||||
It also means that some features are not supported, such as message context and plurals.
|
||||
We also expect all files to be UTF-8-encoded.
|
||||
In practice, this should not matter much for contributing translations.
|
||||
Fish uses the GNU gettext library to translate messages from English to
|
||||
other languages.
|
||||
|
||||
Translation sources are stored in the ``localization/po`` directory and named ``ll_CC.po``,
|
||||
where ``ll`` is the two (or possibly three) letter ISO 639-1 language code of the target language
|
||||
(e.g. ``pt`` for Portuguese). ``CC`` is an ISO 3166 country/territory code,
|
||||
(e.g. ``BR`` for Brazil).
|
||||
An example for a valid name is ``pt_BR.po``, indicating Brazilian Portuguese.
|
||||
These are the files you will interact with when adding translations.
|
||||
Creating and updating translations requires the Gettext tools, including
|
||||
``xgettext``, ``msgfmt`` and ``msgmerge``. Translation sources are
|
||||
stored in the ``po`` directory, named ``LANG.po``, where ``LANG`` is the
|
||||
two letter ISO 639-1 language code of the target language (eg ``de`` for
|
||||
German).
|
||||
|
||||
Adding translations for a new language
|
||||
--------------------------------------
|
||||
To create a new translation:
|
||||
|
||||
Creating new translations requires the Gettext tools.
|
||||
More specifically, you will need ``msguniq`` and ``msgmerge`` for creating translations for a new
|
||||
language.
|
||||
To create a new translation, run::
|
||||
* generate a ``messages.pot`` file by running ``build_tools/fish_xgettext.fish`` from
|
||||
the source tree
|
||||
* copy ``messages.pot`` to ``po/LANG.po``
|
||||
|
||||
build_tools/update_translations.fish localization/po/ll_CC.po
|
||||
To update a translation:
|
||||
|
||||
This will create a new PO file containing all messages available for translation.
|
||||
If the file already exists, it will be updated.
|
||||
* generate a ``messages.pot`` file by running
|
||||
``build_tools/fish_xgettext.fish`` from the source tree
|
||||
|
||||
After modifying a PO file, you can recompile fish, and it will integrate the modifications you made.
|
||||
This requires that the ``msgfmt`` utility is installed (comes as part of ``gettext``).
|
||||
It is important that the ``localize-messages`` cargo feature is enabled, which it is by default.
|
||||
You can explicitly enable it using::
|
||||
* update the existing translation by running
|
||||
``msgmerge --update --no-fuzzy-matching po/LANG.po messages.pot``
|
||||
|
||||
cargo build --features=localize-messages
|
||||
|
||||
Use environment variables to tell fish which language to use, e.g.::
|
||||
|
||||
LANG=pt_BR.utf8 fish
|
||||
|
||||
or within the running fish shell::
|
||||
|
||||
set LANG pt_BR.utf8
|
||||
|
||||
For more options regarding how to choose languages, see
|
||||
`the corresponding gettext documentation
|
||||
<https://www.gnu.org/software/gettext/manual/html_node/Locale-Environment-Variables.html>`__.
|
||||
One neat thing you can do is set a list of languages to check for translations in the order defined
|
||||
using the ``LANGUAGE`` variable, e.g.::
|
||||
|
||||
set LANGUAGE pt_BR de_DE
|
||||
|
||||
to try to translate messages to Portuguese, if that fails try German, and if that fails too you will
|
||||
see the English version defined in the source code.
|
||||
|
||||
Modifying existing translations
|
||||
-------------------------------
|
||||
|
||||
If you want to work on translations for a language which already has a corresponding ``po`` file, it
|
||||
is sufficient to edit this file. No other changes are necessary.
|
||||
|
||||
After recompiling fish, you should be able to see your translations in action. See the previous
|
||||
section for details.
|
||||
|
||||
Editing PO files
|
||||
----------------
|
||||
The ``--no-fuzzy-matching`` is important as we have had terrible experiences with gettext's "fuzzy" translations in the past.
|
||||
|
||||
Many tools are available for editing translation files, including
|
||||
command-line and graphical user interface programs. For simple use, you can use your text editor.
|
||||
command-line and graphical user interface programs. For simple use, you can just use your text editor.
|
||||
|
||||
Open up the PO file, for example ``localization/po/sv.po``, and you'll see something like::
|
||||
Open up the po file, for example ``po/sv.po``, and you'll see something like::
|
||||
|
||||
msgid "%s: No suitable job\n"
|
||||
msgstr ""
|
||||
msgid "%ls: No suitable job\n"
|
||||
msgstr ""
|
||||
|
||||
The ``msgid`` here is the "name" of the string to translate, typically the English string to translate.
|
||||
The second line (``msgstr``) is where your translation goes.
|
||||
The ``msgid`` here is the "name" of the string to translate, typically the english string to translate. The second line (``msgstr``) is where your translation goes.
|
||||
|
||||
For example::
|
||||
|
||||
msgid "%s: No suitable job\n"
|
||||
msgstr "%s: Inget passande jobb\n"
|
||||
msgid "%ls: No suitable job\n"
|
||||
msgstr "%ls: Inget passande jobb\n"
|
||||
|
||||
Any ``%s`` or ``%d`` are placeholders that fish will use for formatting at runtime. It is important that they match - the translated string should have the same placeholders in the same order.
|
||||
Any ``%s`` / ``%ls`` or ``%d`` are placeholders that fish will use for formatting at runtime. It is important that they match - the translated string should have the same placeholders in the same order.
|
||||
|
||||
Also any escaped characters, like that ``\n`` newline at the end, should be kept so the translation has the same behavior.
|
||||
|
||||
Our tests run ``msgfmt --check-format /path/to/file``, so they would catch mismatched placeholders - otherwise fish would crash at runtime when the string is about to be used.
|
||||
|
||||
Be cautious about blindly updating an existing translation file.
|
||||
``msgid`` strings should never be updated manually, only by running the appropriate script.
|
||||
|
||||
Modifications to strings in source files
|
||||
----------------------------------------
|
||||
|
||||
If a string changes in the sources, the old translations will no longer work.
|
||||
They will be preserved in the PO files, but commented-out (starting with ``#~``).
|
||||
If you add/remove/change a translatable strings in a source file,
|
||||
run ``build_tools/update_translations.fish`` to propagate this to all translation files (``localization/po/*.po``).
|
||||
This is only relevant for developers modifying the source files of fish or fish scripts.
|
||||
Be cautious about blindly updating an existing translation file. Trivial
|
||||
changes to an existing message (eg changing the punctuation) will cause
|
||||
existing translations to be removed, since the tools do literal string
|
||||
matching. Therefore, in general, you need to carefully review any
|
||||
recommended deletions.
|
||||
|
||||
Setting Code Up For Translations
|
||||
--------------------------------
|
||||
|
||||
All non-debug messages output for user consumption should be marked for
|
||||
translation. In Rust, this requires the use of the ``wgettext!`` or ``wgettext_fmt!``
|
||||
macros:
|
||||
translation. In C++, this requires the use of the ``_`` (underscore)
|
||||
macro:
|
||||
|
||||
::
|
||||
|
||||
streams.out.append(wgettext_fmt!("%s: There are no jobs\n", argv[0]));
|
||||
streams.out.append_format(_(L"%ls: There are no jobs\n"), argv[0]);
|
||||
|
||||
All messages in fish script must be enclosed in single or double quote
|
||||
characters for our message extraction script to find them.
|
||||
@@ -370,26 +336,20 @@ that the following are **not** valid:
|
||||
|
||||
::
|
||||
|
||||
echo (_ hello)
|
||||
_ "goodbye"
|
||||
echo (_ hello)
|
||||
_ "goodbye"
|
||||
|
||||
Above should be written like this instead:
|
||||
|
||||
::
|
||||
|
||||
echo (_ "hello")
|
||||
echo (_ "goodbye")
|
||||
echo (_ "hello")
|
||||
echo (_ "goodbye")
|
||||
|
||||
You can use either single or double quotes to enclose the
|
||||
message to be translated. You can also optionally include spaces after
|
||||
the opening parentheses or before the closing parentheses.
|
||||
|
||||
Updating Dependencies
|
||||
=====================
|
||||
|
||||
To update dependencies, run ``build_tools/update-dependencies.sh``.
|
||||
This currently requires `updatecli <https://github.com/updatecli/updatecli>`__ and a few other tools.
|
||||
|
||||
Versioning
|
||||
==========
|
||||
|
||||
|
||||
2
COPYING
2
COPYING
@@ -1,7 +1,7 @@
|
||||
Fish is a smart and user-friendly command line shell.
|
||||
|
||||
Copyright (C) 2005-2009 Axel Liljencrantz
|
||||
Copyright (C) 2009- fish-shell contributors
|
||||
Copyright (C) 2009-2024 fish-shell contributors
|
||||
|
||||
fish is free software.
|
||||
|
||||
|
||||
1025
Cargo.lock
generated
1025
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
227
Cargo.toml
227
Cargo.toml
@@ -1,161 +1,69 @@
|
||||
[workspace]
|
||||
members = ["crates/*"]
|
||||
resolver = "2"
|
||||
members = ["printf"]
|
||||
|
||||
[workspace.package]
|
||||
# To build revisions that use Corrosion (those before 2024-01), use CMake 3.19, Rustc 1.78 and Rustup 1.27.
|
||||
rust-version = "1.85"
|
||||
edition = "2024"
|
||||
repository = "https://github.com/fish-shell/fish-shell"
|
||||
# see doc_src/license.rst for details
|
||||
# don't forget to update COPYING and debian/copyright too
|
||||
license = "GPL-2.0-only AND LGPL-2.0-or-later AND MIT AND PSF-2.0"
|
||||
|
||||
[workspace.dependencies]
|
||||
assert_matches = "1.5.0"
|
||||
bitflags = "2.5.0"
|
||||
cc = "1.0.94"
|
||||
cfg-if = "1.0.3"
|
||||
clap = { version = "4.5.54", features = ["derive"] }
|
||||
errno = "0.3.0"
|
||||
fish-build-helper = { path = "crates/build-helper" }
|
||||
fish-build-man-pages = { path = "crates/build-man-pages" }
|
||||
fish-color = { path = "crates/color" }
|
||||
fish-common = { path = "crates/common" }
|
||||
fish-fallback = { path = "crates/fallback" }
|
||||
fish-gettext = { path = "crates/gettext" }
|
||||
fish-gettext-extraction = { path = "crates/gettext-extraction" }
|
||||
fish-gettext-maps = { path = "crates/gettext-maps" }
|
||||
fish-gettext-mo-file-parser = { path = "crates/gettext-mo-file-parser" }
|
||||
fish-printf = { path = "crates/printf", features = ["widestring"] }
|
||||
fish-tempfile = { path = "crates/tempfile" }
|
||||
fish-util = { path = "crates/util" }
|
||||
fish-wcstringutil = { path = "crates/wcstringutil" }
|
||||
fish-widecharwidth = { path = "crates/widecharwidth" }
|
||||
fish-widestring = { path = "crates/widestring" }
|
||||
fish-wgetopt = { path = "crates/wgetopt" }
|
||||
itertools = "0.14.0"
|
||||
libc = "0.2.177"
|
||||
# lru pulls in hashbrown by default, which uses a faster (though less DoS resistant) hashing algo.
|
||||
# disabling default features uses the stdlib instead, but it doubles the time to rewrite the history
|
||||
# files as of 22 April 2024.
|
||||
lru = "0.16.2"
|
||||
nix = { version = "0.31.1", default-features = false, features = [
|
||||
"event",
|
||||
"fs",
|
||||
"inotify",
|
||||
"hostname",
|
||||
"resource",
|
||||
"process",
|
||||
"signal",
|
||||
"term",
|
||||
"user",
|
||||
] }
|
||||
num-traits = "0.2.19"
|
||||
once_cell = "1.19.0"
|
||||
pcre2 = { git = "https://github.com/fish-shell/rust-pcre2", tag = "0.2.9-utf32", default-features = false, features = [
|
||||
"utf32",
|
||||
] }
|
||||
phf = { version = "0.13", default-features = false }
|
||||
phf_codegen = "0.13"
|
||||
portable-atomic = { version = "1", default-features = false, features = [
|
||||
"fallback",
|
||||
] }
|
||||
proc-macro2 = "1.0"
|
||||
rand = { version = "0.9.2", default-features = false, features = [
|
||||
"small_rng",
|
||||
"thread_rng",
|
||||
] }
|
||||
rsconf = "0.3.0"
|
||||
rust-embed = { version = "8.11.0", features = [
|
||||
"deterministic-timestamps",
|
||||
"include-exclude",
|
||||
"interpolate-folder-path",
|
||||
] }
|
||||
serial_test = { version = "3", default-features = false }
|
||||
widestring = "1.2.0"
|
||||
unicode-segmentation = "1.12.0"
|
||||
unicode-width = "0.2.0"
|
||||
unix_path = "1.0.1"
|
||||
xterm-color = "1.0.1"
|
||||
rust-version = "1.70"
|
||||
edition = "2021"
|
||||
|
||||
[profile.release]
|
||||
overflow-checks = true
|
||||
lto = true
|
||||
|
||||
[profile.release-with-debug]
|
||||
inherits = "release"
|
||||
debug = true
|
||||
|
||||
[package]
|
||||
name = "fish"
|
||||
version = "4.5.0"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
default-run = "fish"
|
||||
license.workspace = true
|
||||
# see doc_src/license.rst for details
|
||||
# don't forget to update COPYING and debian/copyright too
|
||||
license = "GPL-2.0-only AND LGPL-2.0-or-later AND MIT AND PSF-2.0"
|
||||
repository = "https://github.com/fish-shell/fish-shell"
|
||||
homepage = "https://fishshell.com"
|
||||
readme = "README.rst"
|
||||
|
||||
[dependencies]
|
||||
assert_matches.workspace = true
|
||||
bitflags.workspace = true
|
||||
cfg-if.workspace = true
|
||||
errno.workspace = true
|
||||
fish-build-helper.workspace = true
|
||||
fish-build-man-pages = { workspace = true, optional = true }
|
||||
fish-color.workspace = true
|
||||
fish-common.workspace = true
|
||||
fish-fallback.workspace = true
|
||||
fish-gettext = { workspace = true, optional = true }
|
||||
fish-gettext-extraction = { workspace = true, optional = true }
|
||||
fish-printf.workspace = true
|
||||
fish-tempfile.workspace = true
|
||||
fish-util.workspace = true
|
||||
fish-wcstringutil.workspace = true
|
||||
fish-wgetopt.workspace = true
|
||||
fish-widecharwidth.workspace = true
|
||||
fish-widestring.workspace = true
|
||||
itertools.workspace = true
|
||||
libc.workspace = true
|
||||
lru.workspace = true
|
||||
macro_rules_attribute = "0.2.2"
|
||||
nix.workspace = true
|
||||
num-traits.workspace = true
|
||||
once_cell.workspace = true
|
||||
pcre2.workspace = true
|
||||
rand.workspace = true
|
||||
unicode-width.workspace = true
|
||||
xterm-color.workspace = true
|
||||
pcre2 = { git = "https://github.com/fish-shell/rust-pcre2", tag = "0.2.9-utf32", default-features = false, features = [
|
||||
"utf32",
|
||||
] }
|
||||
|
||||
bitflags = "2.5.0"
|
||||
errno = "0.3.0"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2.155"
|
||||
# lru pulls in hashbrown by default, which uses a faster (though less DoS resistant) hashing algo.
|
||||
# disabling default features uses the stdlib instead, but it doubles the time to rewrite the history
|
||||
# files as of 22 April 2024.
|
||||
lru = "0.12.3"
|
||||
nix = { version = "0.29.0", default-features = false, features = [
|
||||
"event",
|
||||
"inotify",
|
||||
"resource",
|
||||
"fs",
|
||||
] }
|
||||
num-traits = "0.2.19"
|
||||
once_cell = "1.19.0"
|
||||
fish-printf = { path = "./printf", features = ["widestring"] }
|
||||
|
||||
# Don't use the "getrandom" feature as it requires "getentropy" which was not
|
||||
# available on macOS < 10.12. We can enable "getrandom" when we raise the
|
||||
# minimum supported version to 10.12.
|
||||
rand = { version = "0.8.5", default-features = false, features = ["small_rng"] }
|
||||
widestring = "1.1.0"
|
||||
# We need 0.9.0 specifically for some crash fixes.
|
||||
terminfo = "0.9.0"
|
||||
|
||||
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
|
||||
portable-atomic.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
rust-embed = { workspace = true, features = [
|
||||
"deterministic-timestamps",
|
||||
"debug-embed",
|
||||
"include-exclude",
|
||||
"interpolate-folder-path",
|
||||
] }
|
||||
[target.'cfg(not(windows))'.dependencies]
|
||||
rust-embed = { workspace = true, features = [
|
||||
"deterministic-timestamps",
|
||||
"include-exclude",
|
||||
"interpolate-folder-path",
|
||||
portable-atomic = { version = "1", default-features = false, features = [
|
||||
"fallback",
|
||||
] }
|
||||
|
||||
[dev-dependencies]
|
||||
serial_test.workspace = true
|
||||
serial_test = { version = "1.0.0", default-features = false }
|
||||
|
||||
[build-dependencies]
|
||||
cc.workspace = true
|
||||
fish-build-helper.workspace = true
|
||||
fish-gettext-mo-file-parser.workspace = true
|
||||
phf_codegen = { workspace = true, optional = true }
|
||||
rsconf.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.build-dependencies]
|
||||
unix_path.workspace = true
|
||||
cc = "1.0.94"
|
||||
rsconf = "0.2.2"
|
||||
|
||||
[lib]
|
||||
crate-type = ["rlib"]
|
||||
@@ -174,56 +82,17 @@ name = "fish_key_reader"
|
||||
path = "src/bin/fish_key_reader.rs"
|
||||
|
||||
[features]
|
||||
default = ["embed-manpages", "localize-messages"]
|
||||
default = []
|
||||
benchmark = []
|
||||
embed-manpages = ["dep:fish-build-man-pages"]
|
||||
# Enable gettext localization at runtime. Requires the `msgfmt` tool to generate catalog data at
|
||||
# build time.
|
||||
localize-messages = ["dep:fish-gettext"]
|
||||
# This feature is used to enable extracting messages from the source code for localization.
|
||||
# It only needs to be enabled if updating these messages (and the corresponding PO files) is
|
||||
# desired. This happens when running tests via `cargo xtask check` and when calling
|
||||
# `build_tools/update_translations.fish`, so there should not be a need to enable it manually.
|
||||
gettext-extract = ["dep:fish-gettext-extraction"]
|
||||
|
||||
# The following features are auto-detected by the build-script and should not be enabled manually.
|
||||
asan = []
|
||||
tsan = []
|
||||
|
||||
[workspace.lints]
|
||||
[lints]
|
||||
rust.non_camel_case_types = "allow"
|
||||
rust.non_upper_case_globals = "allow"
|
||||
rust.unknown_lints = "allow"
|
||||
rust.unstable_name_collisions = "allow"
|
||||
rustdoc.private_intra_doc_links = "allow"
|
||||
|
||||
[workspace.lints.clippy]
|
||||
assigning_clones = "warn"
|
||||
cloned_instead_of_copied = "warn"
|
||||
explicit_into_iter_loop = "warn"
|
||||
format_push_string = "warn"
|
||||
implicit_clone = "warn"
|
||||
len_without_is_empty = "allow" # we're not a library crate
|
||||
let_and_return = "allow"
|
||||
manual_assert = "warn"
|
||||
manual_range_contains = "allow"
|
||||
map_unwrap_or = "warn"
|
||||
mut_mut = "warn"
|
||||
needless_lifetimes = "allow"
|
||||
new_without_default = "allow"
|
||||
option_map_unit_fn = "allow"
|
||||
ptr_offset_by_literal = "warn"
|
||||
ref_option = "warn"
|
||||
semicolon_if_nothing_returned = "warn"
|
||||
stable_sort_primitive = "warn"
|
||||
str_to_string = "warn"
|
||||
unnecessary_semicolon = "warn"
|
||||
unused_trait_names = "warn"
|
||||
|
||||
# We do not want to use the e?print(ln)?! macros.
|
||||
# These lints flag their use.
|
||||
# In the future, they might change to flag other methods of printing.
|
||||
print_stdout = "deny"
|
||||
print_stderr = "deny"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
clippy.manual_range_contains = "allow"
|
||||
clippy.needless_return = "allow"
|
||||
|
||||
19
Dockerfile
Normal file
19
Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM centos:latest
|
||||
|
||||
# Build dependency
|
||||
RUN yum update -y &&\
|
||||
yum install -y epel-release &&\
|
||||
yum install -y clang cmake3 gcc-c++ make &&\
|
||||
yum clean all
|
||||
|
||||
# Test dependency
|
||||
RUN yum install -y expect vim-common
|
||||
|
||||
ADD . /src
|
||||
WORKDIR /src
|
||||
|
||||
# Build fish
|
||||
RUN cmake3 . &&\
|
||||
make &&\
|
||||
make install
|
||||
|
||||
10
GNUmakefile
10
GNUmakefile
@@ -7,7 +7,7 @@
|
||||
CMAKE ?= cmake
|
||||
|
||||
GENERATOR ?= $(shell (which ninja > /dev/null 2> /dev/null && echo Ninja) || \
|
||||
echo 'Unix Makefiles')
|
||||
echo 'Unix Makefiles')
|
||||
prefix ?= /usr/local
|
||||
PREFIX ?= $(prefix)
|
||||
|
||||
@@ -34,7 +34,7 @@ all: .begin build/fish
|
||||
.PHONY: .begin
|
||||
.begin:
|
||||
@which $(CMAKE) > /dev/null 2> /dev/null || \
|
||||
(echo 'Please install CMake and then re-run the `make` command!' 1>&2 && false)
|
||||
(echo 'Please install CMake and then re-run the `make` command!' 1>&2 && false)
|
||||
|
||||
.PHONY: build/fish
|
||||
build/fish: build/$(BUILDFILE)
|
||||
@@ -55,11 +55,7 @@ clean:
|
||||
|
||||
.PHONY: test
|
||||
test: build/fish
|
||||
$(CMAKE) --build build --target fish_run_tests
|
||||
|
||||
.PHONY: fish_run_tests
|
||||
fish_run_tests: build/fish
|
||||
$(CMAKE) --build build --target fish_run_tests
|
||||
$(CMAKE) --build build --target test
|
||||
|
||||
.PHONY: install
|
||||
install: build/fish
|
||||
|
||||
118
README.rst
118
README.rst
@@ -37,7 +37,7 @@ fish can be installed:
|
||||
- using the `installer from fishshell.com <https://fishshell.com/>`__
|
||||
- as a `standalone app from fishshell.com <https://fishshell.com/>`__
|
||||
|
||||
Note: The minimum supported macOS version is 10.12.
|
||||
Note: The minimum supported macOS version is 10.10 "Yosemite".
|
||||
|
||||
Packages for Linux
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
@@ -47,12 +47,12 @@ Linux/CentOS are available from the `openSUSE Build
|
||||
Service <https://software.opensuse.org/download.html?project=shells%3Afish&package=fish>`__.
|
||||
|
||||
Packages for Ubuntu are available from the `fish
|
||||
PPA <https://launchpad.net/~fish-shell/+archive/ubuntu/release-4>`__,
|
||||
PPA <https://launchpad.net/~fish-shell/+archive/ubuntu/release-3>`__,
|
||||
and can be installed using the following commands:
|
||||
|
||||
::
|
||||
|
||||
sudo apt-add-repository ppa:fish-shell/release-4
|
||||
sudo apt-add-repository ppa:fish-shell/release-3
|
||||
sudo apt update
|
||||
sudo apt install fish
|
||||
|
||||
@@ -67,7 +67,7 @@ Windows
|
||||
listed above under “Packages for Linux”, or from source with the
|
||||
instructions below.
|
||||
- Fish can also be installed on all versions of Windows using
|
||||
`Cygwin <https://cygwin.com/>`__ or `MSYS2 <https://github.com/Berrysoft/fish-msys2>`__.
|
||||
`Cygwin <https://cygwin.com/>`__ (from the **Shells** category).
|
||||
|
||||
Building from source
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
@@ -76,7 +76,7 @@ If packages are not available for your platform, GPG-signed tarballs are
|
||||
available from `fishshell.com <https://fishshell.com/>`__ and
|
||||
`fish-shell on
|
||||
GitHub <https://github.com/fish-shell/fish-shell/releases>`__. See the
|
||||
`Building <#building>`_ section for instructions.
|
||||
`Building <#building>`__ section for instructions.
|
||||
|
||||
Running fish
|
||||
------------
|
||||
@@ -88,19 +88,23 @@ Dependencies
|
||||
|
||||
Running fish requires:
|
||||
|
||||
- A terminfo database, typically from curses or ncurses (preinstalled on most \*nix systems) - this needs to be the directory tree format, not the "hashed" database.
|
||||
If this is unavailable, fish uses an included xterm-256color definition.
|
||||
- some common \*nix system utilities (currently ``mktemp``), in
|
||||
addition to the basic POSIX utilities (``cat``, ``cut``, ``dirname``,
|
||||
``ls``, ``mkdir``, ``mkfifo``, ``rm``, ``sh``, ``sort``, ``tee``, ``tr``,
|
||||
``file``, ``ls``, ``mkdir``, ``mkfifo``, ``rm``, ``sort``, ``tee``, ``tr``,
|
||||
``uname`` and ``sed`` at least, but the full coreutils plus ``find`` and
|
||||
``awk`` is preferred)
|
||||
- The gettext library, if compiled with
|
||||
translation support
|
||||
|
||||
The following optional features also have specific requirements:
|
||||
|
||||
- builtin commands that have the ``--help`` option or print usage
|
||||
messages require ``man`` for display
|
||||
messages require ``nroff`` or ``mandoc`` for
|
||||
display
|
||||
- automated completion generation from manual pages requires Python 3.5+
|
||||
- the ``fish_config`` web configuration tool requires Python 3.5+ and a web browser
|
||||
- the :ref:`alt-o <shared-binds-alt-o>` binding requires the ``file`` program.
|
||||
- system clipboard integration (with the default Ctrl-V and Ctrl-X
|
||||
bindings) require either the ``xsel``, ``xclip``,
|
||||
``wl-copy``/``wl-paste`` or ``pbcopy``/``pbpaste`` utilities
|
||||
@@ -112,96 +116,74 @@ The following optional features also have specific requirements:
|
||||
Building
|
||||
--------
|
||||
|
||||
.. _dependencies-1:
|
||||
|
||||
Dependencies
|
||||
~~~~~~~~~~~~
|
||||
|
||||
Compiling fish requires:
|
||||
Compiling fish from a tarball requires:
|
||||
|
||||
- Rust (version 1.85 or later), including cargo
|
||||
- CMake (version 3.15 or later)
|
||||
- a C compiler (for system feature detection and the test helper binary)
|
||||
- a C++11 compiler (g++ 4.8 or later, or clang 3.3 or later)
|
||||
- CMake (version 3.5 or later)
|
||||
- PCRE2 (headers and libraries) - optional, this will be downloaded if missing
|
||||
- gettext (only the msgfmt tool) - optional, for translation support
|
||||
- an Internet connection, as other dependencies will be downloaded automatically
|
||||
- gettext (headers and libraries) - optional, for translation support
|
||||
|
||||
Sphinx is also optionally required to build the documentation from a
|
||||
cloned git repository.
|
||||
|
||||
Additionally, running the full test suite requires diff, git, Python 3.5+, pexpect, less, tmux and wget.
|
||||
Additionally, running the test suite requires Python 3.5+ and the pexpect package.
|
||||
|
||||
Building from source with CMake
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Dependencies, git master
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Rather than building from source, consider using a packaged build for your platform. Using the
|
||||
steps below makes fish difficult to uninstall or upgrade. Release packages are available from the
|
||||
links above, and up-to-date `development builds of fish are available for many platforms
|
||||
<https://github.com/fish-shell/fish-shell/wiki/Development-builds>`__
|
||||
Building from git master currently requires:
|
||||
|
||||
- Rust (version 1.70 or later)
|
||||
- CMake (version 3.19 or later)
|
||||
- a C compiler (for system feature detection and the test helper binary)
|
||||
- PCRE2 (headers and libraries) - optional, this will be downloaded if missing
|
||||
- gettext (headers and libraries) - optional, for translation support
|
||||
- an Internet connection, as other dependencies will be downloaded automatically
|
||||
|
||||
|
||||
Building from source (all platforms) - Makefile generator
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
To install into ``/usr/local``, run:
|
||||
|
||||
.. code:: shell
|
||||
.. code:: bash
|
||||
|
||||
mkdir build; cd build
|
||||
cmake ..
|
||||
cmake --build .
|
||||
sudo cmake --install .
|
||||
make
|
||||
sudo make install
|
||||
|
||||
The install directory can be changed using the
|
||||
``-DCMAKE_INSTALL_PREFIX`` parameter for ``cmake``.
|
||||
|
||||
CMake Build options
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
Build options
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
In addition to the normal CMake build options (like ``CMAKE_INSTALL_PREFIX``), fish's CMake build has some other options available to customize it.
|
||||
In addition to the normal CMake build options (like ``CMAKE_INSTALL_PREFIX``), fish has some other options available to customize it.
|
||||
|
||||
- Rust_COMPILER=path - the path to rustc. If not set, cmake will check $PATH and ~/.cargo/bin
|
||||
- Rust_CARGO=path - the path to cargo. If not set, cmake will check $PATH and ~/.cargo/bin
|
||||
- Rust_CARGO_TARGET=target - the target to pass to cargo. Set this for cross-compilation.
|
||||
- WITH_DOCS=ON|OFF - whether to build the documentation. By default, this is ON when Sphinx is installed.
|
||||
- FISH_INDENT_FOR_BUILDING_DOCS - useful for cross-compilation.
|
||||
Set this to the path to the ``fish_indent`` executable to use for building HTML docs.
|
||||
By default, ``${CMAKE_BINARY_DIR}/fish_indent`` will be used.
|
||||
If that's not runnable on the compile host,
|
||||
you can build a native one with ``cargo build --bin fish_indent`` and set this to ``$PWD/target/debug/fish_indent``.
|
||||
- BUILD_DOCS=ON|OFF - whether to build the documentation. This is automatically set to OFF when Sphinx isn't installed.
|
||||
- INSTALL_DOCS=ON|OFF - whether to install the docs. This is automatically set to on when BUILD_DOCS is or prebuilt documentation is available (like when building in-tree from a tarball).
|
||||
- FISH_USE_SYSTEM_PCRE2=ON|OFF - whether to use an installed pcre2. This is normally autodetected.
|
||||
- WITH_MESSAGE_LOCALIZATION=ON|OFF - whether to include translations.
|
||||
- extra_functionsdir, extra_completionsdir and extra_confdir - to compile in an additional directory to be searched for functions, completions and configuration snippets
|
||||
- MAC_CODESIGN_ID=String|OFF - the codesign ID to use on Mac, or "OFF" to disable codesigning.
|
||||
- WITH_GETTEXT=ON|OFF - whether to build with gettext support for translations.
|
||||
|
||||
Building fish with Cargo
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Note that fish does *not* support static linking and will attempt to error out if it detects it.
|
||||
|
||||
You can also build fish with Cargo.
|
||||
This example uses `uv <https://github.com/astral-sh/uv>`__ to install Sphinx (which is used for man-pages and ``--help`` options).
|
||||
You can also install Sphinx another way and drop the ``uv run --no-managed-python`` prefix.
|
||||
Help, it didn’t build!
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. code:: shell
|
||||
On Debian or Ubuntu you want these packages:
|
||||
|
||||
git clone https://github.com/fish-shell/fish-shell
|
||||
cd fish-shell
|
||||
::
|
||||
|
||||
# Optional: check out a specific version rather than building the latest
|
||||
# development version.
|
||||
git checkout "$(git for-each-ref refs/tags/ | awk '$2 == "tag" { print $3 }' | tail -1)"
|
||||
sudo apt install build-essential cmake libpcre2-dev gettext
|
||||
|
||||
uv run --no-managed-python \
|
||||
cargo install --path .
|
||||
|
||||
This will place standalone binaries in ``~/.cargo/bin/``, but you can move them wherever you want.
|
||||
|
||||
To disable translations, disable the ``localize-messages`` feature by passing ``--no-default-features --features=embed-manpages`` to cargo.
|
||||
|
||||
You can also link this build statically (but not against glibc) and move it to other computers.
|
||||
|
||||
Here are the remaining advantages of a full installation, as currently done by CMake:
|
||||
|
||||
- Man pages like ``fish(1)`` installed in standard locations, easily accessible from outside fish.
|
||||
- Separate files for builtins (e.g. ``$PREFIX/share/fish/man/man1/abbr.1``).
|
||||
- A local copy of the HTML documentation, typically accessed via the ``help`` fish function.
|
||||
In Cargo builds, ``help`` will redirect to `<https://fishshell.com/docs/current/>`__
|
||||
- Ability to use our CMake options extra_functionsdir, extra_completionsdir and extra_confdir,
|
||||
(also recorded in ``$PREFIX/share/pkgconfig/fish.pc``)
|
||||
which are used by some package managers to house third-party completions.
|
||||
Regardless of build system, fish uses ``$XDG_DATA_DIRS/{vendor_completion.d,vendor_conf.d,vendor_functions.d}``.
|
||||
On RedHat, CentOS, or Amazon EC2 everything should be preinstalled.
|
||||
|
||||
Contributing Changes to the Code
|
||||
--------------------------------
|
||||
|
||||
35
SECURITY.md
35
SECURITY.md
@@ -1,35 +0,0 @@
|
||||
# Security Reporting
|
||||
|
||||
If you wish to report a security vulnerability privately, we appreciate your diligence. Please follow the guidelines below to submit your report.
|
||||
|
||||
## Reporting
|
||||
|
||||
To report a security vulnerability, please provide the following information:
|
||||
|
||||
1. **PROJECT**
|
||||
|
||||
- Include the URL of the project repository - Example: <https://github.com/fish-shell/fish-shell>
|
||||
|
||||
2. **PUBLIC**
|
||||
|
||||
- Indicate whether this vulnerability has already been publicly discussed or disclosed.
|
||||
- If so, provide relevant links.
|
||||
|
||||
3. **DESCRIPTION**
|
||||
- Provide a detailed description of the security vulnerability.
|
||||
- Include as much information as possible to help us understand and address the issue.
|
||||
|
||||
Send this information, along with any additional relevant details, to <rf@fishshell.com>.
|
||||
|
||||
## Confidentiality
|
||||
|
||||
We kindly ask you to keep the report confidential until a public announcement is made.
|
||||
|
||||
## Notes
|
||||
|
||||
- Vulnerabilities will be handled on a best-effort basis.
|
||||
- You may request an advance copy of the patched release, but we cannot guarantee early access before the public release.
|
||||
- You will be notified via email simultaneously with the public announcement.
|
||||
- We will respond within a few weeks to confirm whether your report has been accepted or rejected.
|
||||
|
||||
Thank you for helping to improve the security of our project!
|
||||
@@ -8,4 +8,5 @@ for file in *.fish
|
||||
echo FAILING FILE $file
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/sh
|
||||
|
||||
if [ "$#" -gt 2 ] || [ "$#" -eq 0 ]; then
|
||||
if [ "$#" -gt 2 -o "$#" -eq 0 ]; then
|
||||
echo "Usage: driver.sh /path/to/fish [/path/to/other/fish]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
328
build.rs
328
build.rs
@@ -1,8 +1,8 @@
|
||||
use fish_build_helper::{
|
||||
env_var, fish_build_dir, target_os, target_os_is_apple, target_os_is_bsd, target_os_is_cygwin,
|
||||
workspace_root,
|
||||
};
|
||||
use rsconf::Target;
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
use rsconf::{LinkType, Target};
|
||||
use std::env;
|
||||
use std::error::Error;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
fn main() {
|
||||
@@ -11,41 +11,48 @@ fn main() {
|
||||
// Add our default to enable tools that don't go through CMake, like "cargo test" and the
|
||||
// language server.
|
||||
|
||||
rsconf::set_env_value(
|
||||
"FISH_RESOLVED_BUILD_DIR",
|
||||
// If set by CMake, this might include symlinks. Since we want to compare this to the
|
||||
// dir fish is executed in we need to canonicalize it.
|
||||
fish_build_dir().canonicalize().unwrap().to_str().unwrap(),
|
||||
);
|
||||
|
||||
// FISH_BUILD_DIR is set by CMake, if we are using it.
|
||||
// OUT_DIR is set by Cargo when the build script is running (not compiling)
|
||||
let default_build_dir = env::var("OUT_DIR").unwrap();
|
||||
let build_dir = option_env!("FISH_BUILD_DIR").unwrap_or(&default_build_dir);
|
||||
let build_dir = std::fs::canonicalize(build_dir).unwrap();
|
||||
let build_dir = build_dir.to_str().unwrap();
|
||||
rsconf::set_env_value("FISH_BUILD_DIR", build_dir);
|
||||
// We need to canonicalize (i.e. realpath) the manifest dir because we want to be able to
|
||||
// compare it directly as a string at runtime.
|
||||
rsconf::set_env_value(
|
||||
"CARGO_MANIFEST_DIR",
|
||||
workspace_root().canonicalize().unwrap().to_str().unwrap(),
|
||||
std::fs::canonicalize(env!("CARGO_MANIFEST_DIR"))
|
||||
.unwrap()
|
||||
.as_path()
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
// Some build info
|
||||
rsconf::set_env_value("BUILD_TARGET_TRIPLE", &env_var("TARGET").unwrap());
|
||||
rsconf::set_env_value("BUILD_HOST_TRIPLE", &env_var("HOST").unwrap());
|
||||
rsconf::set_env_value("BUILD_PROFILE", &env_var("PROFILE").unwrap());
|
||||
|
||||
// Per https://doc.rust-lang.org/cargo/reference/build-scripts.html#inputs-to-the-build-script,
|
||||
// the source directory is the current working directory of the build script
|
||||
rsconf::set_env_value("FISH_BUILD_VERSION", &get_version());
|
||||
rsconf::set_env_value(
|
||||
"FISH_BUILD_VERSION",
|
||||
&get_version(&env::current_dir().unwrap()),
|
||||
);
|
||||
|
||||
fish_build_helper::rebuild_if_embedded_path_changed("share");
|
||||
rsconf::rebuild_if_path_changed("src/libc.c");
|
||||
cc::Build::new()
|
||||
.file("src/libc.c")
|
||||
.include(build_dir)
|
||||
.compile("flibc.a");
|
||||
|
||||
let build = cc::Build::new();
|
||||
let mut build = cc::Build::new();
|
||||
// Add to the default library search path
|
||||
build.flag_if_supported("-L/usr/local/lib/");
|
||||
rsconf::add_library_search_path("/usr/local/lib");
|
||||
let mut target = Target::new_from(build).unwrap();
|
||||
// Keep verbose mode on until we've ironed out rust build script stuff
|
||||
target.set_verbose(true);
|
||||
detect_cfgs(&mut target);
|
||||
|
||||
#[cfg(all(target_env = "gnu", target_feature = "crt-static"))]
|
||||
compile_error!(
|
||||
"Statically linking against glibc has unavoidable crashes and is unsupported. Use dynamic linking or link statically against musl."
|
||||
);
|
||||
compile_error!("Statically linking against glibc has unavoidable crashes and is unsupported. Use dynamic linking or link statically against musl.");
|
||||
}
|
||||
|
||||
/// Check target system support for certain functionality dynamically when the build is invoked,
|
||||
@@ -60,45 +67,116 @@ fn main() {
|
||||
/// `Cargo.toml`) behind a feature we just enabled.
|
||||
///
|
||||
/// [0]: https://github.com/rust-lang/cargo/issues/5499
|
||||
#[rustfmt::skip]
|
||||
fn detect_cfgs(target: &mut Target) {
|
||||
for (name, handler) in [
|
||||
// Ignore the first entry, it just sets up the type inference.
|
||||
("", &(|_: &Target| false) as &dyn Fn(&Target) -> bool),
|
||||
("apple", &(|_| target_os_is_apple())),
|
||||
("bsd", &(|_| target_os_is_bsd())),
|
||||
("cygwin", &(|_| target_os_is_cygwin())),
|
||||
("have_eventfd", &|target| {
|
||||
// FIXME: NetBSD 10 has eventfd, but the libc crate does not expose it.
|
||||
if target_os() == "netbsd" {
|
||||
false
|
||||
} else {
|
||||
target.has_header("sys/eventfd.h")
|
||||
}
|
||||
}),
|
||||
("have_localeconv_l", &|target| {
|
||||
target.has_symbol("localeconv_l")
|
||||
}),
|
||||
("have_pipe2", &|target| target.has_symbol("pipe2")),
|
||||
("have_posix_spawn", &|target| {
|
||||
if matches!(target_os().as_str(), "openbsd" | "android") {
|
||||
// OpenBSD's posix_spawn returns status 127 instead of erroring with ENOEXEC when faced with a
|
||||
// shebang-less script. Disable posix_spawn on OpenBSD.
|
||||
//
|
||||
// Android is broken for unclear reasons
|
||||
false
|
||||
} else {
|
||||
target.has_header("spawn.h")
|
||||
}
|
||||
}),
|
||||
// Ignore the first entry, it just sets up the type inference. Model new entries after the
|
||||
// second line.
|
||||
(
|
||||
"",
|
||||
&(|_: &Target| Ok(false)) as &dyn Fn(&Target) -> Result<bool, Box<dyn Error>>,
|
||||
),
|
||||
("bsd", &detect_bsd),
|
||||
("gettext", &have_gettext),
|
||||
("small_main_stack", &has_small_stack),
|
||||
("using_cmake", &|_| {
|
||||
option_env!("FISH_CMAKE_BINARY_DIR").is_some()
|
||||
// See if libc supports the thread-safe localeconv_l(3) alternative to localeconv(3).
|
||||
("localeconv_l", &|target| {
|
||||
Ok(target.has_symbol("localeconv_l"))
|
||||
}),
|
||||
("waitstatus_signal_ret", &|target| {
|
||||
target.r#if("WEXITSTATUS(0x007f) == 0x7f", &["sys/wait.h"])
|
||||
("FISH_USE_POSIX_SPAWN", &|target| {
|
||||
Ok(target.has_header("spawn.h"))
|
||||
}),
|
||||
("HAVE_PIPE2", &|target| {
|
||||
Ok(target.has_symbol("pipe2"))
|
||||
}),
|
||||
("HAVE_EVENTFD", &|target| {
|
||||
// FIXME: NetBSD 10 has eventfd, but the libc crate does not expose it.
|
||||
if cfg!(target_os = "netbsd") {
|
||||
Ok(false)
|
||||
} else {
|
||||
Ok(target.has_header("sys/eventfd.h"))
|
||||
}
|
||||
}),
|
||||
("HAVE_WAITSTATUS_SIGNAL_RET", &|target| {
|
||||
Ok(target.r#if("WEXITSTATUS(0x007f) == 0x7f", &["sys/wait.h"]))
|
||||
}),
|
||||
] {
|
||||
rsconf::declare_cfg(name, handler(target));
|
||||
match handler(target) {
|
||||
Err(e) => {
|
||||
rsconf::warn!("{}: {}", name, e);
|
||||
rsconf::declare_cfg(name, false);
|
||||
},
|
||||
Ok(enabled) => rsconf::declare_cfg(name, enabled),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Detect if we're being compiled for a BSD-derived OS, allowing targeting code conditionally with
|
||||
/// `#[cfg(bsd)]`.
|
||||
///
|
||||
/// Rust offers fine-grained conditional compilation per-os for the popular operating systems, but
|
||||
/// doesn't necessarily include less-popular forks nor does it group them into families more
|
||||
/// specific than "windows" vs "unix" so we can conditionally compile code for BSD systems.
|
||||
fn detect_bsd(_: &Target) -> Result<bool, Box<dyn Error>> {
|
||||
// Instead of using `uname`, we can inspect the TARGET env variable set by Cargo. This lets us
|
||||
// support cross-compilation scenarios.
|
||||
let mut target = std::env::var("TARGET").unwrap();
|
||||
if !target.chars().all(|c| c.is_ascii_lowercase()) {
|
||||
target = target.to_ascii_lowercase();
|
||||
}
|
||||
let is_bsd = target.ends_with("bsd") || target.ends_with("dragonfly");
|
||||
#[cfg(any(
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd",
|
||||
))]
|
||||
assert!(is_bsd, "Target incorrectly detected as not BSD!");
|
||||
Ok(is_bsd)
|
||||
}
|
||||
|
||||
/// Detect libintl/gettext and its needed symbols to enable internationalization/localization
|
||||
/// support.
|
||||
fn have_gettext(target: &Target) -> Result<bool, Box<dyn Error>> {
|
||||
// The following script correctly detects and links against gettext, but so long as we are using
|
||||
// C++ and generate a static library linked into the C++ binary via CMake, we need to account
|
||||
// for the CMake option WITH_GETTEXT being explicitly disabled.
|
||||
rsconf::rebuild_if_env_changed("CMAKE_WITH_GETTEXT");
|
||||
if let Some(with_gettext) = std::env::var_os("CMAKE_WITH_GETTEXT") {
|
||||
if with_gettext.eq_ignore_ascii_case("0") {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
|
||||
// In order for fish to correctly operate, we need some way of notifying libintl to invalidate
|
||||
// its localizations when the locale environment variables are modified. Without the libintl
|
||||
// symbol _nl_msg_cat_cntr, we cannot use gettext even if we find it.
|
||||
let mut libraries = Vec::new();
|
||||
let mut found = 0;
|
||||
let symbols = ["gettext", "_nl_msg_cat_cntr"];
|
||||
for symbol in &symbols {
|
||||
// Historically, libintl was required in order to use gettext() and co, but that
|
||||
// functionality was subsumed by some versions of libc.
|
||||
if target.has_symbol(symbol) {
|
||||
// No need to link anything special for this symbol
|
||||
found += 1;
|
||||
continue;
|
||||
}
|
||||
for library in ["intl", "gettextlib"] {
|
||||
if target.has_symbol_in(symbol, &[library]) {
|
||||
libraries.push(library);
|
||||
found += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
match found {
|
||||
0 => Ok(false),
|
||||
1 => Err(format!("gettext found but cannot be used without {}", symbols[1]).into()),
|
||||
_ => {
|
||||
rsconf::link_libraries(&libraries, LinkType::Default);
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,94 +186,104 @@ fn detect_cfgs(target: &mut Target) {
|
||||
///
|
||||
/// 0.5 MiB is small enough that we'd have to drastically reduce MAX_STACK_DEPTH to less than 10, so
|
||||
/// we instead use a workaround to increase the main thread size.
|
||||
fn has_small_stack(_: &Target) -> bool {
|
||||
#[cfg(not(any(target_os = "ios", target_os = "macos", target_os = "netbsd")))]
|
||||
return false;
|
||||
fn has_small_stack(_: &Target) -> Result<bool, Box<dyn Error>> {
|
||||
#[cfg(not(any(target_os = "macos", target_os = "netbsd")))]
|
||||
return Ok(false);
|
||||
|
||||
// NetBSD 10 also needs this but can't find pthread_get_stacksize_np.
|
||||
#[cfg(target_os = "netbsd")]
|
||||
return true;
|
||||
return Ok(true);
|
||||
|
||||
#[cfg(any(target_os = "ios", target_os = "macos"))]
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use core::ffi;
|
||||
|
||||
unsafe extern "C" {
|
||||
unsafe fn pthread_get_stacksize_np(thread: *const ffi::c_void) -> usize;
|
||||
unsafe fn pthread_self() -> *const ffi::c_void;
|
||||
extern "C" {
|
||||
fn pthread_get_stacksize_np(thread: *const ffi::c_void) -> usize;
|
||||
fn pthread_self() -> *const ffi::c_void;
|
||||
}
|
||||
|
||||
// build.rs is executed on the main thread, so we are getting the main thread's stack size.
|
||||
// Modern macOS versions default to an 8 MiB main stack but legacy OS X have a 0.5 MiB one.
|
||||
let stack_size = unsafe { pthread_get_stacksize_np(pthread_self()) };
|
||||
const TWO_MIB: usize = 2 * 1024 * 1024 - 1;
|
||||
stack_size <= TWO_MIB
|
||||
match stack_size {
|
||||
0..=TWO_MIB => Ok(true),
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn setup_paths() {
|
||||
#[cfg(windows)]
|
||||
use unix_path::{Path, PathBuf};
|
||||
|
||||
fn overridable_path(
|
||||
env_var_name: &str,
|
||||
f: impl FnOnce(Option<String>) -> Option<PathBuf>,
|
||||
) -> Option<PathBuf> {
|
||||
rsconf::rebuild_if_env_changed(env_var_name);
|
||||
let maybe_path = f(env_var(env_var_name));
|
||||
if let Some(path) = maybe_path.as_ref() {
|
||||
rsconf::set_env_value(env_var_name, path.to_str().unwrap());
|
||||
fn get_path(name: &str, default: &str, onvar: PathBuf) -> PathBuf {
|
||||
let mut var = PathBuf::from(env::var(name).unwrap_or(default.to_string()));
|
||||
if var.is_relative() {
|
||||
var = onvar.join(var);
|
||||
}
|
||||
maybe_path
|
||||
var
|
||||
}
|
||||
|
||||
fn join_if_relative(parent_if_relative: &Path, path: String) -> PathBuf {
|
||||
let path = PathBuf::from(path);
|
||||
if path.is_relative() {
|
||||
parent_if_relative.join(path)
|
||||
} else {
|
||||
path
|
||||
}
|
||||
let prefix = PathBuf::from(env::var("PREFIX").unwrap_or("/usr/local".to_string()));
|
||||
if prefix.is_relative() {
|
||||
panic!("Can't have relative prefix");
|
||||
}
|
||||
rsconf::rebuild_if_env_changed("PREFIX");
|
||||
rsconf::set_env_value("PREFIX", prefix.to_str().unwrap());
|
||||
|
||||
let prefix = overridable_path("PREFIX", |env_prefix| {
|
||||
Some(PathBuf::from(env_prefix.unwrap_or("/usr/local".to_owned())))
|
||||
})
|
||||
.unwrap();
|
||||
let datadir = get_path("DATADIR", "share/", prefix.clone());
|
||||
rsconf::set_env_value("DATADIR", datadir.to_str().unwrap());
|
||||
rsconf::rebuild_if_env_changed("DATADIR");
|
||||
|
||||
overridable_path("SYSCONFDIR", |env_sysconfdir| {
|
||||
Some(join_if_relative(
|
||||
&prefix,
|
||||
env_sysconfdir.unwrap_or("/etc/".to_owned()),
|
||||
))
|
||||
});
|
||||
let bindir = get_path("BINDIR", "bin/", prefix.clone());
|
||||
rsconf::set_env_value("BINDIR", bindir.to_str().unwrap());
|
||||
rsconf::rebuild_if_env_changed("BINDIR");
|
||||
|
||||
let datadir = overridable_path("DATADIR", |env_datadir| {
|
||||
env_datadir.map(|p| join_if_relative(&prefix, p))
|
||||
});
|
||||
overridable_path("BINDIR", |env_bindir| {
|
||||
env_bindir.map(|p| join_if_relative(&prefix, p))
|
||||
});
|
||||
overridable_path("DOCDIR", |env_docdir| {
|
||||
env_docdir.map(|p| {
|
||||
join_if_relative(
|
||||
&datadir
|
||||
.expect("Setting DOCDIR without setting DATADIR is not currently supported"),
|
||||
p,
|
||||
)
|
||||
})
|
||||
});
|
||||
let sysconfdir = get_path("SYSCONFDIR", "etc/", datadir.clone());
|
||||
rsconf::set_env_value("SYSCONFDIR", sysconfdir.to_str().unwrap());
|
||||
rsconf::rebuild_if_env_changed("SYSCONFDIR");
|
||||
|
||||
let localedir = get_path("LOCALEDIR", "locale/", datadir.clone());
|
||||
rsconf::set_env_value("LOCALEDIR", localedir.to_str().unwrap());
|
||||
rsconf::rebuild_if_env_changed("LOCALEDIR");
|
||||
|
||||
let docdir = get_path("DOCDIR", "doc/fish", datadir.clone());
|
||||
rsconf::set_env_value("DOCDIR", docdir.to_str().unwrap());
|
||||
rsconf::rebuild_if_env_changed("DOCDIR");
|
||||
}
|
||||
|
||||
fn get_version() -> String {
|
||||
fn get_version(src_dir: &Path) -> String {
|
||||
use std::fs::read_to_string;
|
||||
use std::process::Command;
|
||||
String::from_utf8(
|
||||
Command::new("build_tools/git_version_gen.sh")
|
||||
.output()
|
||||
.unwrap()
|
||||
.stdout,
|
||||
)
|
||||
.unwrap()
|
||||
.trim_ascii_end()
|
||||
.to_owned()
|
||||
|
||||
if let Ok(var) = std::env::var("FISH_BUILD_VERSION") {
|
||||
return var;
|
||||
}
|
||||
|
||||
let path = PathBuf::from(src_dir).join("version");
|
||||
if let Ok(strver) = read_to_string(path) {
|
||||
return strver.to_string();
|
||||
}
|
||||
|
||||
let args = &["describe", "--always", "--dirty=-dirty"];
|
||||
if let Ok(output) = Command::new("git").args(args).output() {
|
||||
let rev = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
if !rev.is_empty() {
|
||||
// If it contains a ".", we have a proper version like "3.7",
|
||||
// or "23.2.1-1234-gfab1234"
|
||||
if rev.contains(".") {
|
||||
return rev;
|
||||
}
|
||||
// If it doesn't, we probably got *just* the commit SHA,
|
||||
// like "f1242abcdef".
|
||||
// So we prepend the crate version so it at least looks like
|
||||
// "3.8-gf1242abcdef"
|
||||
// This lacks the commit *distance*, but that can't be helped without
|
||||
// tags.
|
||||
let version = env!("CARGO_PKG_VERSION").to_owned();
|
||||
return version + "-g" + &rev;
|
||||
}
|
||||
}
|
||||
// TODO: Do we just use the cargo version here?
|
||||
|
||||
"unknown".to_string()
|
||||
}
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
{
|
||||
set -ex
|
||||
|
||||
lint=true
|
||||
if [ "$FISH_CHECK_LINT" = false ]; then
|
||||
lint=false
|
||||
fi
|
||||
|
||||
check_dependency_versions=false
|
||||
if [ "${FISH_CHECK_DEPENDENCY_VERSIONS:-false}" != false ]; then
|
||||
check_dependency_versions=true
|
||||
fi
|
||||
|
||||
if $check_dependency_versions; then
|
||||
command -v curl
|
||||
command -v jq
|
||||
command -v rustup
|
||||
command -v uv
|
||||
sort --version-sort </dev/null
|
||||
# To match existing behavior, only check Rust/dockerfiles for now.
|
||||
# TODO: remove this from this script.
|
||||
updatecli diff --config=updatecli.d/docker.yml --config=updatecli.d/rust.yml
|
||||
fi
|
||||
|
||||
cargo_args=$FISH_CHECK_CARGO_ARGS
|
||||
target_triple=$FISH_CHECK_TARGET_TRIPLE
|
||||
if [ -n "$target_triple" ]; then
|
||||
cargo_args="$cargo_args --target=$FISH_CHECK_TARGET_TRIPLE"
|
||||
fi
|
||||
|
||||
cargo() {
|
||||
subcmd=$1
|
||||
shift
|
||||
if [ -n "$FISH_CHECK_RUST_TOOLCHAIN" ]; then
|
||||
# shellcheck disable=2086
|
||||
command cargo "+$FISH_CHECK_RUST_TOOLCHAIN" "$subcmd" $cargo_args "$@"
|
||||
else
|
||||
# shellcheck disable=2086
|
||||
command cargo "$subcmd" $cargo_args "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
cleanup () {
|
||||
if [ -n "$gettext_template_dir" ] && [ -e "$gettext_template_dir" ]; then
|
||||
rm -r "$gettext_template_dir"
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup EXIT INT TERM HUP
|
||||
|
||||
if $lint; then
|
||||
export RUSTFLAGS="--deny=warnings ${RUSTFLAGS}"
|
||||
export RUSTDOCFLAGS="--deny=warnings ${RUSTDOCFLAGS}"
|
||||
fi
|
||||
|
||||
workspace_root="$(dirname "$0")/.."
|
||||
target_dir=${CARGO_TARGET_DIR:-$workspace_root/target}
|
||||
if [ -n "$target_triple" ]; then
|
||||
target_dir="$target_dir/$target_triple"
|
||||
fi
|
||||
# The directory containing the binaries produced by cargo/rustc.
|
||||
# Currently, all builds are debug builds.
|
||||
build_dir="$target_dir/debug"
|
||||
|
||||
if [ -n "$FISH_TEST_MAX_CONCURRENCY" ]; then
|
||||
export RUST_TEST_THREADS="$FISH_TEST_MAX_CONCURRENCY"
|
||||
export CARGO_BUILD_JOBS="$FISH_TEST_MAX_CONCURRENCY"
|
||||
fi
|
||||
|
||||
gettext_template_dir=$(mktemp -d)
|
||||
(
|
||||
export FISH_GETTEXT_EXTRACTION_DIR="$gettext_template_dir"
|
||||
cargo build --workspace --all-targets --features=gettext-extract
|
||||
)
|
||||
if $lint; then
|
||||
if command -v cargo-deny >/dev/null; then
|
||||
cargo deny --all-features --locked --exclude-dev check licenses
|
||||
fi
|
||||
PATH="$build_dir:$PATH" "$workspace_root/build_tools/style.fish" --all --check
|
||||
for features in "" --no-default-features; do
|
||||
cargo clippy --workspace --all-targets $features
|
||||
done
|
||||
fi
|
||||
cargo test --no-default-features --workspace --all-targets
|
||||
cargo test --doc --workspace
|
||||
if $lint; then
|
||||
cargo doc --workspace --no-deps
|
||||
fi
|
||||
FISH_GETTEXT_EXTRACTION_DIR=$gettext_template_dir "$workspace_root/tests/test_driver.py" "$build_dir"
|
||||
|
||||
exit
|
||||
}
|
||||
15
build_tools/diff_profiles.fish
Executable file → Normal file
15
build_tools/diff_profiles.fish
Executable file → Normal file
@@ -5,13 +5,6 @@
|
||||
#
|
||||
# Usage: ./diff_profiles.fish profile1.log profile2.log > profile_diff.log
|
||||
|
||||
if test (count $argv) -ne 2
|
||||
|
||||
echo "Incorrect number of arguments."
|
||||
echo "Usage: "(status filename)" profile1.log profile2.log"
|
||||
exit 1
|
||||
end
|
||||
|
||||
set -l profile1 (cat $argv[1])
|
||||
set -l profile2 (cat $argv[2])
|
||||
|
||||
@@ -22,13 +15,13 @@ while set -l next_line_no (math $line_no + 1) && set -q profile1[$next_line_no]
|
||||
set -l line1 $profile1[$line_no]
|
||||
set -l line2 $profile2[$line_no]
|
||||
|
||||
if not string match -qr '^\s*\d+\s+\d+' $line1
|
||||
if not string match -qr '^\d+\t\d+' $line1
|
||||
echo $line1
|
||||
continue
|
||||
end
|
||||
|
||||
set -l results1 (string match -r '^\s*(\d+)\s+(\d+)\s+(.*)' $line1)
|
||||
set -l results2 (string match -r '^\s*(\d+)\s+(\d+)\s+(.*)' $line2)
|
||||
set -l results1 (string match -r '^(\d+)\t(\d+)\s+(.*)' $line1)
|
||||
set -l results2 (string match -r '^(\d+)\t(\d+)\s+(.*)' $line2)
|
||||
|
||||
# times from both files
|
||||
set -l time1 $results1[2..3]
|
||||
@@ -49,5 +42,5 @@ while set -l next_line_no (math $line_no + 1) && set -q profile1[$next_line_no]
|
||||
set diff[1] (math $time1[1] - $time2[1])
|
||||
set diff[2] (math $time1[2] - $time2[2])
|
||||
|
||||
printf '%10d %10d %s\n' $diff[1] $diff[2] $remainder1
|
||||
echo $diff[1] $diff[2] $remainder1
|
||||
end
|
||||
|
||||
23
build_tools/extract_help_sections.fish
Normal file
23
build_tools/extract_help_sections.fish
Normal file
@@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env fish
|
||||
# Build a list of all sections in the html sphinx docs, separately by page,
|
||||
# so it can be added to share/functions/help.fish
|
||||
# Use like
|
||||
# fish extract_help_sections.fish user_doc/html/{fish_for_bash_users.html,faq.html,interactive.html,language.html,tutorial.html}
|
||||
# TODO: Currently `help` uses variable names we can't generate, so it needs to be touched up manually.
|
||||
# Also this could easily be broken by changes in sphinx, ideally we'd have a way to let it print the section titles.
|
||||
#
|
||||
|
||||
for file in $argv
|
||||
set -l varname (string replace -r '.*/(.*).html' '$1' -- $file | string escape --style=var)pages
|
||||
# Technically we can use any id in the document as an anchor, but listing them all is probably too much.
|
||||
# Sphinx stores section titles (in a slug-ized form) in the id,
|
||||
# and stores explicit section links in a `span` tag like
|
||||
# `<span id="identifiers"></span>`
|
||||
# We extract both separately.
|
||||
set -l sections (string replace -rf '.*class="headerlink" href="#([^"]*)".*' '$1' <$file)
|
||||
# Sections titled "id5" and such are internal cruft and shouldn't be offered.
|
||||
set -a sections (string replace -rf '.*span id="([^"]*)".*' '$1' <$file | string match -rv 'id\d+')
|
||||
|
||||
set sections (printf '%s\n' $sections | sort -u)
|
||||
echo set -l $varname $sections
|
||||
end
|
||||
@@ -1,147 +1,68 @@
|
||||
#!/usr/bin/env fish
|
||||
#
|
||||
# Tool to generate gettext messages template file.
|
||||
# Writes to stdout.
|
||||
# Intended to be called from `update_translations.fish`.
|
||||
# Tool to generate messages.pot
|
||||
|
||||
argparse use-existing-template= -- $argv
|
||||
or exit $status
|
||||
# Create temporary directory for these operations. OS X `mktemp` is somewhat restricted, so this block
|
||||
# works around that - based on share/functions/funced.fish.
|
||||
set -q TMPDIR
|
||||
or set -l TMPDIR /tmp
|
||||
set -l tmpdir (mktemp -d $TMPDIR/fish.XXXXXX)
|
||||
or exit 1
|
||||
|
||||
begin
|
||||
# Write header. This is required by msguniq.
|
||||
# Note that this results in the file being overwritten.
|
||||
# This is desired behavior, to get rid of the results of prior invocations
|
||||
# of this script.
|
||||
begin
|
||||
echo 'msgid ""'
|
||||
echo 'msgstr ""'
|
||||
echo '"Content-Type: text/plain; charset=UTF-8\n"'
|
||||
echo ""
|
||||
end
|
||||
# This is a gigantic crime.
|
||||
# xgettext still does not support rust *at all*, so we use cargo-expand to get all our wgettext invocations.
|
||||
set -l expanded (cargo expand --lib; for f in fish{,_indent,_key_reader}; cargo expand --bin $f; end)
|
||||
|
||||
set -g workspace_root (path resolve (status dirname)/..)
|
||||
# Extract any gettext call
|
||||
set -l strs (printf '%s\n' $expanded | grep -A1 wgettext_static_str |
|
||||
grep 'widestring::internals::core::primitive::str =' |
|
||||
string match -rg '"(.*)"' | string match -rv '^%ls$|^$' |
|
||||
# escaping difference between gettext and cargo-expand: single-quotes
|
||||
string replace -a "\'" "'" | sort -u)
|
||||
|
||||
set -l rust_extraction_dir
|
||||
if set -l --query _flag_use_existing_template
|
||||
set rust_extraction_dir $_flag_use_existing_template
|
||||
else
|
||||
set rust_extraction_dir (mktemp -d)
|
||||
# We need to build to ensure that the proc macro for extracting strings runs.
|
||||
FISH_GETTEXT_EXTRACTION_DIR=$rust_extraction_dir cargo check --features=gettext-extract
|
||||
or exit 1
|
||||
end
|
||||
# Extract any constants
|
||||
set -a strs (string match -rv 'BUILD_VERSION:|PACKAGE_NAME' -- $expanded |
|
||||
string match -rg 'const [A-Z_]*: &str = "(.*)"' | string replace -a "\'" "'")
|
||||
|
||||
function mark_section
|
||||
set -l section_name $argv[1]
|
||||
echo 'msgid "fish-section-'$section_name'"'
|
||||
echo 'msgstr ""'
|
||||
echo ''
|
||||
end
|
||||
# We construct messages.pot ourselves instead of forcing this into msgmerge or whatever.
|
||||
# The escaping so far works out okay.
|
||||
for str in $strs
|
||||
# grep -P needed for string escape to be compatible (PCRE-style),
|
||||
# -H gives the filename, -n the line number.
|
||||
# If you want to run this on non-GNU grep: Don't.
|
||||
echo "#:" (grep -PHn -r -- (string escape --style=regex -- $str) src/ |
|
||||
head -n1 | string replace -r ':\s.*' '')
|
||||
echo "msgid \"$str\""
|
||||
echo 'msgstr ""'
|
||||
end >messages.pot
|
||||
|
||||
mark_section tier1-from-rust
|
||||
# This regex handles descriptions for `complete` and `function` statements. These messages are not
|
||||
# particularly important to translate. Hence the "implicit" label.
|
||||
set -l implicit_regex '(?:^| +)(?:complete|function).*? (?:-d|--description) (([\'"]).+?(?<!\\\\)\\2).*'
|
||||
|
||||
# Get rid of duplicates and sort.
|
||||
find $rust_extraction_dir -type f -exec cat {} + | msguniq --no-wrap --sort-output
|
||||
or exit 1
|
||||
# This regex handles explicit requests to translate a message. These are more important to translate
|
||||
# than messages which should be implicitly translated.
|
||||
set -l explicit_regex '.*\( *_ (([\'"]).+?(?<!\\\\)\\2) *\).*'
|
||||
|
||||
if not set -l --query _flag_use_existing_template
|
||||
rm -r $rust_extraction_dir
|
||||
end
|
||||
mkdir -p $tmpdir/implicit/share/completions $tmpdir/implicit/share/functions
|
||||
mkdir -p $tmpdir/explicit/share/completions $tmpdir/explicit/share/functions
|
||||
|
||||
function extract_fish_script_messages_impl
|
||||
set -l regex $argv[1]
|
||||
set -e argv[1]
|
||||
# Using xgettext causes more trouble than it helps.
|
||||
# This is due to handling of escaping in fish differing from formats xgettext understands
|
||||
# (e.g. POSIX shell strings).
|
||||
# We work around this issue by manually writing the file content.
|
||||
for f in share/config.fish share/completions/*.fish share/functions/*.fish
|
||||
# Extract explicit attempts to translate a message. That is, those that are of the form
|
||||
# `(_ "message")`.
|
||||
string replace --filter --regex $explicit_regex '$1' <$f | string unescape \
|
||||
| string replace --all '"' '\\"' | string replace -r '(.*)' 'N_ "$1"' >$tmpdir/explicit/$f
|
||||
|
||||
# Steps:
|
||||
# 1. We extract strings to be translated from the relevant files and drop the rest. This step
|
||||
# depends on the regex matching the entire line, and the first capture group matching the
|
||||
# string.
|
||||
# 2. We unescape. This gets rid of some escaping necessary in fish strings.
|
||||
# 3. The resulting strings are sorted alphabetically. This step is optional. Not sorting would
|
||||
# result in strings from the same file appearing together. Removing duplicates is also
|
||||
# optional, since msguniq takes care of that later on as well.
|
||||
# 4. Single backslashes are replaced by double backslashes. This results in the backslashes
|
||||
# being interpreted as literal backslashes by gettext tooling.
|
||||
# 5. Double quotes are escaped, such that they are not interpreted as the start or end of
|
||||
# a msgid.
|
||||
# 6. We transform the string into the format expected in a PO file.
|
||||
cat $argv |
|
||||
string replace --filter --regex $regex '$1' |
|
||||
string unescape |
|
||||
sort -u |
|
||||
sed -E -e 's_\\\\_\\\\\\\\_g' -e 's_"_\\\\"_g' -e 's_^(.*)$_msgid "\1"\nmsgstr ""\n_'
|
||||
end
|
||||
# Handle `complete` / `function` description messages. The `| fish` is subtle. It basically
|
||||
# avoids the need to use `source` with a command substitution that could affect the current
|
||||
# shell.
|
||||
string replace --filter --regex $implicit_regex '$1' <$f | string unescape \
|
||||
| string replace --all '"' '\\"' | string replace -r '(.*)' 'N_ "$1"' >$tmpdir/implicit/$f
|
||||
end
|
||||
|
||||
function extract_fish_script_messages
|
||||
set -l tier $argv[1]
|
||||
set -e argv[1]
|
||||
if not set -q argv[1]
|
||||
return
|
||||
end
|
||||
# This regex handles explicit requests to translate a message. These are more important to translate
|
||||
# than messages which should be implicitly translated.
|
||||
set -l explicit_regex '.*\( *_ (([\'"]).+?(?<!\\\\)\\2) *\).*'
|
||||
mark_section "$tier-from-script-explicitly-added"
|
||||
extract_fish_script_messages_impl $explicit_regex $argv
|
||||
xgettext -j -k -kN_ -LShell --from-code=UTF-8 -cDescription --no-wrap -o messages.pot $tmpdir/{ex,im}plicit/share/*/*.fish
|
||||
|
||||
# This regex handles descriptions for `complete` and `function` statements. These messages are not
|
||||
# particularly important to translate. Hence the "implicit" label.
|
||||
set -l implicit_regex '^(?:\s|and |or )*(?:complete|function).*? (?:-d|--description) (([\'"]).+?(?<!\\\\)\\2).*'
|
||||
mark_section "$tier-from-script-implicitly-added"
|
||||
extract_fish_script_messages_impl $implicit_regex $argv
|
||||
end
|
||||
# Remove the tmpdir from the location to avoid churn
|
||||
sed -i 's_^#: /.*/share/_#: share/_' messages.pot
|
||||
|
||||
set -g share_dir $workspace_root/share
|
||||
|
||||
set -l tier1 $share_dir/config.fish
|
||||
set -l tier2
|
||||
set -l tier3
|
||||
|
||||
for file in $share_dir/completions/*.fish $share_dir/functions/*.fish
|
||||
# set -l tier (string match -r '^# localization: .*' <$file)
|
||||
set -l tier (string replace -rf -m1 \
|
||||
'^# localization: (.*)$' '$1' <$file)
|
||||
if set -q tier[1]
|
||||
switch "$tier"
|
||||
case tier1 tier2 tier3
|
||||
set -a $tier $file
|
||||
case 'skip*'
|
||||
case '*'
|
||||
echo >&2 "$file:1 unexpected localization tier: $tier"
|
||||
exit 1
|
||||
end
|
||||
continue
|
||||
end
|
||||
set -l dirname (path basename (path dirname $file))
|
||||
set -l command_name (path basename --no-extension $file)
|
||||
if test $dirname = functions &&
|
||||
string match -q -- 'fish_*' $command_name
|
||||
set -a tier1 $file
|
||||
continue
|
||||
end
|
||||
if test $dirname != completions
|
||||
echo >&2 "$file:1 missing localization tier for function file"
|
||||
exit 1
|
||||
end
|
||||
if test -e $workspace_root/doc_src/cmds/$command_name.rst
|
||||
set -a tier1 $file
|
||||
else
|
||||
set -a tier3 $file
|
||||
end
|
||||
end
|
||||
|
||||
extract_fish_script_messages tier1 $tier1
|
||||
extract_fish_script_messages tier2 $tier2
|
||||
extract_fish_script_messages tier3 $tier3
|
||||
end |
|
||||
# At this point, all extracted strings have been written to stdout,
|
||||
# starting with the ones taken from the Rust sources,
|
||||
# followed by strings explicitly marked for translation in fish scripts,
|
||||
# and finally the strings from fish scripts which get translated implicitly.
|
||||
# Because we do not eliminate duplicates across these categories,
|
||||
# we do it here, since other gettext tools expect no duplicates.
|
||||
msguniq --no-wrap
|
||||
rm -r $tmpdir
|
||||
|
||||
@@ -1,22 +1,70 @@
|
||||
#!/bin/sh
|
||||
# Originally from the git sources (GIT-VERSION-GEN)
|
||||
# Presumably (C) Junio C Hamano <junkio@cox.net>
|
||||
# Reused under GPL v2.0
|
||||
# Modified for fish by David Adam <zanchey@ucc.gu.uwa.edu.au>
|
||||
|
||||
set -e
|
||||
|
||||
# Find the fish directory as two levels up from script directory.
|
||||
FISH_BASE_DIR="$( cd "$( dirname "$( dirname "$0" )" )" && pwd )"
|
||||
DEF_VER=unknown
|
||||
git_permission_failed=0
|
||||
|
||||
version=$(
|
||||
awk <"$FISH_BASE_DIR/Cargo.toml" -F'"' '$1 == "version = " { print $2 }'
|
||||
)
|
||||
if git_version=$(
|
||||
GIT_CEILING_DIRECTORIES=$FISH_BASE_DIR/.. \
|
||||
git -C "$FISH_BASE_DIR" describe --always --dirty 2>/dev/null); then
|
||||
if [ "$git_version" = "${git_version#"$version"}" ]; then
|
||||
version=$version-g$git_version
|
||||
# First see if there is a version file (included in release tarballs),
|
||||
# then try git-describe, then default.
|
||||
if test -f version
|
||||
then
|
||||
VN=$(cat version) || VN="$DEF_VER"
|
||||
else
|
||||
if VN=$(git -C "$FISH_BASE_DIR" describe --always --dirty 2>/dev/null); then
|
||||
:
|
||||
else
|
||||
version=$git_version
|
||||
if test $? = 128; then
|
||||
# Current git versions return status 128
|
||||
# when run in a repo owned by another user.
|
||||
# Even for describe and everything.
|
||||
# This occurs for `sudo make install`.
|
||||
git_permission_failed=1
|
||||
fi
|
||||
VN="$DEF_VER"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "$version"
|
||||
# If the first param is --stdout, then output to stdout and exit.
|
||||
if test "$1" = '--stdout'
|
||||
then
|
||||
echo $VN
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Set the output directory as either the first param or cwd.
|
||||
test -n "$1" && OUTPUT_DIR=$1/ || OUTPUT_DIR=
|
||||
FBVF="${OUTPUT_DIR}FISH-BUILD-VERSION-FILE"
|
||||
|
||||
if test "$VN" = unknown && test -r "$FBVF" && test "$git_permission_failed" = 1
|
||||
then
|
||||
# HACK: Git failed, so we keep the current version file.
|
||||
# This helps in case you built fish as a normal user
|
||||
# and then try to `sudo make install` it.
|
||||
date +%s > ${OUTPUT_DIR}fish-build-version-witness.txt
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if test -r "$FBVF"
|
||||
then
|
||||
VC=$(grep -v '^#' "$FBVF" | tr -d '"' | sed -e 's/^FISH_BUILD_VERSION=//')
|
||||
else
|
||||
VC="unset"
|
||||
fi
|
||||
|
||||
# Maybe output the FBVF
|
||||
# It looks like FISH_BUILD_VERSION="2.7.1-621-ga2f065e6"
|
||||
test "$VN" = "$VC" || {
|
||||
echo >&2 "FISH_BUILD_VERSION=$VN"
|
||||
echo "FISH_BUILD_VERSION=\"$VN\"" >"$FBVF"
|
||||
}
|
||||
|
||||
# Output the fish-build-version-witness.txt
|
||||
# See https://cmake.org/cmake/help/v3.4/policy/CMP0058.html
|
||||
date +%s > ${OUTPUT_DIR}fish-build-version-witness.txt
|
||||
|
||||
@@ -1,392 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>platform-application</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.no-container</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.container-manager</key>
|
||||
<true/>
|
||||
<key>com.apple.private.skip-library-validation</key>
|
||||
<true/>
|
||||
<key>com.apple.private.MobileContainerManager.allowed</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.adprivacyd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.amfid</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.AppBundles</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.AppDataContainers</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.automation-mode</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Biome</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Calendar</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CallHistory</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CarrierBundles</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.chronod</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CloudDocsDB</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CloudKit</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.containers</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CoreFollowUp</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CoreKnowledge</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Cryptex</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.demo_backup</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.DocumentRevisions</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.DumpPanic</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.ExposureNotification</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.FaceTime</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.familycircled</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.FindMy</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.fpsd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Health</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.HomeAI</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.HomeKit</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.iCloudDrive</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.idcredd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.IdentityServices</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.kbd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Keychains</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Lockdown</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Mail</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Messages</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MessagesMetaData</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MobileContainerManager</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MobileDocuments</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MobileIdentityService</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.mobilesync</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.multimodalsearchd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.NanoTimeKit.FaceSupport</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.News</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Notes</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Photos</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.PhotosLibraries</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.pipelined</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.preferences</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.PrivacyAccounting</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Safari</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SearchParty</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SecureElementService</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SensorKit</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SFAnalytics</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SiriInference</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SiriReferenceResolution</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SiriVocabulary</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SoC</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SpeechPersonalizedLM</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Spotlight</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.StatusKit</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Stocks</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Suggestions</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SymptomFramework</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.sysdagnose.ScreenshotServicesService</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.TCC</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.TimeMachine</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.triald</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.trustd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.trustd-private</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.universalaccess</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Voicemail</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Wireless</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.disk-device-access</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.ane_model_cache</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.apfs_boot_mount</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.clientScripter</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.mediaanalysisd</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.CarPlayAppBlacklist</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.DeviceCheck</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.DictionaryServices.dictionary2</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.DuetExpertCenterAsset</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.EmbeddedNL</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Font5</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Font6</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.HealthKt.FeatureAvailability</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.HomeKit</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.MacinTalkVoiceAssets</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.MailDynamicData</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.MXLongFormVideoApps</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.network.networknomicon</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.PKITrustSupplementals</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.SharingDeviceAssets</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.SiriShortcutsMobileAsset</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.TimeZoneUpdate</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.VoiceServices.CombinedVocalizerVoices</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.VoiceServices.CustomVoice</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.VoiceServices.GryphonVoice</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.VoiceServicesVocalizerVoice</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.VoiceServices.VoiceResources</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.VoiceTriggerAssets</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.CoreAnalytics</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.coreduet_knowledge_store</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.coreidvd</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.coreknowledge</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.CoreRoutine</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.CoreSpeech</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.dmd</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.dprivacyd_storage</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.ExtensibleSSO</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.facekit</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.fpsd</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.MobileStorageMounter</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.MusicApp</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.nsurlsessiond</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.pearl-field-diagnostics</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.proactivepredictions</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.QLThumbnailCache</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.remotemanagementd</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.RoleAccountStaging</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.sensorkit</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.shortcuts</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.siriremembers</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.timezone</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.triald</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.voiceshortcuts</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage-exempt.heritable</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.AppleMediaServices</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.ContactlessReader</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.CoreRoutine</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.DiagnosticReports</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.DiagnosticReports.read-write</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.DoNotDisturb</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Home</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.IntelligencePlatform</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Location</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.ManagedConfiguration</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MapsSync</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MobileBackup</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.MobileStorageMounter</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.PassKit</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SiriFeatureStore</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SiriSELF</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.SoundProfileAsset</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.TextUnderstanding</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.Weather</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.appleaccountd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.ciconia</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.clipserviced</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.coreduet_knowledge_store</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.driverkitd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.geoanalyticsd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.geod</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.launchd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.sessionkitd</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.sysdiagnose.ScreenshotServicesService</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.sysdiagnose.sysdiagnose</key>
|
||||
<true/>
|
||||
<key>com.apple.private.security.storage.tmp</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.critical</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.datavault.metadata</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.install</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.install.heritable</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.restricted-block-devices</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.MobileAssetDownload</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.amsengagementd</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.HealthKit.FeatureAvailability</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriDialogAssets</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriExperienceCam</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriFindMyConfigurationFiles</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriInferredHelpfulness</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriTextToSpeech</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingAsrAssistant</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingAsrHammer</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingAsrUaap</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingAttentionAssets</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingMorphun</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingNL</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.com.apple.MobileAsset.Trial.Siri.SiriUnderstandingNLOverrides</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.coreparsec_feedbacks</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.coreparsec_uploadables</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.early_boot_mount</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.storage.screentime</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.volume.ISCRecovery</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.volume.Preboot</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.volume.Recovery</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.volume.Update</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.volume.VM</key>
|
||||
<true/>
|
||||
<key>com.apple.rootless.volume.iSCPreboot</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,24 +1,23 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""Command line test driver."""
|
||||
""" Command line test driver. """
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import datetime
|
||||
from difflib import SequenceMatcher
|
||||
import io
|
||||
import re
|
||||
import shlex
|
||||
import subprocess
|
||||
import sys
|
||||
import unicodedata
|
||||
|
||||
try:
|
||||
from itertools import zip_longest
|
||||
except ImportError:
|
||||
from itertools import izip_longest as zip_longest # type: ignore
|
||||
|
||||
from itertools import izip_longest as zip_longest
|
||||
from difflib import SequenceMatcher
|
||||
|
||||
# Directives can occur at the beginning of a line, or anywhere in a line that does not start with #.
|
||||
COMMENT_RE = r"^(?:[^#].*)?#\s*"
|
||||
@@ -33,15 +32,12 @@ CHECK_STDOUT_RE = re.compile(COMMENT_RE + r"CHECK:\s+(.*)\n")
|
||||
# A regex capturing lines that should be checked against stderr.
|
||||
CHECK_STDERR_RE = re.compile(COMMENT_RE + r"CHECKERR:\s+(.*)\n")
|
||||
|
||||
VARIABLE_OVERRIDE_RE = re.compile(r"\w+=.*")
|
||||
|
||||
SKIP = object()
|
||||
|
||||
|
||||
def find_command(program):
|
||||
import os
|
||||
|
||||
path, _ = os.path.split(program)
|
||||
path, name = os.path.split(program)
|
||||
if path:
|
||||
return os.path.isfile(program) and os.access(program, os.X_OK)
|
||||
for path in os.environ["PATH"].split(os.pathsep):
|
||||
@@ -51,7 +47,6 @@ def find_command(program):
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class Config(object):
|
||||
def __init__(self):
|
||||
# Whether to have verbose output.
|
||||
@@ -62,7 +57,7 @@ class Config(object):
|
||||
self.progress = False
|
||||
|
||||
def colors(self):
|
||||
"""Return a dictionary mapping color names to ANSI escapes"""
|
||||
""" Return a dictionary mapping color names to ANSI escapes """
|
||||
|
||||
def ansic(n):
|
||||
return "\033[%dm" % n if self.colorize else ""
|
||||
@@ -94,6 +89,9 @@ def output(*args):
|
||||
print("".join(args) + "\n")
|
||||
|
||||
|
||||
import unicodedata
|
||||
|
||||
|
||||
def esc(m):
|
||||
map = {
|
||||
"\n": "\\n",
|
||||
@@ -130,7 +128,7 @@ class CheckerError(Exception):
|
||||
|
||||
|
||||
class Line(object):
|
||||
"""A line that remembers where it came from."""
|
||||
""" A line that remembers where it came from. """
|
||||
|
||||
def __init__(self, text, number, file):
|
||||
self.text = text
|
||||
@@ -158,7 +156,7 @@ class Line(object):
|
||||
raise NotImplementedError
|
||||
|
||||
def subline(self, text):
|
||||
"""Return a substring of our line with the given text, preserving number and file."""
|
||||
""" Return a substring of our line with the given text, preserving number and file. """
|
||||
return Line(text, self.number, self.file)
|
||||
|
||||
@staticmethod
|
||||
@@ -230,7 +228,7 @@ class TestFailure(object):
|
||||
if self.signal:
|
||||
fmtstrs += [
|
||||
" Process was killed by signal {BOLD}" + self.signal + "{RESET}",
|
||||
"",
|
||||
""
|
||||
]
|
||||
if self.line and self.check:
|
||||
fmtstrs += [
|
||||
@@ -302,8 +300,6 @@ class TestFailure(object):
|
||||
if a
|
||||
else ""
|
||||
)
|
||||
# Convince type checker that bstr will in fact be a string when read.
|
||||
bstr = ""
|
||||
if b:
|
||||
bstr = (
|
||||
"on line "
|
||||
@@ -353,7 +349,7 @@ class TestFailure(object):
|
||||
return "\n".join(fmtstrs).format(**fields)
|
||||
|
||||
def print_message(self):
|
||||
"""Print our message to stdout."""
|
||||
""" Print our message to stdout. """
|
||||
print(self.message())
|
||||
|
||||
|
||||
@@ -371,10 +367,7 @@ def perform_substitution(input_str, subs):
|
||||
text = m.group(1)
|
||||
for key, replacement in subs_ordered:
|
||||
if text.startswith(key):
|
||||
# shell-quote the replacement, so it's usable in #RUN lines.
|
||||
# We could loosen this and only do it for #RUN/#REQUIRES,
|
||||
# but so far we don't need it anywhere.
|
||||
return shlex.quote(replacement + text[len(key) :])
|
||||
return replacement + text[len(key) :]
|
||||
# No substitution found, so we default to running it as-is,
|
||||
# which will end up running it via $PATH.
|
||||
return text
|
||||
@@ -382,30 +375,28 @@ def perform_substitution(input_str, subs):
|
||||
return re.sub(r"%(%|[a-zA-Z0-9_-]+)", subber, input_str)
|
||||
|
||||
|
||||
def runproc(cmd, env=None):
|
||||
"""Wrapper around subprocess.Popen to save typing"""
|
||||
return asyncio.run(runproc_async(cmd, env=env))
|
||||
|
||||
|
||||
async def runproc_async(cmd, env=None, cwd=None):
|
||||
"""Wrapper around subprocess.Popen to save typing"""
|
||||
PIPE = asyncio.subprocess.PIPE
|
||||
DEVNULL = asyncio.subprocess.DEVNULL
|
||||
return await asyncio.create_subprocess_shell(
|
||||
cmd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE, env=env, cwd=cwd
|
||||
def runproc(cmd):
|
||||
""" Wrapper around subprocess.Popen to save typing """
|
||||
PIPE = subprocess.PIPE
|
||||
proc = subprocess.Popen(
|
||||
cmd,
|
||||
stdin=PIPE,
|
||||
stdout=PIPE,
|
||||
stderr=PIPE,
|
||||
shell=True,
|
||||
close_fds=True, # For Python 2.6 as shipped on RHEL 6
|
||||
)
|
||||
return proc
|
||||
|
||||
|
||||
class TestRun(object):
|
||||
def __init__(self, name, runcmd, checker, subs, config, env=None, cwd=None):
|
||||
def __init__(self, name, runcmd, checker, subs, config):
|
||||
self.name = name
|
||||
self.runcmd = runcmd
|
||||
self.subbed_command = perform_substitution(runcmd.args, subs)
|
||||
self.checker = checker
|
||||
self.subs = subs
|
||||
self.config = config
|
||||
self.env = env
|
||||
self.cwd = cwd
|
||||
|
||||
def check(self, lines, checks):
|
||||
# Reverse our lines and checks so we can pop off the end.
|
||||
@@ -450,7 +441,7 @@ class TestRun(object):
|
||||
# SCREENFULS of text.
|
||||
# So we truncate the check list.
|
||||
if len(usedchecks) > len(usedlines):
|
||||
usedchecks = usedchecks[: len(usedlines) + 5]
|
||||
usedchecks = usedchecks[:len(usedlines) + 5]
|
||||
|
||||
# Do a SequenceMatch! This gives us a diff-like thing.
|
||||
diff = SequenceMatcher(a=usedlines, b=usedchecks, autojunk=False)
|
||||
@@ -478,43 +469,24 @@ class TestRun(object):
|
||||
return None
|
||||
|
||||
def run(self):
|
||||
"""Run the command. Return a TestFailure, or None."""
|
||||
return asyncio.run(self.run_async())
|
||||
|
||||
async def run_async(self):
|
||||
"""Run the command. Return a TestFailure, or None."""
|
||||
""" Run the command. Return a TestFailure, or None. """
|
||||
|
||||
def split_by_newlines(s):
|
||||
"""Decode a string and split it by newlines only,
|
||||
retaining the newlines.
|
||||
"""
|
||||
return [
|
||||
s + "\n"
|
||||
for s in s.decode("utf-8", errors="backslashreplace").split("\n")
|
||||
]
|
||||
return [s + "\n" for s in s.decode("utf-8").split("\n")]
|
||||
|
||||
if self.config.verbose:
|
||||
print(self.subbed_command)
|
||||
proc = await runproc_async(self.subbed_command, env=self.env, cwd=self.cwd)
|
||||
stdout, stderr = await proc.communicate()
|
||||
|
||||
# Work around type system limitations / bad API design which makes the typechecker unhappy.
|
||||
if proc.returncode is None:
|
||||
raise RuntimeError(
|
||||
"After `proc.communicate()` the return code must be an int."
|
||||
)
|
||||
status = proc.returncode
|
||||
proc = runproc(self.subbed_command)
|
||||
stdout, stderr = proc.communicate()
|
||||
# HACK: This is quite cheesy: POSIX specifies that sh should return 127 for a missing command.
|
||||
# It's also possible that it'll be returned in other situations,
|
||||
# most likely when the last command in a shell script doesn't exist.
|
||||
# So we check if the command *we execute* exists, and complain then.
|
||||
cmd = next(
|
||||
(
|
||||
word
|
||||
for word in shlex.split(self.subbed_command)
|
||||
if not VARIABLE_OVERRIDE_RE.match(word)
|
||||
)
|
||||
)
|
||||
status = proc.returncode
|
||||
cmd = shlex.split(self.subbed_command)[0]
|
||||
if status == 127 and not find_command(cmd):
|
||||
raise CheckerError("Command could not be found: " + cmd)
|
||||
if status == 126 and not find_command(cmd):
|
||||
@@ -545,7 +517,6 @@ class TestRun(object):
|
||||
# Process was killed by a signal and failed,
|
||||
# add a message.
|
||||
import signal
|
||||
|
||||
# Unfortunately strsignal only exists in python 3.8+,
|
||||
# and signal.signals is 3.5+.
|
||||
if hasattr(signal, "Signals"):
|
||||
@@ -599,7 +570,8 @@ class CheckCmd(object):
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def parse(line: Line, checktype: str) -> "CheckCmd":
|
||||
def parse(line, checktype):
|
||||
# type: (Line) -> CheckCmd
|
||||
# Everything inside {{}} is a regular expression.
|
||||
# Everything outside of it is a literal string.
|
||||
# Split around {{...}}. Then every odd index will be a regex, and
|
||||
@@ -617,14 +589,8 @@ class CheckCmd(object):
|
||||
pieces = bracket_re.split(line.text)
|
||||
even = True
|
||||
re_strings = []
|
||||
for i, piece in enumerate(pieces):
|
||||
for piece in pieces:
|
||||
if even:
|
||||
if i != 0 and piece.startswith("}"):
|
||||
raise CheckerError(
|
||||
"Expression ('{{%s}}}') is ambiguous, consider using '{{%s}}{{[}]}}' if you really mean that"
|
||||
% (pieces[i - 1], pieces[i - 1]),
|
||||
line,
|
||||
)
|
||||
# piece is a literal string.
|
||||
re_strings.append(re.escape(piece))
|
||||
else:
|
||||
@@ -653,7 +619,6 @@ class CheckCmd(object):
|
||||
class Checker(object):
|
||||
def __init__(self, name, lines):
|
||||
self.name = name
|
||||
|
||||
# Helper to yield subline containing group1 from all matching lines.
|
||||
def group1s(regex):
|
||||
for line in lines:
|
||||
@@ -685,17 +650,8 @@ class Checker(object):
|
||||
]
|
||||
|
||||
|
||||
def check_file(input_file, name, subs, config, failure_handler, env=None):
|
||||
"""Check a single file. Return a True on success, False on error."""
|
||||
return asyncio.run(
|
||||
check_file_async(input_file, name, subs, config, failure_handler, env=env)
|
||||
)
|
||||
|
||||
|
||||
async def check_file_async(
|
||||
input_file, name, subs, config, failure_handler, env=None, cwd=None
|
||||
):
|
||||
"""Check a single file. Return a True on success, False on error."""
|
||||
def check_file(input_file, name, subs, config, failure_handler):
|
||||
""" Check a single file. Return a True on success, False on error. """
|
||||
success = True
|
||||
lines = Line.readfile(input_file, name)
|
||||
checker = Checker(name, lines)
|
||||
@@ -703,15 +659,10 @@ async def check_file_async(
|
||||
# Run all the REQUIRES lines first,
|
||||
# if any of them fail it's a SKIP
|
||||
for reqcmd in checker.requirecmds:
|
||||
proc = await runproc_async(
|
||||
perform_substitution(reqcmd.args, subs), env=env, cwd=cwd
|
||||
proc = runproc(
|
||||
perform_substitution(reqcmd.args, subs)
|
||||
)
|
||||
await proc.communicate()
|
||||
# Work around type system limitations / bad API design which makes the typechecker unhappy.
|
||||
if proc.returncode is None:
|
||||
raise RuntimeError(
|
||||
"After `proc.communicate()` the return code must be an int."
|
||||
)
|
||||
proc.communicate()
|
||||
if proc.returncode > 0:
|
||||
return SKIP
|
||||
|
||||
@@ -720,24 +671,16 @@ async def check_file_async(
|
||||
|
||||
# Only then run the RUN lines.
|
||||
for runcmd in checker.runcmds:
|
||||
failure = await TestRun(
|
||||
name, runcmd, checker, subs, config, env=env, cwd=cwd
|
||||
).run_async()
|
||||
failure = TestRun(name, runcmd, checker, subs, config).run()
|
||||
if failure:
|
||||
failure_handler(failure)
|
||||
success = False
|
||||
return success
|
||||
|
||||
|
||||
def check_path(path, subs, config, failure_handler, env=None):
|
||||
return asyncio.run(check_path_async(path, subs, config, failure_handler, env=env))
|
||||
|
||||
|
||||
async def check_path_async(path, subs, config, failure_handler, env=None, cwd=None):
|
||||
def check_path(path, subs, config, failure_handler):
|
||||
with io.open(path, encoding="utf-8") as fd:
|
||||
return await check_file_async(
|
||||
fd, path, subs, config, failure_handler, env=env, cwd=cwd
|
||||
)
|
||||
return check_file(fd, path, subs, config, failure_handler)
|
||||
|
||||
|
||||
def parse_subs(subs):
|
||||
@@ -762,7 +705,7 @@ def parse_subs(subs):
|
||||
|
||||
|
||||
def get_argparse():
|
||||
"""Return a littlecheck argument parser."""
|
||||
""" Return a littlecheck argument parser. """
|
||||
parser = argparse.ArgumentParser(
|
||||
description="littlecheck: command line tool tester."
|
||||
)
|
||||
@@ -0,0 +1,3 @@
|
||||
# LSAN can detect leaks tracing back to __asan::AsanThread::ThreadStart (probably caused by our
|
||||
# threads not exiting before their TLS dtors are called). Just ignore it.
|
||||
leak:AsanThread
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/bin/sh
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Helper to notarize an .app.zip or .pkg file.
|
||||
|
||||
@@ -13,7 +13,7 @@ for INPUT in "$@"; do
|
||||
echo "Processing $INPUT"
|
||||
test -f "$INPUT" || die "Not a file: $INPUT"
|
||||
ext="${INPUT##*.}"
|
||||
{ test "$ext" = "zip" || test "$ext" = "pkg"; } || die "Unrecognized extension: $ext"
|
||||
(test "$ext" = "zip" || test "$ext" = "pkg") || die "Unrecognized extension: $ext"
|
||||
|
||||
xcrun notarytool submit "$INPUT" --keychain-profile AC_PASSWORD --wait
|
||||
|
||||
@@ -21,7 +21,9 @@ for INPUT in "$@"; do
|
||||
TMPDIR=$(mktemp -d)
|
||||
echo "Extracting to $TMPDIR"
|
||||
unzip -q "$INPUT" -d "$TMPDIR"
|
||||
STAPLE_TARGET=$(echo "$TMPDIR"/*)
|
||||
# Force glob expansion.
|
||||
STAPLE_TARGET="$TMPDIR"/*
|
||||
STAPLE_TARGET=$(echo $STAPLE_TARGET)
|
||||
else
|
||||
STAPLE_TARGET="$INPUT"
|
||||
fi
|
||||
@@ -33,7 +35,7 @@ for INPUT in "$@"; do
|
||||
INPUT_FULL=$(realpath "$INPUT")
|
||||
rm -f "$INPUT"
|
||||
cd "$(dirname "$STAPLE_TARGET")"
|
||||
zip -r -q "$INPUT_FULL" "$(basename "$STAPLE_TARGET")"
|
||||
zip -r -q "$INPUT_FULL" $(basename "$STAPLE_TARGET")
|
||||
fi
|
||||
echo "Processed $INPUT"
|
||||
|
||||
|
||||
@@ -1,164 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Script to produce an OS X installer .pkg and .app(.zip)
|
||||
|
||||
usage() {
|
||||
echo "Build macOS packages, optionally signing and notarizing them."
|
||||
echo "Usage: $0 options"
|
||||
echo "Options:"
|
||||
echo " -s Enables code signing"
|
||||
echo " -f <APP_KEY.p12> Path to .p12 file for application signing"
|
||||
echo " -i <INSTALLER_KEY.p12> Path to .p12 file for installer signing"
|
||||
echo " -p <PASSWORD> Password for the .p12 files (necessary to access the certificates)"
|
||||
echo " -e <entitlements file> (Optional) Path to an entitlements XML file"
|
||||
echo " -n Enables notarization. This will fail if code signing is not also enabled."
|
||||
echo " -j <API_KEY.JSON> Path to JSON file generated with \`rcodesign encode-app-store-connect-api-key\` (required for notarization)"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
SIGN=
|
||||
NOTARIZE=
|
||||
|
||||
ARM64_DEPLOY_TARGET='MACOSX_DEPLOYMENT_TARGET=11.0'
|
||||
X86_64_DEPLOY_TARGET='MACOSX_DEPLOYMENT_TARGET=10.12'
|
||||
cmake_args=()
|
||||
|
||||
while getopts "c:sf:i:p:e:nj:" opt; do
|
||||
case $opt in
|
||||
c) cmake_args+=("$OPTARG");;
|
||||
s) SIGN=1;;
|
||||
f) P12_APP_FILE=$(realpath "$OPTARG");;
|
||||
i) P12_INSTALL_FILE=$(realpath "$OPTARG");;
|
||||
p) P12_PASSWORD="$OPTARG";;
|
||||
e) ENTITLEMENTS_FILE=$(realpath "$OPTARG");;
|
||||
n) NOTARIZE=1;;
|
||||
j) API_KEY_FILE=$(realpath "$OPTARG");;
|
||||
\?) usage;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -n "$SIGN" ] && { [ -z "$P12_APP_FILE" ] || [ -z "$P12_INSTALL_FILE" ] || [ -z "$P12_PASSWORD" ]; }; then
|
||||
usage
|
||||
fi
|
||||
|
||||
if [ -n "$NOTARIZE" ] && [ -z "$API_KEY_FILE" ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
VERSION=$(build_tools/git_version_gen.sh)
|
||||
|
||||
echo "Version is $VERSION"
|
||||
|
||||
PKGDIR=$(mktemp -d)
|
||||
echo "$PKGDIR"
|
||||
|
||||
SRC_DIR=$PWD
|
||||
OUTPUT_PATH=${FISH_ARTEFACT_PATH:-~/fish_built}
|
||||
|
||||
mkdir -p "$PKGDIR/build_x86_64" "$PKGDIR/build_arm64" "$PKGDIR/root" "$PKGDIR/intermediates" "$PKGDIR/dst"
|
||||
|
||||
do_cmake() {
|
||||
cmake \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DCMAKE_EXE_LINKER_FLAGS="-Wl,-ld_classic" \
|
||||
-DCMAKE_OSX_ARCHITECTURES='arm64;x86_64' \
|
||||
-DFISH_USE_SYSTEM_PCRE2=OFF \
|
||||
"${cmake_args[@]}" \
|
||||
"$@" \
|
||||
"$SRC_DIR"
|
||||
}
|
||||
|
||||
# Build and install for arm64.
|
||||
# Pass FISH_USE_SYSTEM_PCRE2=OFF because a system PCRE2 on macOS will not be signed by fish,
|
||||
# and will probably not be built universal, so the package will fail to validate/run on other systems.
|
||||
# Note CMAKE_OSX_ARCHITECTURES is still relevant for the Mac app.
|
||||
{ cd "$PKGDIR/build_arm64" \
|
||||
&& do_cmake -DRust_CARGO_TARGET=aarch64-apple-darwin \
|
||||
&& env $ARM64_DEPLOY_TARGET make VERBOSE=1 -j 12 \
|
||||
&& env DESTDIR="$PKGDIR/root/" $ARM64_DEPLOY_TARGET make install;
|
||||
}
|
||||
|
||||
# Build for x86-64 but do not install; instead we will make a fat binary inside the root.
|
||||
{ cd "$PKGDIR/build_x86_64" \
|
||||
&& do_cmake -DRust_CARGO_TARGET=x86_64-apple-darwin \
|
||||
&& env $X86_64_DEPLOY_TARGET make VERBOSE=1 -j 12; }
|
||||
|
||||
# Fatten it up.
|
||||
FILE=$PKGDIR/root/usr/local/bin/fish
|
||||
X86_FILE=$PKGDIR/build_x86_64/$(basename "$FILE")
|
||||
rcodesign macho-universal-create --output "$FILE" "$FILE" "$X86_FILE"
|
||||
chmod 755 "$FILE"
|
||||
|
||||
if test -n "$SIGN"; then
|
||||
echo "Signing executables"
|
||||
ARGS=(
|
||||
--p12-file "$P12_APP_FILE"
|
||||
--p12-password "$P12_PASSWORD"
|
||||
--code-signature-flags runtime
|
||||
--for-notarization
|
||||
)
|
||||
if [ -n "$ENTITLEMENTS_FILE" ]; then
|
||||
ARGS+=(--entitlements-xml-file "$ENTITLEMENTS_FILE")
|
||||
fi
|
||||
(set +x; rcodesign sign "${ARGS[@]}" "$PKGDIR"/root/usr/local/bin/fish)
|
||||
fi
|
||||
|
||||
pkgbuild --scripts "$SRC_DIR/build_tools/osx_package_scripts" --root "$PKGDIR/root/" --identifier 'com.ridiculousfish.fish-shell-pkg' --version "$VERSION" "$PKGDIR/intermediates/fish.pkg"
|
||||
productbuild --package-path "$PKGDIR/intermediates" --distribution "$SRC_DIR/build_tools/osx_distribution.xml" --resources "$SRC_DIR/build_tools/osx_package_resources/" "$OUTPUT_PATH/fish-$VERSION.pkg"
|
||||
|
||||
if test -n "$SIGN"; then
|
||||
echo "Signing installer"
|
||||
ARGS=(
|
||||
--p12-file "$P12_INSTALL_FILE"
|
||||
--p12-password "$P12_PASSWORD"
|
||||
--code-signature-flags runtime
|
||||
--for-notarization
|
||||
)
|
||||
(set +x; rcodesign sign "${ARGS[@]}" "$OUTPUT_PATH/fish-$VERSION.pkg")
|
||||
fi
|
||||
|
||||
# Make the app
|
||||
(cd "$PKGDIR/build_arm64" && env $ARM64_DEPLOY_TARGET make -j 12 fish_macapp)
|
||||
(cd "$PKGDIR/build_x86_64" && env $X86_64_DEPLOY_TARGET make -j 12 fish_macapp)
|
||||
|
||||
# Make the app's /usr/local/bin/fish binary universal. Note fish.app/Contents/MacOS/fish already is, courtesy of CMake.
|
||||
cd "$PKGDIR/build_arm64"
|
||||
FILE=fish.app/Contents/Resources/base/usr/local/bin/fish
|
||||
X86_FILE=$PKGDIR/build_x86_64/fish.app/Contents/Resources/base/usr/local/bin/$(basename "$FILE")
|
||||
rcodesign macho-universal-create --output "$FILE" "$FILE" "$X86_FILE"
|
||||
# macho-universal-create screws up the permissions.
|
||||
chmod 755 "$FILE"
|
||||
|
||||
if test -n "$SIGN"; then
|
||||
echo "Signing app"
|
||||
ARGS=(
|
||||
--p12-file "$P12_APP_FILE"
|
||||
--p12-password "$P12_PASSWORD"
|
||||
--code-signature-flags runtime
|
||||
--for-notarization
|
||||
)
|
||||
if [ -n "$ENTITLEMENTS_FILE" ]; then
|
||||
ARGS+=(--entitlements-xml-file "$ENTITLEMENTS_FILE")
|
||||
fi
|
||||
(set +x; rcodesign sign "${ARGS[@]}" "fish.app")
|
||||
|
||||
fi
|
||||
|
||||
cp -R "fish.app" "$OUTPUT_PATH/fish-$VERSION.app"
|
||||
cd "$OUTPUT_PATH"
|
||||
|
||||
# Maybe notarize.
|
||||
if test -n "$NOTARIZE"; then
|
||||
echo "Notarizing"
|
||||
rcodesign notarize --staple --wait --max-wait-seconds 1800 --api-key-file "$API_KEY_FILE" "$OUTPUT_PATH/fish-$VERSION.pkg"
|
||||
rcodesign notarize --staple --wait --max-wait-seconds 1800 --api-key-file "$API_KEY_FILE" "$OUTPUT_PATH/fish-$VERSION.app"
|
||||
fi
|
||||
|
||||
# Zip it up.
|
||||
zip -r "fish-$VERSION.app.zip" "fish-$VERSION.app" && rm -Rf "fish-$VERSION.app"
|
||||
|
||||
rm -rf "$PKGDIR"
|
||||
@@ -1 +0,0 @@
|
||||
make_macos_pkg.sh
|
||||
183
build_tools/make_pkg.sh
Executable file
183
build_tools/make_pkg.sh
Executable file
@@ -0,0 +1,183 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Script to produce an OS X installer .pkg and .app(.zip)
|
||||
|
||||
usage() {
|
||||
echo "Build macOS packages, optionally signing and notarizing them."
|
||||
echo "Usage: $0 options"
|
||||
echo "Options:"
|
||||
echo " -s Enables code signing"
|
||||
echo " -f <APP_KEY.p12> Path to .p12 file for application signing"
|
||||
echo " -i <INSTALLER_KEY.p12> Path to .p12 file for installer signing"
|
||||
echo " -p <PASSWORD> Password for the .p12 files (necessary to access the certificates)"
|
||||
echo " -e <entitlements file> (Optional) Path to an entitlements XML file"
|
||||
echo " -n Enables notarization. This will fail if code signing is not also enabled."
|
||||
echo " -j <API_KEY.JSON> Path to JSON file generated with `rcodesign encode-app-store-connect-api-key` (required for notarization)"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
SIGN=
|
||||
NOTARIZE=
|
||||
|
||||
ARM64_DEPLOY_TARGET='MACOSX_DEPLOYMENT_TARGET=11.0'
|
||||
X86_64_DEPLOY_TARGET='MACOSX_DEPLOYMENT_TARGET=10.9'
|
||||
|
||||
# As of this writing, the most recent Rust release supports macOS back to 10.12.
|
||||
# The first supported version of macOS on arm64 is 10.15, so any Rust is fine for arm64.
|
||||
# We wish to support back to 10.9 on x86-64; the last version of Rust to support that is
|
||||
# version 1.73.0.
|
||||
RUST_VERSION_X86_64=1.73.0
|
||||
|
||||
while getopts "sf:i:p:e:nj:" opt; do
|
||||
case $opt in
|
||||
s) SIGN=1;;
|
||||
f) P12_APP_FILE=$(realpath "$OPTARG");;
|
||||
i) P12_INSTALL_FILE=$(realpath "$OPTARG");;
|
||||
p) P12_PASSWORD="$OPTARG";;
|
||||
e) ENTITLEMENTS_FILE=$(realpath "$OPTARG");;
|
||||
n) NOTARIZE=1;;
|
||||
j) API_KEY_FILE=$(realpath "$OPTARG");;
|
||||
\?) usage;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -n "$SIGN" ] && ([ -z "$P12_APP_FILE" ] || [-z "$P12_INSTALL_FILE"] || [ -z "$P12_PASSWORD" ]); then
|
||||
usage
|
||||
fi
|
||||
|
||||
if [ -n "$NOTARIZE" ] && [ -z "$API_KEY_FILE" ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
VERSION=$(git describe --always --dirty 2>/dev/null)
|
||||
if test -z "$VERSION" ; then
|
||||
echo "Could not get version from git"
|
||||
if test -f version; then
|
||||
VERSION=$(cat version)
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Version is $VERSION"
|
||||
|
||||
PKGDIR=$(mktemp -d)
|
||||
echo "$PKGDIR"
|
||||
|
||||
SRC_DIR=$PWD
|
||||
OUTPUT_PATH=${FISH_ARTEFACT_PATH:-~/fish_built}
|
||||
|
||||
mkdir -p "$PKGDIR/build_x86_64" "$PKGDIR/build_arm64" "$PKGDIR/root" "$PKGDIR/intermediates" "$PKGDIR/dst"
|
||||
|
||||
# Build and install for arm64.
|
||||
# Pass FISH_USE_SYSTEM_PCRE2=OFF because a system PCRE2 on macOS will not be signed by fish,
|
||||
# and will probably not be built universal, so the package will fail to validate/run on other systems.
|
||||
# Note CMAKE_OSX_ARCHITECTURES is still relevant for the Mac app.
|
||||
{ cd "$PKGDIR/build_arm64" \
|
||||
&& cmake \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DCMAKE_EXE_LINKER_FLAGS="-Wl,-ld_classic" \
|
||||
-DWITH_GETTEXT=OFF \
|
||||
-DRust_CARGO_TARGET=aarch64-apple-darwin \
|
||||
-DCMAKE_OSX_ARCHITECTURES='arm64;x86_64' \
|
||||
-DFISH_USE_SYSTEM_PCRE2=OFF \
|
||||
"$SRC_DIR" \
|
||||
&& env $ARM64_DEPLOY_TARGET make VERBOSE=1 -j 12 \
|
||||
&& env DESTDIR="$PKGDIR/root/" $ARM64_DEPLOY_TARGET make install;
|
||||
}
|
||||
|
||||
# Build for x86-64 but do not install; instead we will make some fat binaries inside the root.
|
||||
# Set RUST_VERSION_X86_64 to the last version of Rust that supports macOS 10.9.
|
||||
{ cd "$PKGDIR/build_x86_64" \
|
||||
&& cmake \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DCMAKE_EXE_LINKER_FLAGS="-Wl,-ld_classic" \
|
||||
-DWITH_GETTEXT=OFF \
|
||||
-DRust_TOOLCHAIN="$RUST_VERSION_X86_64" \
|
||||
-DRust_CARGO_TARGET=x86_64-apple-darwin \
|
||||
-DCMAKE_OSX_ARCHITECTURES='arm64;x86_64' \
|
||||
-DFISH_USE_SYSTEM_PCRE2=OFF "$SRC_DIR" \
|
||||
&& env $X86_64_DEPLOY_TARGET make VERBOSE=1 -j 12; }
|
||||
|
||||
# Fatten them up.
|
||||
for FILE in "$PKGDIR"/root/usr/local/bin/*; do
|
||||
X86_FILE="$PKGDIR/build_x86_64/$(basename $FILE)"
|
||||
rcodesign macho-universal-create --output "$FILE" "$FILE" "$X86_FILE"
|
||||
chmod 755 "$FILE"
|
||||
done
|
||||
|
||||
if test -n "$SIGN"; then
|
||||
echo "Signing executables"
|
||||
ARGS=(
|
||||
--p12-file "$P12_APP_FILE"
|
||||
--p12-password "$P12_PASSWORD"
|
||||
--code-signature-flags runtime
|
||||
--for-notarization
|
||||
)
|
||||
if [ -n "$ENTITLEMENTS_FILE" ]; then
|
||||
ARGS+=(--entitlements-xml-file "$ENTITLEMENTS_FILE")
|
||||
fi
|
||||
for FILE in "$PKGDIR"/root/usr/local/bin/*; do
|
||||
(set +x; rcodesign sign "${ARGS[@]}" "$FILE")
|
||||
done
|
||||
fi
|
||||
|
||||
pkgbuild --scripts "$SRC_DIR/build_tools/osx_package_scripts" --root "$PKGDIR/root/" --identifier 'com.ridiculousfish.fish-shell-pkg' --version "$VERSION" "$PKGDIR/intermediates/fish.pkg"
|
||||
productbuild --package-path "$PKGDIR/intermediates" --distribution "$SRC_DIR/build_tools/osx_distribution.xml" --resources "$SRC_DIR/build_tools/osx_package_resources/" "$OUTPUT_PATH/fish-$VERSION.pkg"
|
||||
|
||||
if test -n "$SIGN"; then
|
||||
echo "Signing installer"
|
||||
ARGS=(
|
||||
--p12-file "$P12_INSTALL_FILE"
|
||||
--p12-password "$P12_PASSWORD"
|
||||
--code-signature-flags runtime
|
||||
--for-notarization
|
||||
)
|
||||
(set +x; rcodesign sign "${ARGS[@]}" "$OUTPUT_PATH/fish-$VERSION.pkg")
|
||||
fi
|
||||
|
||||
# Make the app
|
||||
(cd "$PKGDIR/build_arm64" && env $ARM64_DEPLOY_TARGET make -j 12 fish_macapp)
|
||||
(cd "$PKGDIR/build_x86_64" && env $X86_64_DEPLOY_TARGET make -j 12 fish_macapp)
|
||||
|
||||
# Make the app's /usr/local/bin binaries universal. Note fish.app/Contents/MacOS/fish already is, courtsey of CMake.
|
||||
cd "$PKGDIR/build_arm64"
|
||||
for FILE in fish.app/Contents/Resources/base/usr/local/bin/*; do
|
||||
X86_FILE="$PKGDIR/build_x86_64/fish.app/Contents/Resources/base/usr/local/bin/$(basename $FILE)"
|
||||
rcodesign macho-universal-create --output "$FILE" "$FILE" "$X86_FILE"
|
||||
|
||||
# macho-universal-create screws up the permissions.
|
||||
chmod 755 "$FILE"
|
||||
done
|
||||
|
||||
if test -n "$SIGN"; then
|
||||
echo "Signing app"
|
||||
ARGS=(
|
||||
--p12-file "$P12_APP_FILE"
|
||||
--p12-password "$P12_PASSWORD"
|
||||
--code-signature-flags runtime
|
||||
--for-notarization
|
||||
)
|
||||
if [ -n "$ENTITLEMENTS_FILE" ]; then
|
||||
ARGS+=(--entitlements-xml-file "$ENTITLEMENTS_FILE")
|
||||
fi
|
||||
(set +x; rcodesign sign "${ARGS[@]}" "fish.app")
|
||||
|
||||
fi
|
||||
|
||||
cp -R "fish.app" "$OUTPUT_PATH/fish-$VERSION.app"
|
||||
cd "$OUTPUT_PATH"
|
||||
|
||||
# Maybe notarize.
|
||||
if test -n "$NOTARIZE"; then
|
||||
echo "Notarizing"
|
||||
rcodesign notarize --staple --wait --max-wait-seconds 1800 --api-key-file "$API_KEY_FILE" "$OUTPUT_PATH/fish-$VERSION.pkg"
|
||||
rcodesign notarize --staple --wait --max-wait-seconds 1800 --api-key-file "$API_KEY_FILE" "$OUTPUT_PATH/fish-$VERSION.app"
|
||||
fi
|
||||
|
||||
# Zip it up.
|
||||
zip -r "fish-$VERSION.app.zip" "fish-$VERSION.app" && rm -Rf "fish-$VERSION.app"
|
||||
|
||||
rm -rf "$PKGDIR"
|
||||
@@ -1,43 +1,79 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Script to generate a tarball
|
||||
# We use git to output a tree. But we also want to build the user documentation
|
||||
# and put that in the tarball, so that nobody needs to have sphinx installed
|
||||
# to build it.
|
||||
# Outputs to $FISH_ARTEFACT_PATH or ~/fish_built by default
|
||||
|
||||
# Exit on error
|
||||
set -e
|
||||
|
||||
# Get the version
|
||||
VERSION=$(build_tools/git_version_gen.sh)
|
||||
# We wil generate a tarball with a prefix "fish-VERSION"
|
||||
# git can do that automatically for us via git-archive
|
||||
# but to get the documentation in, we need to make a symlink called "fish-VERSION"
|
||||
# and tar from that, so that the documentation gets the right prefix
|
||||
|
||||
prefix=fish-$VERSION
|
||||
path=${FISH_ARTEFACT_PATH:-~/fish_built}/$prefix.tar.xz
|
||||
# Use Ninja if available, as it automatically paralellises
|
||||
BUILD_TOOL="make"
|
||||
BUILD_GENERATOR="Unix Makefiles"
|
||||
if command -v ninja >/dev/null; then
|
||||
BUILD_TOOL="ninja"
|
||||
BUILD_GENERATOR="Ninja"
|
||||
fi
|
||||
|
||||
tmpdir=$(mktemp -d)
|
||||
manifest=$tmpdir/Cargo.toml
|
||||
lockfile=$tmpdir/Cargo.lock
|
||||
# We need GNU tar as that supports the --mtime and --transform options
|
||||
TAR=notfound
|
||||
for try in tar gtar gnutar; do
|
||||
if $try -Pcf /dev/null --mtime now /dev/null >/dev/null 2>&1; then
|
||||
TAR=$try
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
sed "s/^version = \".*\"\$/version = \"$VERSION\"/g" Cargo.toml \
|
||||
>"$manifest"
|
||||
awk -v version=$VERSION '
|
||||
/^name = "fish"$/ { ok=1 }
|
||||
ok == 1 && /^version = ".*"$/ {
|
||||
ok = 2;
|
||||
$0 = "version = \"" version "\"";
|
||||
}
|
||||
{print}
|
||||
' \
|
||||
Cargo.lock >"$lockfile"
|
||||
if [ "$TAR" = "notfound" ]; then
|
||||
echo 'No suitable tar (supporting --mtime) found as tar/gtar/gnutar in PATH'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git archive \
|
||||
--prefix="$prefix/" \
|
||||
--add-virtual-file="$prefix/Cargo.toml:$(cat "$manifest")" \
|
||||
--add-virtual-file="$prefix/Cargo.lock:$(cat "$lockfile")" \
|
||||
HEAD |
|
||||
xz >"$path"
|
||||
# Get the current directory, which we'll use for symlinks
|
||||
wd="$PWD"
|
||||
|
||||
rm "$manifest"
|
||||
rm "$lockfile"
|
||||
rmdir "$tmpdir"
|
||||
# Get the version from git-describe
|
||||
VERSION=$(git describe --dirty 2>/dev/null)
|
||||
|
||||
# The name of the prefix, which is the directory that you get when you untar
|
||||
prefix="fish-$VERSION"
|
||||
|
||||
# The path where we will output the tar file
|
||||
# Defaults to ~/fish_built
|
||||
path=${FISH_ARTEFACT_PATH:-~/fish_built}/$prefix.tar
|
||||
|
||||
# Clean up stuff we've written before
|
||||
rm -f "$path" "$path".xz
|
||||
|
||||
# git starts the archive
|
||||
git archive --format=tar --prefix="$prefix"/ HEAD > "$path"
|
||||
|
||||
# tarball out the documentation, generate a version file
|
||||
PREFIX_TMPDIR=$(mktemp -d)
|
||||
cd "$PREFIX_TMPDIR"
|
||||
echo "$VERSION" > version
|
||||
cmake -G "$BUILD_GENERATOR" "$wd"
|
||||
$BUILD_TOOL doc
|
||||
|
||||
TAR_APPEND="$TAR --append --file=$path --mtime=now --owner=0 --group=0 \
|
||||
--mode=g+w,a+rX --transform s/^/$prefix\//"
|
||||
$TAR_APPEND --no-recursion user_doc
|
||||
$TAR_APPEND user_doc/html user_doc/man
|
||||
$TAR_APPEND version
|
||||
|
||||
cd -
|
||||
rm -r "$PREFIX_TMPDIR"
|
||||
|
||||
# xz it
|
||||
xz "$path"
|
||||
|
||||
# Output what we did, and the sha256 hash
|
||||
echo "Tarball written to $path"
|
||||
openssl dgst -sha256 "$path"
|
||||
echo "Tarball written to $path".xz
|
||||
openssl dgst -sha256 "$path".xz
|
||||
|
||||
@@ -11,22 +11,22 @@ set -e
|
||||
# We need GNU tar as that supports the --mtime and --transform options
|
||||
TAR=notfound
|
||||
for try in tar gtar gnutar; do
|
||||
if $try -Pcf /dev/null --mtime now /dev/null >/dev/null 2>&1; then
|
||||
TAR=$try
|
||||
break
|
||||
fi
|
||||
if $try -Pcf /dev/null --mtime now /dev/null >/dev/null 2>&1; then
|
||||
TAR=$try
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$TAR" = "notfound" ]; then
|
||||
echo 'No suitable tar (supporting --mtime) found as tar/gtar/gnutar in PATH'
|
||||
exit 1
|
||||
echo 'No suitable tar (supporting --mtime) found as tar/gtar/gnutar in PATH'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the current directory, which we'll use for telling Cargo where to find the sources
|
||||
wd="$PWD"
|
||||
|
||||
# Get the version from git-describe
|
||||
VERSION=$(build_tools/git_version_gen.sh)
|
||||
VERSION=$(git describe --dirty 2>/dev/null)
|
||||
|
||||
# The name of the prefix, which is the directory that you get when you untar
|
||||
prefix="fish-$VERSION"
|
||||
@@ -42,16 +42,10 @@ rm -f "$path" "$path".xz
|
||||
PREFIX_TMPDIR=$(mktemp -d)
|
||||
cd "$PREFIX_TMPDIR"
|
||||
|
||||
# Add .cargo/config.toml. This means that the caller may need to remove that file from the tarball.
|
||||
# See e4674cd7b5f (.cargo/config.toml: exclude from tarball, 2025-01-12)
|
||||
|
||||
mkdir .cargo
|
||||
{
|
||||
cat "$wd"/.cargo/config.toml
|
||||
cargo vendor --manifest-path "$wd/Cargo.toml"
|
||||
} > .cargo/config.toml
|
||||
cargo vendor --manifest-path "$wd/Cargo.toml" > .cargo/config.toml
|
||||
|
||||
tar cfvJ "$path".xz vendor .cargo
|
||||
tar cfvJ $path.xz vendor .cargo
|
||||
|
||||
cd -
|
||||
rm -r "$PREFIX_TMPDIR"
|
||||
|
||||
@@ -1,28 +1,29 @@
|
||||
<html>
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: system-ui, -apple-system, "Helvetica Neue", sans-serif;
|
||||
font-size: 10pt;
|
||||
}
|
||||
code, tt {
|
||||
font-family: ui-monospace, Menlo, monospace;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<p>
|
||||
<strong>fish</strong> is a smart and user-friendly command line shell. For more information, visit <a href="https://fishshell.com">fishshell.com</a>.
|
||||
</p>
|
||||
<p>
|
||||
<strong>fish</strong> will be installed into <tt>/usr/local/</tt>, and its path will be added to <wbr><tt>/etc/shells</tt> if necessary.
|
||||
</p>
|
||||
<p>
|
||||
Your default shell will <em>not</em> be changed. To make <strong>fish</strong> your login shell after the installation, run:
|
||||
</p>
|
||||
<p>
|
||||
<code>chsh -s /usr/local/bin/fish</code>
|
||||
</p>
|
||||
<p>Enjoy! Bugs can be reported on <a href="https://github.com/fish-shell/fish-shell/">GitHub</a>.</p>
|
||||
</body>
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: system-ui, -apple-system, "Helvetica Neue", sans-serif;
|
||||
font-size: 10pt;
|
||||
}
|
||||
code, tt {
|
||||
font-family: ui-monospace, Menlo, monospace;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<p>
|
||||
<strong>fish</strong> is a smart and user-friendly command line shell. For more information, visit <a href="https://fishshell.com">fishshell.com</a>.
|
||||
</p>
|
||||
<p>
|
||||
<strong>fish</strong> will be installed into <tt>/usr/local/</tt>, and its path will be added to <wbr><tt>/etc/shells</tt> if necessary.
|
||||
</p>
|
||||
<p>
|
||||
Your default shell will <em>not</em> be changed. To make <strong>fish</strong> your login shell after the installation, run:
|
||||
</p>
|
||||
<p>
|
||||
<code>chsh -s /usr/local/bin/fish</code>
|
||||
</p>
|
||||
<p>Enjoy! Bugs can be reported on <a href="https://github.org/fish-shell/fish-shell/">GitHub</a>.</p>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -4,12 +4,12 @@
|
||||
|
||||
if test $# -eq 0
|
||||
then
|
||||
echo "usage: $0 shellname [shellname ...]"
|
||||
exit 1
|
||||
echo usage: $0 shellname [shellname ...]
|
||||
exit 1
|
||||
fi
|
||||
|
||||
scriptname=$(basename "$0")
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
scriptname=`basename "$0"`
|
||||
if [[ $UID -ne 0 ]]; then
|
||||
echo "${scriptname} must be run as root"
|
||||
exit 1
|
||||
fi
|
||||
@@ -20,7 +20,6 @@ tmpfile=${file}.tmp
|
||||
|
||||
set -o noclobber
|
||||
|
||||
# shellcheck disable=SC2064
|
||||
trap "rm -f $tmpfile" EXIT
|
||||
|
||||
if ! cat $file > $tmpfile
|
||||
@@ -33,13 +32,15 @@ EOF
|
||||
fi
|
||||
|
||||
# Append a newline if it doesn't exist
|
||||
[ -z "$(tail -c1 "$tmpfile")" ] || echo "" >> "$tmpfile"
|
||||
if [ "$(tail -c1 "$tmpfile"; echo x)" != $'\nx' ]; then
|
||||
echo "" >> "$tmpfile"
|
||||
fi
|
||||
|
||||
for i
|
||||
do
|
||||
if ! grep -q "^${i}$" "$tmpfile"
|
||||
then
|
||||
echo "$i" >> "$tmpfile"
|
||||
echo $i >> "$tmpfile"
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
#!/bin/sh -x
|
||||
|
||||
./add-shell "${DSTVOLUME}"usr/local/bin/fish
|
||||
./add-shell ${DSTVOLUME}usr/local/bin/fish
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh -x
|
||||
|
||||
echo "Removing any previous installation"
|
||||
pkgutil --pkg-info "${INSTALL_PKG_SESSION_ID}" && pkgutil --only-files --files "${INSTALL_PKG_SESSION_ID}" | while read -r installed
|
||||
do rm -v "${DSTVOLUME}${installed}"
|
||||
pkgutil --pkg-info ${INSTALL_PKG_SESSION_ID} && pkgutil --only-files --files ${INSTALL_PKG_SESSION_ID} | while read installed
|
||||
do rm -v ${DSTVOLUME}${installed}
|
||||
done
|
||||
echo "... removed"
|
||||
|
||||
@@ -30,27 +30,20 @@ TIMEOUT_SECS = 5
|
||||
UNEXPECTED_SUCCESS = object()
|
||||
|
||||
# When rendering fish's output, remove the control sequences that modify terminal state,
|
||||
# to avoid confusing the calling terminal.
|
||||
# to avoid confusing the calling terminal. No need to replace things like colors and cursor
|
||||
# movement that are harmless and/or will not leak anyway.
|
||||
SANITIZE_FOR_PRINTING_RE = re.compile(
|
||||
r"""
|
||||
# Filter CSI commands except for colors and cursor movement.
|
||||
(?!\x1b\[\d*m)
|
||||
(?!\x1b\[K)
|
||||
(?!\x1b\[\d*[ABCD])
|
||||
\x1b\[[\x30-\x3f]*[\x20-\x2f]*[\x40-\x7e]
|
||||
# OSC
|
||||
| \x1b\].*?\x07
|
||||
# DCS
|
||||
| \x1bP.*?\x1b\\
|
||||
# application keypad mode
|
||||
\x1b\[\?1004[hl]
|
||||
| \x1b\[\?2004[hl]
|
||||
| \x1b\[>4;[10]m
|
||||
| \x1b\[>5u
|
||||
| \x1b\[<1u
|
||||
| \x1b=
|
||||
| \x1b>
|
||||
| \x1b\].*?\x07
|
||||
""",
|
||||
re.VERBOSE,
|
||||
)
|
||||
|
||||
assert SANITIZE_FOR_PRINTING_RE.sub("", "\x1b[>4;1m") == ""
|
||||
assert SANITIZE_FOR_PRINTING_RE.sub("", "\x1b[31m") == "\x1b[31m"
|
||||
re.VERBOSE)
|
||||
|
||||
|
||||
def get_prompt_re(counter):
|
||||
@@ -150,12 +143,7 @@ class SpawnedProc(object):
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name="fish",
|
||||
timeout=TIMEOUT_SECS,
|
||||
env=os.environ.copy(),
|
||||
scroll_content_up_supported: bool = False,
|
||||
**kwargs,
|
||||
self, name="fish", timeout=TIMEOUT_SECS, env=os.environ.copy(), **kwargs
|
||||
):
|
||||
"""Construct from a name, timeout, and environment.
|
||||
|
||||
@@ -167,40 +155,17 @@ class SpawnedProc(object):
|
||||
before giving up on some expected output.
|
||||
env: a string->string dictionary, describing the environment variables.
|
||||
"""
|
||||
import shlex
|
||||
|
||||
if name not in env:
|
||||
raise ValueError("'%s' variable not found in environment" % name)
|
||||
exe_path = env.get(name)
|
||||
# HACK: If there are no args, pexpect will fail if exe_path contains any shell metachars.
|
||||
# But not if there are args, in which case it probably switches spawning method?
|
||||
if "args" not in kwargs:
|
||||
exe_path = shlex.quote(exe_path)
|
||||
self.colorize = sys.stdout.isatty() or env.get("FISH_FORCE_COLOR", "0") == "1"
|
||||
self.messages = []
|
||||
self.start_time = None
|
||||
if "FISH_TEST_NO_RECURRENT_QUERIES" not in env:
|
||||
env["FISH_TEST_NO_RECURRENT_QUERIES"] = "1"
|
||||
self.spawn = pexpect.spawn(
|
||||
exe_path, env=env, encoding="utf-8", timeout=timeout, **kwargs
|
||||
)
|
||||
self.spawn.delaybeforesend = None
|
||||
self.prompt_counter = 0
|
||||
if scroll_content_up_supported:
|
||||
# XTGETTCAP
|
||||
key = bytes.hex(b"indn")
|
||||
value = bytes.hex(b"dont-care")
|
||||
self.spawn.send(f"\x1bP1+r{key}={value}\x1b\\")
|
||||
if env.get("TERM") != "dumb":
|
||||
self.send_primary_device_attribute()
|
||||
|
||||
def send_cursor_position_report(self, *, y: int, x: int):
|
||||
assert x != 0
|
||||
assert y != 0
|
||||
self.spawn.send(f"\x1b[{y};{x}R")
|
||||
|
||||
def send_primary_device_attribute(self):
|
||||
self.spawn.send("\x1b[?123c") # Primary Device Attribute
|
||||
|
||||
def time_since_first_message(self):
|
||||
"""Return a delta in seconds since the first message, or 0 if this is the first."""
|
||||
@@ -294,11 +259,7 @@ class SpawnedProc(object):
|
||||
failtype = pexpect_error_type(err)
|
||||
# If we get an EOF, we check if the process exited with a signal.
|
||||
# This shows us e.g. if it crashed
|
||||
if (
|
||||
failtype == "EOF"
|
||||
and self.spawn.signalstatus is not None
|
||||
and self.spawn.signalstatus != 0
|
||||
):
|
||||
if failtype == 'EOF' and self.spawn.signalstatus is not None and self.spawn.signalstatus != 0:
|
||||
failtype = "SIGNAL " + Signals(self.spawn.signalstatus).name
|
||||
|
||||
fmtkeys = {"failtype": failtype, "pat": escape(pat)}
|
||||
@@ -328,14 +289,12 @@ class SpawnedProc(object):
|
||||
print("")
|
||||
print("{CYAN}When written to the tty, this looks like:{RESET}".format(**colors))
|
||||
print("{CYAN}<-------{RESET}".format(**colors))
|
||||
sys.stdout.write(SANITIZE_FOR_PRINTING_RE.sub("", self.spawn.before))
|
||||
sys.stdout.write(SANITIZE_FOR_PRINTING_RE.sub('', self.spawn.before))
|
||||
sys.stdout.flush()
|
||||
maybe_nl = ""
|
||||
maybe_nl=""
|
||||
if not self.spawn.before.endswith("\n"):
|
||||
maybe_nl = "\n{CYAN}(no trailing newline)".format(**colors)
|
||||
print(
|
||||
"{RESET}{maybe_nl}{CYAN}------->{RESET}".format(maybe_nl=maybe_nl, **colors)
|
||||
)
|
||||
maybe_nl="\n{CYAN}(no trailing newline)".format(**colors)
|
||||
print("{RESET}{maybe_nl}{CYAN}------->{RESET}".format(maybe_nl=maybe_nl, **colors))
|
||||
|
||||
print("")
|
||||
|
||||
@@ -361,7 +320,7 @@ class SpawnedProc(object):
|
||||
filename=m.filename,
|
||||
lineno=m.lineno,
|
||||
etext=etext,
|
||||
**colors,
|
||||
**colors
|
||||
)
|
||||
)
|
||||
print("")
|
||||
@@ -402,24 +361,22 @@ class SpawnedProc(object):
|
||||
|
||||
|
||||
def control(char: str) -> str:
|
||||
"""Returns the char sent when control is pressed along the given key."""
|
||||
""" Returns the char sent when control is pressed along the given key. """
|
||||
assert len(char) == 1
|
||||
char = char.lower()
|
||||
if ord("a") <= ord(char) <= ord("z"):
|
||||
return chr(ord(char) - ord("a") + 1)
|
||||
return chr(
|
||||
{
|
||||
"@": 0,
|
||||
"`": 0,
|
||||
"[": 27,
|
||||
"{": 27,
|
||||
"\\": 28,
|
||||
"|": 28,
|
||||
"]": 29,
|
||||
"}": 29,
|
||||
"^": 30,
|
||||
"~": 30,
|
||||
"_": 31,
|
||||
"?": 127,
|
||||
}[char]
|
||||
)
|
||||
return chr({
|
||||
"@": 0,
|
||||
"`": 0,
|
||||
"[": 27,
|
||||
"{": 27,
|
||||
"\\": 28,
|
||||
"|": 28,
|
||||
"]": 29,
|
||||
"}": 29,
|
||||
"^": 30,
|
||||
"~": 30,
|
||||
"_": 31,
|
||||
"?": 127,
|
||||
}[char])
|
||||
@@ -1,110 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
workspace_root=$(dirname "$0")/..
|
||||
|
||||
relnotes_tmp=$(mktemp -d)
|
||||
mkdir -p "$relnotes_tmp/fake-workspace" "$relnotes_tmp/out"
|
||||
(
|
||||
cd "$workspace_root"
|
||||
cp -r doc_src CONTRIBUTING.rst README.rst "$relnotes_tmp/fake-workspace"
|
||||
)
|
||||
version=$(sed 's,^fish \(\S*\) .*,\1,; 1q' "$workspace_root/CHANGELOG.rst")
|
||||
add_stats=false
|
||||
# Skip on shallow clone (CI) for now.
|
||||
if test -z "$CI" || [ "$(git -C "$workspace_root" tag | wc -l)" -gt 1 ]; then {
|
||||
previous_version=$(
|
||||
cd "$workspace_root"
|
||||
git for-each-ref --format='%(objecttype) %(refname:strip=2)' refs/tags |
|
||||
awk '/tag/ {print $2}' | sort --version-sort |
|
||||
grep -vxF "$(git describe)" | tail -1
|
||||
)
|
||||
minor_version=${version%.*}
|
||||
previous_minor_version=${previous_version%.*}
|
||||
if [ "$minor_version" != "$previous_minor_version" ]; then
|
||||
add_stats=true
|
||||
fi
|
||||
} fi
|
||||
{
|
||||
sed -n 1,2p <"$workspace_root/CHANGELOG.rst"
|
||||
if $add_stats; then {
|
||||
ExtractCommitters() {
|
||||
git log "$1" --format="%aN"
|
||||
trailers='Co-authored-by|Signed-off-by'
|
||||
git log "$1" --format="%b" | sed -En "/^($trailers):\s*/{s///;s/\s*<.*//;p}"
|
||||
}
|
||||
ListCommitters() {
|
||||
comm "$@" "$relnotes_tmp/committers-then" "$relnotes_tmp/committers-now"
|
||||
}
|
||||
(
|
||||
cd "$workspace_root"
|
||||
ExtractCommitters "$previous_version" | sort -u >"$relnotes_tmp/committers-then"
|
||||
ExtractCommitters "$previous_version".. | sort -u >"$relnotes_tmp/committers-now"
|
||||
ListCommitters -13 >"$relnotes_tmp/committers-new"
|
||||
ListCommitters -12 >"$relnotes_tmp/committers-returning"
|
||||
num_commits=$(git log --no-merges --format=%H "$previous_version".. | wc -l)
|
||||
num_authors=$(wc -l <"$relnotes_tmp/committers-now")
|
||||
num_new_authors=$(wc -l <"$relnotes_tmp/committers-new")
|
||||
printf %s \
|
||||
"This release comprises $num_commits commits since $previous_version," \
|
||||
" contributed by $num_authors authors, $num_new_authors of which are new committers."
|
||||
echo
|
||||
echo
|
||||
)
|
||||
} fi
|
||||
|
||||
printf '%s\n' "$(awk <"$workspace_root/CHANGELOG.rst" '
|
||||
NR <= 2 || /^\.\. ignore / { next }
|
||||
/^===/ { exit }
|
||||
{ print }
|
||||
' | sed '$d')" |
|
||||
sed -e '$s/^----*$//' # Remove spurious transitions at the end of the document.
|
||||
|
||||
if $add_stats; then {
|
||||
JoinEscaped() {
|
||||
LC_CTYPE=C.UTF-8 sed 's/\S/\\&/g' |
|
||||
awk '
|
||||
NR != 1 { printf ",\n" }
|
||||
{ printf "%s", $0 }
|
||||
END { printf "\n" }
|
||||
'
|
||||
}
|
||||
echo ""
|
||||
echo "---"
|
||||
echo ""
|
||||
echo "Thanks to everyone who contributed through issue discussions, code reviews, or code changes."
|
||||
echo
|
||||
printf "Welcome our new committers: "
|
||||
JoinEscaped <"$relnotes_tmp/committers-new"
|
||||
echo
|
||||
printf "Welcome back our returning committers: "
|
||||
JoinEscaped <"$relnotes_tmp/committers-returning"
|
||||
} fi
|
||||
echo
|
||||
echo "---"
|
||||
echo
|
||||
echo 'Download links:'
|
||||
echo 'To download the source code for fish, we suggest the file named ``fish-'"$version"'.tar.xz``.'
|
||||
echo 'The file downloaded from ``Source code (tar.gz)`` will not build correctly.'
|
||||
echo 'A GPG signature using `this key <'"${FISH_GPG_PUBLIC_KEY_URL:-???}"'>`__ is available as ``fish-'"$version"'.tar.xz.asc``.'
|
||||
echo
|
||||
echo 'The files called ``fish-'"$version"'-linux-*.tar.xz`` contain'
|
||||
echo '`standalone fish binaries <https://github.com/fish-shell/fish-shell/?tab=readme-ov-file#building-fish-with-cargo>`__'
|
||||
echo 'for any Linux with the given CPU architecture.'
|
||||
} >"$relnotes_tmp/fake-workspace"/CHANGELOG.rst
|
||||
|
||||
sphinx-build >&2 -j auto \
|
||||
-W -E -b markdown -c "$workspace_root/doc_src" \
|
||||
-d "$relnotes_tmp/doctree" "$relnotes_tmp/fake-workspace/doc_src" "$relnotes_tmp/out" \
|
||||
-D markdown_http_base="https://fishshell.com/docs/$minor_version" \
|
||||
-D markdown_uri_doc_suffix=".html" \
|
||||
-D markdown_flavor=github \
|
||||
"$@"
|
||||
|
||||
# Skip changelog header
|
||||
sed -n 1p "$relnotes_tmp/out/relnotes.md" | grep -Fxq "# Release notes"
|
||||
sed -n 2p "$relnotes_tmp/out/relnotes.md" | grep -Fxq ''
|
||||
sed 1,2d "$relnotes_tmp/out/relnotes.md"
|
||||
|
||||
rm -r "$relnotes_tmp"
|
||||
@@ -1,314 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
{
|
||||
|
||||
set -ex
|
||||
|
||||
version=$1
|
||||
repository_owner=fish-shell
|
||||
remote=origin
|
||||
if [ -n "$2" ]; then
|
||||
set -u
|
||||
repository_owner=$2
|
||||
remote=$3
|
||||
set +u
|
||||
[ $# -eq 3 ]
|
||||
fi
|
||||
|
||||
[ -n "$version" ]
|
||||
|
||||
for tool in \
|
||||
cmake \
|
||||
bundle \
|
||||
diff \
|
||||
gh \
|
||||
gpg \
|
||||
jq \
|
||||
ninja \
|
||||
ruby \
|
||||
tar \
|
||||
timeout \
|
||||
uv \
|
||||
xz \
|
||||
; do
|
||||
if ! command -v "$tool" >/dev/null; then
|
||||
echo >&2 "$0: missing command: $1"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
committer=$(git var GIT_AUTHOR_IDENT)
|
||||
committer=${committer% *} # strip timezone
|
||||
committer=${committer% *} # strip timestamp
|
||||
gpg --local-user="$committer" --sign </dev/null >/dev/null
|
||||
|
||||
repo_root="$(dirname "$0")/.."
|
||||
fish_site=$repo_root/../fish-site
|
||||
fish_site_repo=git@github.com:$repository_owner/fish-site
|
||||
|
||||
for path in . "$fish_site"
|
||||
do
|
||||
if ! git -C "$path" diff HEAD --quiet ||
|
||||
git ls-files --others --exclude-standard | grep .; then
|
||||
echo >&2 "$0: index and worktree must be clean"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
(
|
||||
cd "$fish_site"
|
||||
[ "$(git rev-parse HEAD)" = \
|
||||
"$(git ls-remote "$fish_site_repo" refs/heads/master |
|
||||
awk '{print $1}')" ]
|
||||
)
|
||||
|
||||
if git tag | grep -qxF "$version"; then
|
||||
echo >&2 "$0: tag $version already exists"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
integration_branch=$(
|
||||
git for-each-ref --points-at=HEAD 'refs/heads/Integration_*' \
|
||||
--format='%(refname:strip=2)'
|
||||
)
|
||||
[ -n "$integration_branch" ] ||
|
||||
git merge-base --is-ancestor $remote/master HEAD
|
||||
|
||||
sed -n 1p CHANGELOG.rst | grep -q '^fish .*(released .*)$'
|
||||
sed -n 2p CHANGELOG.rst | grep -q '^===*$'
|
||||
|
||||
changelog_title="fish $version (released $(date +'%B %d, %Y'))"
|
||||
sed -i \
|
||||
-e "1c$changelog_title" \
|
||||
-e "2c$(printf %s "$changelog_title" | sed s/./=/g)" \
|
||||
CHANGELOG.rst
|
||||
|
||||
CreateCommit() {
|
||||
git commit -m "$1
|
||||
|
||||
Created by ./build_tools/release.sh $version"
|
||||
}
|
||||
|
||||
sed -i "s/^version = \".*\"/version = \"$1\"/g" Cargo.toml
|
||||
cargo fetch --offline # bumps the version in Cargo.lock
|
||||
if [ "$1" = "$version" ]; then
|
||||
# debchange is a Debian script to manage the Debian changelog, but
|
||||
# it's too annoying to install everywhere. Just do it by hand.
|
||||
cat - contrib/debian/changelog > contrib/debian/changelog.new <<EOF
|
||||
fish (${version}-1) stable; urgency=medium
|
||||
|
||||
* Release of new version $version.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/$version for details.
|
||||
|
||||
-- $committer $(date -R)
|
||||
|
||||
EOF
|
||||
mv contrib/debian/changelog.new contrib/debian/changelog
|
||||
git add contrib/debian/changelog
|
||||
fi
|
||||
git add CHANGELOG.rst Cargo.toml Cargo.lock
|
||||
CreateCommit "Release $version"
|
||||
|
||||
# Tags must be full objects, not lightweight tags, for
|
||||
# git_version-gen.sh to work.
|
||||
git -c "user.signingKey=$committer" \
|
||||
tag --sign --message="Release $version" $version
|
||||
|
||||
git push $remote $version
|
||||
|
||||
TIMEOUT=
|
||||
gh() {
|
||||
command ${TIMEOUT:+timeout $TIMEOUT} \
|
||||
gh --repo "$repository_owner/fish-shell" "$@"
|
||||
}
|
||||
|
||||
gh workflow run release.yml --ref="$version" \
|
||||
--raw-field="version=$version"
|
||||
|
||||
run_id=
|
||||
while [ -z "$run_id" ] && sleep 5
|
||||
do
|
||||
run_id=$(gh run list \
|
||||
--json=databaseId --jq=.[].databaseId \
|
||||
--workflow=release.yml --limit=1 \
|
||||
--commit="$(git rev-parse "$version^{commit}")")
|
||||
done
|
||||
|
||||
# Update fishshell.com
|
||||
tag_oid=$(git rev-parse "$version")
|
||||
tmpdir=$(mktemp -d)
|
||||
fish_tar_xz=fish-$version.tar.xz
|
||||
(
|
||||
local_tarball=$tmpdir/local-tarball
|
||||
mkdir "$local_tarball"
|
||||
FISH_ARTEFACT_PATH=$local_tarball ./build_tools/make_tarball.sh
|
||||
cd "$local_tarball"
|
||||
tar xf "$fish_tar_xz"
|
||||
)
|
||||
# TODO This works on draft releases only if "gh" is configured to
|
||||
# have write access to the fish-shell repository. Unless we are fine
|
||||
# publishing the release at this point, we should at least fail if
|
||||
# "gh" doesn't have write access.
|
||||
while ! \
|
||||
gh release download "$version" --dir="$tmpdir" \
|
||||
--pattern="$fish_tar_xz"
|
||||
do
|
||||
TIMEOUT=30 gh run watch "$run_id" ||:
|
||||
sleep 5
|
||||
done
|
||||
actual_tag_oid=$(git ls-remote "$remote" |
|
||||
awk '$2 == "refs/tags/'"$version"'" { print $1 }')
|
||||
[ "$tag_oid" = "$actual_tag_oid" ]
|
||||
|
||||
(
|
||||
cd "$tmpdir"
|
||||
tar xf "$fish_tar_xz"
|
||||
diff -ur "fish-$version" "local-tarball/fish-$version"
|
||||
gpg --local-user="$committer" --sign --detach --armor \
|
||||
"$fish_tar_xz"
|
||||
gh release upload "$version" "$fish_tar_xz.asc"
|
||||
)
|
||||
|
||||
(
|
||||
cd "$tmpdir/local-tarball/fish-$version"
|
||||
uv --no-managed-python venv
|
||||
. .venv/bin/activate
|
||||
cmake -GNinja -DCMAKE_BUILD_TYPE=Debug .
|
||||
ninja doc
|
||||
)
|
||||
CopyDocs() {
|
||||
rm -rf "$fish_site/site/docs/$1"
|
||||
cp -r "$tmpdir/local-tarball/fish-$version/cargo/fish-docs/html" "$fish_site/site/docs/$1"
|
||||
git -C $fish_site add "site/docs/$1"
|
||||
}
|
||||
minor_version=${version%.*}
|
||||
CopyDocs "$minor_version"
|
||||
latest_release=$(
|
||||
releases=$(git tag | grep '^[0-9]*\.[0-9]*\.[0-9]*.*' |
|
||||
sed $(: "De-prioritize release candidates (1.2.3-rc0)") \
|
||||
's/-/~/g' | LC_ALL=C sort --version-sort)
|
||||
printf %s\\n "$releases" | tail -1
|
||||
)
|
||||
if [ "$version" = "$latest_release" ]; then
|
||||
CopyDocs current
|
||||
fi
|
||||
rm -rf "$tmpdir"
|
||||
(
|
||||
cd "$fish_site"
|
||||
make
|
||||
git add -u
|
||||
git add docs
|
||||
if git ls-files --others --exclude-standard | grep .; then
|
||||
exit 1
|
||||
fi
|
||||
git commit --message="$(printf %s "\
|
||||
| Release $version (docs)
|
||||
|
|
||||
| Created by ../fish-shell/build_tools/release.sh
|
||||
" | sed 's,^\s*| \?,,')"
|
||||
)
|
||||
|
||||
gh_api_repo() {
|
||||
path=$1
|
||||
shift
|
||||
command gh api \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
"/repos/$repository_owner/fish-shell/$path" \
|
||||
"$@"
|
||||
}
|
||||
|
||||
# Approve macos-codesign
|
||||
# TODO what if current user can't approve?
|
||||
gh_pending_deployments() {
|
||||
gh_api_repo "actions/runs/$run_id/pending_deployments" "$@"
|
||||
}
|
||||
while {
|
||||
environment_id=$(gh_pending_deployments | jq .[].environment.id)
|
||||
[ -z "$environment_id" ]
|
||||
}
|
||||
do
|
||||
sleep 5
|
||||
done
|
||||
echo '
|
||||
{
|
||||
"environment_ids": ['"$environment_id"'],
|
||||
"state": "approved",
|
||||
"comment": "Approved via ./build_tools/release.sh"
|
||||
}
|
||||
' |
|
||||
gh_pending_deployments --method POST --input=-
|
||||
|
||||
# Await completion.
|
||||
gh run watch "$run_id"
|
||||
|
||||
while {
|
||||
! draft=$(gh release view "$version" --json=isDraft --jq=.isDraft) \
|
||||
|| [ "$draft" = true ]
|
||||
}
|
||||
do
|
||||
sleep 20
|
||||
done
|
||||
|
||||
(
|
||||
cd "$fish_site"
|
||||
make new-release
|
||||
git add -u
|
||||
git add docs
|
||||
if git ls-files --others --exclude-standard | grep .; then
|
||||
exit 1
|
||||
fi
|
||||
git commit --message="$(printf %s "\
|
||||
| Release $version (release list update)
|
||||
|
|
||||
| Created by ../fish-shell/build_tools/release.sh
|
||||
" | sed 's,^\s*| \?,,')"
|
||||
# This takes care to support remote names that are different from
|
||||
# fish-shell remote name. Also, support detached HEAD state.
|
||||
git push "$fish_site_repo" HEAD:master
|
||||
git fetch "$fish_site_repo" \
|
||||
"$(git rev-parse HEAD):refs/remotes/origin/master"
|
||||
)
|
||||
|
||||
if [ -n "$integration_branch" ]; then {
|
||||
git push $remote "$version^{commit}":refs/heads/$integration_branch
|
||||
} else {
|
||||
changelog=$(cat - CHANGELOG.rst <<EOF
|
||||
fish ?.?.? (released ???)
|
||||
=========================
|
||||
|
||||
EOF
|
||||
)
|
||||
printf %s\\n "$changelog" >CHANGELOG.rst
|
||||
git add CHANGELOG.rst
|
||||
CreateCommit "start new cycle"
|
||||
git push $remote HEAD:master
|
||||
} fi
|
||||
|
||||
milestone_version="$(
|
||||
if echo "$version" | grep -q '\.0$'; then
|
||||
echo "$minor_version"
|
||||
else
|
||||
echo "$version"
|
||||
fi
|
||||
)"
|
||||
milestone_number() {
|
||||
gh_api_repo milestones?state=open |
|
||||
jq --arg name "fish $1" '
|
||||
.[] | select(.title == $name) | .number
|
||||
'
|
||||
}
|
||||
gh_api_repo milestones/"$(milestone_number "$milestone_version")" \
|
||||
--method PATCH --raw-field state=closed
|
||||
|
||||
next_minor_version=$(echo "$minor_version" |
|
||||
awk -F. '{ printf "%s.%s", $1, $2+1 }')
|
||||
if [ -z "$(milestone_number "$next_minor_version")" ]; then
|
||||
gh_api_repo milestones --method POST \
|
||||
--raw-field title="fish $next_minor_version"
|
||||
fi
|
||||
|
||||
exit
|
||||
|
||||
}
|
||||
@@ -1,53 +1,38 @@
|
||||
#!/usr/bin/env fish
|
||||
#
|
||||
# This runs Python files, fish scripts (*.fish), and Rust files
|
||||
# through their respective code formatting programs.
|
||||
# This runs C++ files and fish scripts (*.fish) through their respective code
|
||||
# formatting programs.
|
||||
#
|
||||
# `--all`: Format all eligible files instead of the ones specified as arguments.
|
||||
# `--check`: Instead of reformatting, fail if a file is not formatted correctly.
|
||||
# `--force`: Proceed without asking if uncommitted changes are detected.
|
||||
# Only relevant if `--all` is specified but `--check` is not specified.
|
||||
|
||||
set -l fish_files
|
||||
set -l python_files
|
||||
set -l rust_files
|
||||
set -l all no
|
||||
|
||||
argparse all check force -- $argv
|
||||
or exit $status
|
||||
|
||||
if set -l -q _flag_all
|
||||
if test "$argv[1]" = --all
|
||||
set all yes
|
||||
if set -q argv[1]
|
||||
echo "Unexpected arguments: '$argv'"
|
||||
exit 1
|
||||
end
|
||||
set -e argv[1]
|
||||
end
|
||||
|
||||
set -l workspace_root (realpath (status dirname)/..)
|
||||
if set -q argv[1]
|
||||
echo "Unexpected arguments: '$argv'"
|
||||
exit 1
|
||||
end
|
||||
|
||||
if test $all = yes
|
||||
if not set -l -q _flag_force; and not set -l -q _flag_check
|
||||
# Potential for false positives: Not all fish files are formatted, see the `fish_files`
|
||||
# definition below.
|
||||
set -l relevant_uncommitted_changes (git status --porcelain --short --untracked-files=all | sed -e 's/^ *[^ ]* *//' | grep -E '.*\.(fish|py|rs)$')
|
||||
if set -q relevant_uncommitted_changes[1]
|
||||
for changed_file in $relevant_uncommitted_changes
|
||||
echo $changed_file
|
||||
end
|
||||
echo
|
||||
echo 'You have uncommitted changes (listed above). Are you sure you want to restyle?'
|
||||
read -P 'y/N? ' -n1 -l ans
|
||||
if not string match -qi y -- $ans
|
||||
exit 1
|
||||
end
|
||||
set -l files (git status --porcelain --short --untracked-files=all | sed -e 's/^ *[^ ]* *//')
|
||||
if set -q files[1]
|
||||
echo
|
||||
echo 'You have uncommitted changes. Are you sure you want to restyle?'
|
||||
read -P 'y/N? ' -n1 -l ans
|
||||
if not string match -qi y -- $ans
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
set fish_files $workspace_root/{benchmarks,build_tools,etc,share}/**.fish
|
||||
set python_files $workspace_root
|
||||
set fish_files share/**.fish
|
||||
set python_files {doc_src,share,tests}/**.py
|
||||
set rust_files fish-rust/src/**.rs
|
||||
else
|
||||
# Format the files specified as arguments.
|
||||
set -l files $argv
|
||||
# Extract just the fish files.
|
||||
set fish_files (string match -r '^.*\.fish$' -- $files)
|
||||
set python_files (string match -r '^.*\.py$' -- $files)
|
||||
set rust_files (string match -r '^.*\.rs$' -- $files)
|
||||
@@ -55,69 +40,37 @@ end
|
||||
|
||||
set -l red (set_color red)
|
||||
set -l green (set_color green)
|
||||
set -l yellow (set_color yellow)
|
||||
set -l blue (set_color blue)
|
||||
set -l normal (set_color normal)
|
||||
|
||||
function die -V red -V normal
|
||||
echo $red$argv[1]$normal
|
||||
exit 1
|
||||
end
|
||||
|
||||
# Run the fish reformatter if we have any fish files.
|
||||
if set -q fish_files[1]
|
||||
if not type -q fish_indent
|
||||
echo
|
||||
echo $yellow'Could not find `fish_indent` in `$PATH`.'$normal
|
||||
exit 127
|
||||
make fish_indent
|
||||
set PATH . $PATH
|
||||
end
|
||||
echo === Running "$green"fish_indent"$normal"
|
||||
if set -l -q _flag_check
|
||||
fish_indent --check -- $fish_files
|
||||
or die "Fish files are not formatted correctly."
|
||||
else
|
||||
fish_indent -w -- $fish_files
|
||||
end
|
||||
fish_indent -w -- $fish_files
|
||||
end
|
||||
|
||||
if set -q python_files[1]
|
||||
if not type -q ruff
|
||||
if not type -q black
|
||||
echo
|
||||
echo Please install "`black`" to style python
|
||||
echo
|
||||
echo $yellow'Please install `ruff` to style python'$normal
|
||||
exit 127
|
||||
end
|
||||
echo === Running "$green"ruff format"$normal"
|
||||
if set -l -q _flag_check
|
||||
ruff format --check $python_files
|
||||
or die "Python files are not formatted correctly."
|
||||
else
|
||||
ruff format $python_files
|
||||
echo === Running "$blue"black"$normal"
|
||||
black $python_files
|
||||
end
|
||||
end
|
||||
|
||||
if test $all = yes; or set -q rust_files[1]
|
||||
if not cargo fmt --version >/dev/null
|
||||
if set -q rust_files[1]
|
||||
if not type -q rustfmt
|
||||
echo
|
||||
echo Please install "`rustfmt`" to style rust
|
||||
echo
|
||||
echo $yellow'Please install "rustfmt" to style Rust, e.g. via:'
|
||||
echo "rustup component add rustfmt"$normal
|
||||
exit 127
|
||||
end
|
||||
|
||||
set -l edition_spec string match -r '^edition\s*=.*'
|
||||
test "$($edition_spec <Cargo.toml)" = "$($edition_spec <.rustfmt.toml)"
|
||||
or die "Cargo.toml and .rustfmt.toml use different editions"
|
||||
|
||||
echo === Running "$green"rustfmt"$normal"
|
||||
if set -l -q _flag_check
|
||||
if test $all = yes
|
||||
cargo fmt --all --check
|
||||
else
|
||||
rustfmt --check --files-with-diff $rust_files
|
||||
end
|
||||
or die "Rust files are not formatted correctly."
|
||||
else
|
||||
if test $all = yes
|
||||
cargo fmt --all
|
||||
else
|
||||
rustfmt $rust_files
|
||||
end
|
||||
echo === Running "$blue"rustfmt"$normal"
|
||||
rustfmt $rust_files
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
# /// script
|
||||
# requires-python = ">=3.5"
|
||||
# dependencies = [
|
||||
# "launchpadlib",
|
||||
# ]
|
||||
# ///
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
if __name__ == "__main__":
|
||||
launchpad = Launchpad.login_anonymously(
|
||||
"fish shell build script", "production", "~/.cache", version="devel"
|
||||
)
|
||||
ubu = launchpad.projects("ubuntu")
|
||||
print(
|
||||
"\n".join(
|
||||
x["name"]
|
||||
for x in ubu.series.entries
|
||||
if x["supported"] == True
|
||||
and x["name"] not in ("trusty", "xenial", "bionic", "focal")
|
||||
)
|
||||
)
|
||||
@@ -1,60 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
command -v curl
|
||||
command -v gcloud
|
||||
command -v jq
|
||||
command -v rustup
|
||||
command -v updatecli
|
||||
command -v uv
|
||||
sort --version-sort </dev/null
|
||||
|
||||
# TODO This is copied from .github/actions/install-sphinx/action.yml
|
||||
uv lock --check --exclude-newer="$(awk -F'"' <uv.lock '/^exclude-newer[[:space:]]*=/ {print $2}')"
|
||||
|
||||
update_gh_action() {
|
||||
repo=$1
|
||||
version=$(curl -fsS "https://api.github.com/repos/$repo/releases/latest" | jq -r .tag_name)
|
||||
[ -n "$version" ]
|
||||
tag_oid=$(git ls-remote "https://github.com/$repo.git" "refs/tags/$version" | cut -f1)
|
||||
[ -n "$tag_oid" ]
|
||||
find .github/workflows -name '*.yml' -type f -exec \
|
||||
sed -i "s|uses: $repo@\S\+\( \+#.*\)\?|\
|
||||
uses: $repo@$tag_oid # $version, build_tools/update-dependencies.sh|g" {} +
|
||||
}
|
||||
update_gh_action actions/checkout
|
||||
update_gh_action actions/github-script
|
||||
update_gh_action actions/upload-artifact
|
||||
update_gh_action actions/download-artifact
|
||||
update_gh_action docker/login-action
|
||||
update_gh_action docker/build-push-action
|
||||
update_gh_action docker/metadata-action
|
||||
update_gh_action EmbarkStudios/cargo-deny-action
|
||||
update_gh_action dessant/lock-threads
|
||||
update_gh_action softprops/action-gh-release
|
||||
update_gh_action msys2/setup-msys2
|
||||
|
||||
updatecli "${@:-apply}"
|
||||
|
||||
# Python version constraints may have changed.
|
||||
uv lock --upgrade --exclude-newer="$(date --date='7 days ago' --iso-8601)"
|
||||
|
||||
from_gh() {
|
||||
repo=$1
|
||||
path=$2
|
||||
destination=$3
|
||||
contents=$(curl -fsS https://raw.githubusercontent.com/"${repo}"/refs/heads/master/"${path}")
|
||||
printf '%s\n' >"$destination" "$contents"
|
||||
}
|
||||
|
||||
from_gh ridiculousfish/widecharwidth widechar_width.rs crates/widecharwidth/src/widechar_width.rs
|
||||
from_gh ridiculousfish/littlecheck littlecheck/littlecheck.py tests/littlecheck.py
|
||||
from_gh catppuccin/fish 'themes/Catppuccin Frappe.theme' share/themes/catppuccin-frappe.theme
|
||||
from_gh catppuccin/fish 'themes/Catppuccin Macchiato.theme' share/themes/catppuccin-macchiato.theme
|
||||
from_gh catppuccin/fish 'themes/Catppuccin Mocha.theme' share/themes/catppuccin-mocha.theme
|
||||
|
||||
# Update Cargo.lock
|
||||
cargo update
|
||||
# Update Cargo.toml and Cargo.lock
|
||||
cargo +nightly -Zunstable-options update --breaking
|
||||
@@ -1,156 +0,0 @@
|
||||
#!/usr/bin/env fish
|
||||
|
||||
# Updates the files used for gettext translations.
|
||||
# By default, the whole xgettext + msgmerge pipeline runs,
|
||||
# which extracts the messages from the source files into $template_file,
|
||||
# and updates the PO files for each language from that.
|
||||
#
|
||||
# Use cases:
|
||||
# For developers:
|
||||
# - Run with no args to update all PO files after making changes to Rust/fish sources.
|
||||
# For translators:
|
||||
# - Specify the language you want to work on as an argument, which must be a file in the
|
||||
# localization/po/ directory. You can specify a language which does not have translations
|
||||
# yet by specifying the name of a file which does not yet exist.
|
||||
# Make sure to follow the naming convention.
|
||||
# For testing:
|
||||
# - Specify `--dry-run` to see if any updates to the PO files would by applied by this script.
|
||||
# If this flag is specified, the script will exit with an error if there are outstanding
|
||||
# changes, and will display the diff. Do not specify other flags if `--dry-run` is specified.
|
||||
#
|
||||
# Specify `--use-existing-template=DIR` to prevent running cargo for extracting an up-to-date
|
||||
# version of the localized strings. This flag is intended for testing setups which make it
|
||||
# inconvenient to run cargo here, but run it in an earlier step to ensure up-to-date values.
|
||||
# This argument is passed on to the `fish_xgettext.fish` script and has no other uses.
|
||||
# `DIR` must be the path to a gettext template file generated from our compilation process.
|
||||
# It can be obtained by running:
|
||||
# set -l DIR (mktemp -d)
|
||||
# FISH_GETTEXT_EXTRACTION_DIR=$DIR cargo check --features=gettext-extract
|
||||
|
||||
# The sort utility is locale-sensitive.
|
||||
# Ensure that sorting output is consistent by setting LC_ALL here.
|
||||
set -gx LC_ALL C.UTF-8
|
||||
|
||||
set -l build_tools (status dirname)
|
||||
set -l po_dir $build_tools/../localization/po
|
||||
|
||||
set -l extract
|
||||
|
||||
argparse dry-run use-existing-template= -- $argv
|
||||
or exit $status
|
||||
|
||||
if test -z $argv[1]
|
||||
# Update everything if not specified otherwise.
|
||||
set -g po_files $po_dir/*.po
|
||||
else
|
||||
set -l po_dir_id (stat --format='%d:%i' -- $po_dir)
|
||||
for arg in $argv
|
||||
set -l arg_dir_id (stat --format='%d:%i' -- (dirname $arg) 2>/dev/null)
|
||||
if test $po_dir_id != "$arg_dir_id"
|
||||
echo "Argument $arg is not a file in the directory $(realpath $po_dir)."
|
||||
echo "Non-option arguments must specify paths to files in this directory."
|
||||
echo ""
|
||||
echo "If you want to add a new language to the translations not the following:"
|
||||
echo "The filename must identify a language, with a two letter ISO 639-1 language code of the target language (e.g. 'pt' for Portuguese), and use the file extension '.po'."
|
||||
echo "Optionally, you can specify a regional variant (e.g. 'pt_BR')."
|
||||
echo "So valid filenames are of the shape 'll.po' or 'll_CC.po'."
|
||||
exit 1
|
||||
end
|
||||
if not basename $arg | grep -qE '^[a-z]{2,3}(_[A-Z]{2})?\.po$'
|
||||
echo "Filename does not match the expected format ('ll.po' or 'll_CC.po')."
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
set -g po_files $argv
|
||||
end
|
||||
|
||||
set -g template_file (mktemp)
|
||||
# Protect from externally set $tmpdir leaking into this script.
|
||||
set -g tmpdir
|
||||
|
||||
function cleanup_exit
|
||||
set -l exit_status $status
|
||||
|
||||
rm $template_file
|
||||
|
||||
if set -g --query tmpdir[1]
|
||||
rm -r $tmpdir
|
||||
end
|
||||
|
||||
exit $exit_status
|
||||
end
|
||||
|
||||
if set -l --query extract
|
||||
set -l xgettext_args
|
||||
if set -l --query _flag_use_existing_template
|
||||
set xgettext_args --use-existing-template=$_flag_use_existing_template
|
||||
end
|
||||
$build_tools/fish_xgettext.fish $xgettext_args >$template_file
|
||||
or cleanup_exit
|
||||
end
|
||||
|
||||
if set -l --query _flag_dry_run
|
||||
# On a dry run, we do not modify localization/po/ but write to a temporary directory instead
|
||||
# and check if there is a difference between localization/po/ and the tmpdir after re-generating
|
||||
# the PO files.
|
||||
set -g tmpdir (mktemp -d)
|
||||
|
||||
# Ensure tmpdir has the same initial state as the po dir.
|
||||
cp -r $po_dir/* $tmpdir
|
||||
end
|
||||
|
||||
# This is used to identify lines which should be set here via $header_lines.
|
||||
# Make sure that this prefix does not appear elsewhere in the file and only contains characters
|
||||
# without special meaning in a sed pattern.
|
||||
set -g header_prefix "# fish-note-sections: "
|
||||
|
||||
function print_header
|
||||
set -l header_lines \
|
||||
"Translations are divided into sections, each starting with a fish-section-* pseudo-message." \
|
||||
"The first few sections are more important." \
|
||||
"Ignore the tier3 sections unless you have a lot of time."
|
||||
for line in $header_lines
|
||||
printf '%s%s\n' $header_prefix $line
|
||||
end
|
||||
end
|
||||
|
||||
function merge_po_files --argument-names template_file po_file
|
||||
msgmerge --no-wrap --update --no-fuzzy-matching --backup=none --quiet \
|
||||
$po_file $template_file
|
||||
or cleanup_exit
|
||||
set -l new_po_file (mktemp) # TODO Remove on failure.
|
||||
# Remove obsolete messages instead of keeping them as #~ entries.
|
||||
and msgattrib --no-wrap --no-obsolete -o $new_po_file $po_file
|
||||
or cleanup_exit
|
||||
|
||||
begin
|
||||
print_header
|
||||
# Paste PO file without old header lines.
|
||||
sed '/^'$header_prefix'/d' $new_po_file
|
||||
end >$po_file
|
||||
rm $new_po_file
|
||||
end
|
||||
|
||||
for po_file in $po_files
|
||||
if set --query tmpdir[1]
|
||||
set po_file $tmpdir/(basename $po_file)
|
||||
end
|
||||
if test -e $po_file
|
||||
merge_po_files $template_file $po_file
|
||||
else
|
||||
begin
|
||||
print_header
|
||||
cat $template_file
|
||||
end >$po_file
|
||||
end
|
||||
end
|
||||
|
||||
if set -g --query tmpdir[1]
|
||||
diff -ur $po_dir $tmpdir
|
||||
or begin
|
||||
echo ERROR: translations in localization/po/ are stale. Try running build_tools/update_translations.fish
|
||||
cleanup_exit
|
||||
end
|
||||
end
|
||||
|
||||
cleanup_exit
|
||||
@@ -1,16 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
channel=$1 # e.g. stable, testing
|
||||
package=$2 # e.g. rustc, sphinx
|
||||
|
||||
codename=$(
|
||||
curl -fsS https://ftp.debian.org/debian/dists/"${channel}"/Release |
|
||||
grep '^Codename:' | cut -d' ' -f2)
|
||||
curl -fsS https://sources.debian.org/api/src/"${package}"/ |
|
||||
jq -r --arg codename "${codename}" '
|
||||
.versions[] | select(.suites[] == $codename) | .version' |
|
||||
sed 's/^\([0-9]\+\.[0-9]\+\).*/\1/' |
|
||||
sort --version-sort |
|
||||
tail -1
|
||||
105
cmake/Docs.cmake
105
cmake/Docs.cmake
@@ -1,59 +1,86 @@
|
||||
find_program(SPHINX_EXECUTABLE NAMES sphinx-build
|
||||
HINTS
|
||||
$ENV{SPHINX_DIR}
|
||||
PATH_SUFFIXES bin
|
||||
DOC "Sphinx documentation generator")
|
||||
|
||||
include(FeatureSummary)
|
||||
|
||||
set(SPHINX_OUTPUT_DIR "${FISH_RUST_BUILD_DIR}/fish-docs")
|
||||
|
||||
set(FISH_INDENT_FOR_BUILDING_DOCS "" CACHE FILEPATH "Path to fish_indent executable for building HTML docs")
|
||||
|
||||
if(FISH_INDENT_FOR_BUILDING_DOCS)
|
||||
set(SPHINX_HTML_FISH_INDENT_DEP)
|
||||
else()
|
||||
set(FISH_INDENT_FOR_BUILDING_DOCS "${CMAKE_CURRENT_BINARY_DIR}/fish_indent")
|
||||
set(SPHINX_HTML_FISH_INDENT_DEP fish_indent)
|
||||
endif()
|
||||
|
||||
set(VARS_FOR_CARGO_SPHINX_WRAPPER
|
||||
"CARGO_TARGET_DIR=${FISH_RUST_BUILD_DIR}"
|
||||
"FISH_SPHINX=${SPHINX_EXECUTABLE}"
|
||||
)
|
||||
set(SPHINX_SRC_DIR "${CMAKE_CURRENT_SOURCE_DIR}/doc_src")
|
||||
set(SPHINX_ROOT_DIR "${CMAKE_CURRENT_BINARY_DIR}/user_doc")
|
||||
set(SPHINX_BUILD_DIR "${SPHINX_ROOT_DIR}/build")
|
||||
set(SPHINX_HTML_DIR "${SPHINX_ROOT_DIR}/html")
|
||||
set(SPHINX_MANPAGE_DIR "${SPHINX_ROOT_DIR}/man")
|
||||
|
||||
# sphinx-docs uses fish_indent for highlighting.
|
||||
# Prepend the output dir of fish_indent to PATH.
|
||||
add_custom_target(sphinx-docs
|
||||
COMMAND env ${VARS_FOR_CARGO_SPHINX_WRAPPER}
|
||||
${Rust_CARGO} xtask html-docs --fish-indent=${FISH_INDENT_FOR_BUILDING_DOCS}
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
DEPENDS ${SPHINX_HTML_FISH_INDENT_DEP}
|
||||
mkdir -p ${SPHINX_HTML_DIR}/_static/
|
||||
COMMAND env PATH="${CMAKE_BINARY_DIR}:$$PATH"
|
||||
${SPHINX_EXECUTABLE}
|
||||
-j auto
|
||||
-q -b html
|
||||
-c "${SPHINX_SRC_DIR}"
|
||||
-d "${SPHINX_ROOT_DIR}/.doctrees-html"
|
||||
"${SPHINX_SRC_DIR}"
|
||||
"${SPHINX_HTML_DIR}"
|
||||
DEPENDS ${SPHINX_SRC_DIR}/fish_indent_lexer.py fish_indent
|
||||
COMMENT "Building HTML documentation with Sphinx")
|
||||
|
||||
# sphinx-manpages needs the fish_indent binary for the version number
|
||||
add_custom_target(sphinx-manpages
|
||||
COMMAND env ${VARS_FOR_CARGO_SPHINX_WRAPPER}
|
||||
${Rust_CARGO} xtask man-pages
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
env PATH="${CMAKE_BINARY_DIR}:$$PATH"
|
||||
${SPHINX_EXECUTABLE}
|
||||
-j auto
|
||||
-q -b man
|
||||
-c "${SPHINX_SRC_DIR}"
|
||||
-d "${SPHINX_ROOT_DIR}/.doctrees-man"
|
||||
"${SPHINX_SRC_DIR}"
|
||||
# TODO: This only works if we only have section 1 manpages.
|
||||
"${SPHINX_MANPAGE_DIR}/man1"
|
||||
DEPENDS fish_indent
|
||||
COMMENT "Building man pages with Sphinx")
|
||||
|
||||
if(NOT DEFINED WITH_DOCS) # Don't check for legacy options if the new one is defined, to help bisecting.
|
||||
if(DEFINED BUILD_DOCS)
|
||||
message(FATAL_ERROR "the BUILD_DOCS option is no longer supported, use -DWITH_DOCS=ON|OFF")
|
||||
endif()
|
||||
if(DEFINED INSTALL_DOCS)
|
||||
message(FATAL_ERROR "the INSTALL_DOCS option is no longer supported, use -DWITH_DOCS=ON|OFF")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(SPHINX_EXECUTABLE)
|
||||
option(WITH_DOCS "build documentation (requires Sphinx)" ON)
|
||||
else()
|
||||
option(WITH_DOCS "build documentation (requires Sphinx)" OFF)
|
||||
endif()
|
||||
option(BUILD_DOCS "build documentation (requires Sphinx)" ON)
|
||||
else(SPHINX_EXECUTABLE)
|
||||
option(BUILD_DOCS "build documentation (requires Sphinx)" OFF)
|
||||
endif(SPHINX_EXECUTABLE)
|
||||
|
||||
if(WITH_DOCS AND NOT SPHINX_EXECUTABLE)
|
||||
if(BUILD_DOCS AND NOT SPHINX_EXECUTABLE)
|
||||
message(FATAL_ERROR "build documentation selected, but sphinx-build could not be found")
|
||||
endif()
|
||||
|
||||
add_feature_info(Documentation WITH_DOCS "user manual and documentation")
|
||||
if(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/user_doc/html
|
||||
AND IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/user_doc/man)
|
||||
set(HAVE_PREBUILT_DOCS TRUE)
|
||||
else()
|
||||
set(HAVE_PREBUILT_DOCS FALSE)
|
||||
endif()
|
||||
|
||||
if(WITH_DOCS)
|
||||
if(BUILD_DOCS OR HAVE_PREBUILT_DOCS)
|
||||
set(INSTALL_DOCS ON)
|
||||
else()
|
||||
set(INSTALL_DOCS OFF)
|
||||
endif()
|
||||
|
||||
add_feature_info(Documentation INSTALL_DOCS "user manual and documentation")
|
||||
|
||||
if(BUILD_DOCS)
|
||||
configure_file("${SPHINX_SRC_DIR}/conf.py" "${SPHINX_BUILD_DIR}/conf.py" @ONLY)
|
||||
add_custom_target(doc ALL
|
||||
DEPENDS sphinx-docs sphinx-manpages)
|
||||
|
||||
# Group docs targets into a DocsTargets folder
|
||||
set_property(TARGET doc sphinx-docs sphinx-manpages
|
||||
PROPERTY FOLDER cmake/DocTargets)
|
||||
endif()
|
||||
|
||||
elseif(HAVE_PREBUILT_DOCS)
|
||||
if(NOT CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR)
|
||||
# Out of tree build - link the prebuilt documentation to the build tree
|
||||
add_custom_target(link_doc ALL)
|
||||
add_custom_command(TARGET link_doc
|
||||
COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/user_doc ${CMAKE_CURRENT_BINARY_DIR}/user_doc
|
||||
POST_BUILD)
|
||||
endif()
|
||||
endif(BUILD_DOCS)
|
||||
|
||||
@@ -1,18 +1,298 @@
|
||||
#[=======================================================================[.rst:
|
||||
FindRust
|
||||
--------
|
||||
|
||||
Find Rust
|
||||
|
||||
This module finds an installed rustc compiler and the cargo build tool. If Rust
|
||||
is managed by rustup it determines the available toolchains and returns a
|
||||
concrete Rust version, not a rustup proxy.
|
||||
|
||||
Imported from Corrosion https://github.com/corrosion-rs/corrosion/
|
||||
|
||||
Copyright (c) 2018 Andrew Gaspar
|
||||
|
||||
Licensed under the MIT license
|
||||
|
||||
However this is absolutely gutted and reduced to the bare minimum.
|
||||
#]=======================================================================]
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
cmake_minimum_required(VERSION 3.12)
|
||||
|
||||
# search for Cargo here and set up a bunch of cool flags and stuff
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
list(APPEND CMAKE_MESSAGE_CONTEXT "FindRust")
|
||||
|
||||
# Print error message and return.
|
||||
macro(_findrust_failed)
|
||||
if("${Rust_FIND_REQUIRED}")
|
||||
message(FATAL_ERROR ${ARGN})
|
||||
elseif(NOT "${Rust_FIND_QUIETLY}")
|
||||
message(WARNING ${ARGN})
|
||||
endif()
|
||||
# Note: PARENT_SCOPE is the scope of the caller of the caller of this macro.
|
||||
set(Rust_FOUND "" PARENT_SCOPE)
|
||||
return()
|
||||
endmacro()
|
||||
|
||||
# Checks if the actual version of a Rust toolchain matches the VERSION requirements specified in find_package.
|
||||
function(_findrust_version_ok ACTUAL_VERSION OUT_IS_OK)
|
||||
if(DEFINED Rust_FIND_VERSION_RANGE)
|
||||
if(Rust_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE")
|
||||
set(COMPARSION_OPERATOR "VERSION_LESS_EQUAL")
|
||||
elseif(Rust_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE")
|
||||
set(COMPARSION_OPERATOR "VERSION_LESS")
|
||||
else()
|
||||
message(FATAL_ERROR "Unexpected value in `<PackageName>_FIND_VERSION_RANGE_MAX`: "
|
||||
"`${Rust_FIND_VERSION_RANGE_MAX}`.")
|
||||
endif()
|
||||
if(("${ACTUAL_VERSION}" VERSION_GREATER_EQUAL "${Rust_FIND_VERSION_RANGE_MIN}")
|
||||
AND
|
||||
( "${ACTUAL_VERSION}" ${COMPARSION_OPERATOR} "${Rust_FIND_VERSION_RANGE_MAX}" )
|
||||
)
|
||||
set("${OUT_IS_OK}" TRUE PARENT_SCOPE)
|
||||
else()
|
||||
set("${OUT_IS_OK}" FALSE PARENT_SCOPE)
|
||||
endif()
|
||||
elseif(DEFINED Rust_FIND_VERSION)
|
||||
if(Rust_VERSION_EXACT)
|
||||
set(COMPARISON_OPERATOR VERSION_EQUAL)
|
||||
else()
|
||||
set(COMPARISON_OPERATOR VERSION_GREATER_EQUAL)
|
||||
endif()
|
||||
if(_TOOLCHAIN_${_TOOLCHAIN_SELECTED}_VERSION "${COMPARISON_OPERATOR}" Rust_FIND_VERSION)
|
||||
set("${OUT_IS_OK}" TRUE PARENT_SCOPE)
|
||||
else()
|
||||
set("${OUT_IS_OK}" FALSE PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
# if no VERSION requirement was specified, the version is always okay.
|
||||
set("${OUT_IS_OK}" TRUE PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
function(_corrosion_strip_target_triple input_triple_or_path output_triple)
|
||||
# If the target_triple is a path to a custom target specification file, then strip everything
|
||||
# except the filename from `target_triple`.
|
||||
get_filename_component(target_triple_ext "${input_triple_or_path}" EXT)
|
||||
set(target_triple "${input_triple_or_path}")
|
||||
if(target_triple_ext)
|
||||
if(target_triple_ext STREQUAL ".json")
|
||||
get_filename_component(target_triple "${input_triple_or_path}" NAME_WE)
|
||||
endif()
|
||||
endif()
|
||||
set(${output_triple} "${target_triple}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
function(_corrosion_parse_target_triple target_triple out_arch out_vendor out_os out_env)
|
||||
_corrosion_strip_target_triple(${target_triple} target_triple)
|
||||
|
||||
# The vendor part may be left out from the target triple, and since `env` is also optional,
|
||||
# we determine if vendor is present by matching against a list of known vendors.
|
||||
set(known_vendors
|
||||
"apple"
|
||||
"esp[a-z0-9]*" # espressif, e.g. riscv32imc-esp-espidf or xtensa-esp32s3-none-elf
|
||||
"fortanix"
|
||||
"kmc"
|
||||
"pc"
|
||||
"nintendo"
|
||||
"nvidia"
|
||||
"openwrt"
|
||||
"alpine"
|
||||
"chimera"
|
||||
"unikraft"
|
||||
"unknown"
|
||||
"uwp" # aarch64-uwp-windows-msvc
|
||||
"wrs" # e.g. aarch64-wrs-vxworks
|
||||
"sony"
|
||||
"sun"
|
||||
)
|
||||
# todo: allow users to add additional vendors to the list via a cmake variable.
|
||||
list(JOIN known_vendors "|" known_vendors_joined)
|
||||
# vendor is optional - We detect if vendor is present by matching against a known list of
|
||||
# vendors. The next field is the OS, which we assume to always be present, while the last field
|
||||
# is again optional and contains the environment.
|
||||
string(REGEX MATCH
|
||||
"^([a-z0-9_\.]+)-((${known_vendors_joined})-)?([a-z0-9_]+)(-([a-z0-9_]+))?$"
|
||||
whole_match
|
||||
"${target_triple}"
|
||||
)
|
||||
if((NOT whole_match) AND (NOT CORROSION_NO_WARN_PARSE_TARGET_TRIPLE_FAILED))
|
||||
message(WARNING "Failed to parse target-triple `${target_triple}`."
|
||||
"Corrosion determines some information about the output artifacts based on OS "
|
||||
"specified in the Rust target-triple.\n"
|
||||
"Currently this is relevant for windows and darwin (mac) targets, since file "
|
||||
"extensions differ.\n"
|
||||
"Note: If you are targeting a different OS you can suppress this warning by"
|
||||
" setting the CMake cache variable "
|
||||
"`CORROSION_NO_WARN_PARSE_TARGET_TRIPLE_FAILED`."
|
||||
"Please consider opening an issue on github if you you need to add a new vendor to the list."
|
||||
)
|
||||
endif()
|
||||
|
||||
message(DEBUG "Parsed Target triple: arch: ${CMAKE_MATCH_1}, vendor: ${CMAKE_MATCH_3}, "
|
||||
"OS: ${CMAKE_MATCH_4}, env: ${CMAKE_MATCH_6}")
|
||||
|
||||
set("${out_arch}" "${CMAKE_MATCH_1}" PARENT_SCOPE)
|
||||
set("${out_vendor}" "${CMAKE_MATCH_3}" PARENT_SCOPE)
|
||||
set("${out_os}" "${CMAKE_MATCH_4}" PARENT_SCOPE)
|
||||
set("${out_env}" "${CMAKE_MATCH_6}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
function(_corrosion_determine_libs_new target_triple out_libs)
|
||||
set(package_dir "${CMAKE_BINARY_DIR}/corrosion/required_libs")
|
||||
# Cleanup on reconfigure to get a cleans state (in case we change something in the future)
|
||||
file(REMOVE_RECURSE "${package_dir}")
|
||||
file(MAKE_DIRECTORY "${package_dir}")
|
||||
set(manifest "[package]\nname = \"required_libs\"\nedition = \"2018\"\nversion = \"0.1.0\"\n")
|
||||
string(APPEND manifest "\n[lib]\ncrate-type=[\"staticlib\"]\npath = \"lib.rs\"\n")
|
||||
string(APPEND manifest "\n[workspace]\n")
|
||||
file(WRITE "${package_dir}/Cargo.toml" "${manifest}")
|
||||
file(WRITE "${package_dir}/lib.rs" "pub fn add(left: usize, right: usize) -> usize {left + right}\n")
|
||||
|
||||
execute_process(
|
||||
COMMAND ${CMAKE_COMMAND} -E env
|
||||
"CARGO_BUILD_RUSTC=${Rust_COMPILER_CACHED}"
|
||||
${Rust_CARGO_CACHED} rustc --verbose --color never --target=${target_triple} -- --print=native-static-libs
|
||||
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/corrosion/required_libs"
|
||||
RESULT_VARIABLE cargo_build_result
|
||||
ERROR_VARIABLE cargo_build_error_message
|
||||
)
|
||||
if(cargo_build_result)
|
||||
message(DEBUG "Determining required native libraries - failed: ${cargo_build_result}.")
|
||||
message(TRACE "The cargo build error was: ${cargo_build_error_message}")
|
||||
message(DEBUG "Note: This is expected for Rust targets without std support")
|
||||
return()
|
||||
else()
|
||||
# The pattern starts with `native-static-libs:` and goes to the end of the line.
|
||||
if(cargo_build_error_message MATCHES "native-static-libs: ([^\r\n]+)\r?\n")
|
||||
string(REPLACE " " ";" "libs_list" "${CMAKE_MATCH_1}")
|
||||
set(stripped_lib_list "")
|
||||
|
||||
set(was_last_framework OFF)
|
||||
foreach(lib ${libs_list})
|
||||
# merge -framework;lib -> "-framework lib" as CMake does de-duplication of link libraries, and -framework prefix is required
|
||||
if (lib STREQUAL "-framework")
|
||||
set(was_last_framework ON)
|
||||
continue()
|
||||
endif()
|
||||
if (was_last_framework)
|
||||
list(APPEND stripped_lib_list "-framework ${lib}")
|
||||
set(was_last_framework OFF)
|
||||
continue()
|
||||
endif()
|
||||
# Strip leading `-l` (unix) and potential .lib suffix (windows)
|
||||
string(REGEX REPLACE "^-l" "" "stripped_lib" "${lib}")
|
||||
string(REGEX REPLACE "\.lib$" "" "stripped_lib" "${stripped_lib}")
|
||||
list(APPEND stripped_lib_list "${stripped_lib}")
|
||||
endforeach()
|
||||
set(libs_list "${stripped_lib_list}")
|
||||
# Special case `msvcrt` to link with the debug version in Debug mode.
|
||||
list(TRANSFORM libs_list REPLACE "^msvcrt$" "\$<\$<CONFIG:Debug>:msvcrtd>")
|
||||
else()
|
||||
message(DEBUG "Determining required native libraries - failed: Regex match failure.")
|
||||
message(DEBUG "`native-static-libs` not found in: `${cargo_build_error_message}`")
|
||||
return()
|
||||
endif()
|
||||
endif()
|
||||
set("${out_libs}" "${libs_list}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
if (NOT "${Rust_TOOLCHAIN}" STREQUAL "$CACHE{Rust_TOOLCHAIN}")
|
||||
# Promote Rust_TOOLCHAIN to a cache variable if it is not already a cache variable
|
||||
set(Rust_TOOLCHAIN ${Rust_TOOLCHAIN} CACHE STRING "Requested rustup toolchain" FORCE)
|
||||
endif()
|
||||
|
||||
set(_RESOLVE_RUSTUP_TOOLCHAINS_DESC "Indicates whether to descend into the toolchain pointed to by rustup")
|
||||
set(Rust_RESOLVE_RUSTUP_TOOLCHAINS ON CACHE BOOL ${_RESOLVE_RUSTUP_TOOLCHAINS_DESC})
|
||||
|
||||
# This block checks to see if we're prioritizing a rustup-managed toolchain.
|
||||
if (DEFINED Rust_TOOLCHAIN)
|
||||
# If the user specifies `Rust_TOOLCHAIN`, then look for `rustup` first, rather than `rustc`.
|
||||
find_program(Rust_RUSTUP rustup PATHS "$ENV{HOME}/.cargo/bin")
|
||||
if(NOT Rust_RUSTUP)
|
||||
if(NOT "${Rust_FIND_QUIETLY}")
|
||||
message(
|
||||
WARNING "CMake variable `Rust_TOOLCHAIN` specified, but `rustup` was not found. "
|
||||
"Ignoring toolchain and looking for a Rust toolchain not managed by rustup.")
|
||||
endif()
|
||||
endif()
|
||||
else()
|
||||
# If we aren't definitely using a rustup toolchain, look for rustc first - the user may have
|
||||
# a toolchain installed via a method other than rustup higher in the PATH, which should be
|
||||
# preferred. However, if the first-found rustc is a rustup proxy, then we'll revert to
|
||||
# finding the preferred toolchain via rustup.
|
||||
|
||||
# Uses `Rust_COMPILER` to let user-specified `rustc` win. But we will still "override" the
|
||||
# user's setting if it is pointing to `rustup`. Default rustup install path is provided as a
|
||||
# backup if a toolchain cannot be found in the user's PATH.
|
||||
|
||||
if (DEFINED Rust_COMPILER)
|
||||
set(_Rust_COMPILER_TEST "${Rust_COMPILER}")
|
||||
set(_USER_SPECIFIED_RUSTC ON)
|
||||
if(NOT (EXISTS "${_Rust_COMPILER_TEST}" AND NOT IS_DIRECTORY "${_Rust_COMPILER_TEST}"))
|
||||
set(_ERROR_MESSAGE "Rust_COMPILER was set to `${Rust_COMPILER}`, but this file does "
|
||||
"not exist."
|
||||
)
|
||||
_findrust_failed(${_ERROR_MESSAGE})
|
||||
return()
|
||||
endif()
|
||||
else()
|
||||
find_program(_Rust_COMPILER_TEST rustc PATHS "$ENV{HOME}/.cargo/bin")
|
||||
if(NOT EXISTS "${_Rust_COMPILER_TEST}")
|
||||
set(_ERROR_MESSAGE "`rustc` not found in PATH or `$ENV{HOME}/.cargo/bin`.\n"
|
||||
"Hint: Check if `rustc` is in PATH or manually specify the location "
|
||||
"by setting `Rust_COMPILER` to the path to `rustc`.")
|
||||
_findrust_failed(${_ERROR_MESSAGE})
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Check if the discovered rustc is actually a "rustup" proxy.
|
||||
execute_process(
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E env
|
||||
RUSTUP_FORCE_ARG0=rustup
|
||||
"${_Rust_COMPILER_TEST}" --version
|
||||
OUTPUT_VARIABLE _RUSTC_VERSION_RAW
|
||||
ERROR_VARIABLE _RUSTC_VERSION_STDERR
|
||||
RESULT_VARIABLE _RUSTC_VERSION_RESULT
|
||||
)
|
||||
|
||||
if(NOT (_RUSTC_VERSION_RESULT EQUAL "0"))
|
||||
_findrust_failed("`${_Rust_COMPILER_TEST} --version` failed with ${_RUSTC_VERSION_RESULT}\n"
|
||||
"rustc stderr:\n${_RUSTC_VERSION_STDERR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
if (_RUSTC_VERSION_RAW MATCHES "rustup [0-9\\.]+")
|
||||
if (_USER_SPECIFIED_RUSTC)
|
||||
message(
|
||||
WARNING "User-specified Rust_COMPILER pointed to rustup's rustc proxy. Corrosion's "
|
||||
"FindRust will always try to evaluate to an actual Rust toolchain, and so the "
|
||||
"user-specified Rust_COMPILER will be discarded in favor of the default "
|
||||
"rustup-managed toolchain."
|
||||
)
|
||||
|
||||
unset(Rust_COMPILER)
|
||||
unset(Rust_COMPILER CACHE)
|
||||
endif()
|
||||
|
||||
# Get `rustup` next to the `rustc` proxy
|
||||
get_filename_component(_RUST_PROXIES_PATH "${_Rust_COMPILER_TEST}" DIRECTORY)
|
||||
find_program(Rust_RUSTUP rustup HINTS "${_RUST_PROXIES_PATH}" NO_DEFAULT_PATH)
|
||||
endif()
|
||||
|
||||
unset(_Rust_COMPILER_TEST CACHE)
|
||||
endif()
|
||||
|
||||
# At this point, the only thing we should have evaluated is a path to `rustup` _if that's what the
|
||||
# best source for a Rust toolchain was determined to be_.
|
||||
if (NOT Rust_RUSTUP)
|
||||
set(Rust_RESOLVE_RUSTUP_TOOLCHAINS OFF CACHE BOOL ${_RESOLVE_RUSTUP_TOOLCHAINS_DESC} FORCE)
|
||||
endif()
|
||||
|
||||
# List of user variables that will override any toolchain-provided setting
|
||||
set(_Rust_USER_VARS Rust_COMPILER Rust_CARGO Rust_CARGO_TARGET)
|
||||
set(_Rust_USER_VARS Rust_COMPILER Rust_CARGO Rust_CARGO_TARGET Rust_CARGO_HOST_TARGET)
|
||||
foreach(_VAR ${_Rust_USER_VARS})
|
||||
if (DEFINED "${_VAR}")
|
||||
set(${_VAR}_CACHED "${${_VAR}}" CACHE INTERNAL "Internal cache of ${_VAR}")
|
||||
@@ -21,55 +301,482 @@ foreach(_VAR ${_Rust_USER_VARS})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
if (NOT DEFINED Rust_CARGO_CACHED)
|
||||
find_program(Rust_CARGO_CACHED cargo PATHS "$ENV{HOME}/.cargo/bin")
|
||||
endif()
|
||||
# Discover what toolchains are installed by rustup, if the discovered `rustc` is a proxy from
|
||||
# `rustup` and the user hasn't explicitly requested to override this behavior, then select either
|
||||
# the default toolchain, or the requested toolchain Rust_TOOLCHAIN
|
||||
if (Rust_RESOLVE_RUSTUP_TOOLCHAINS)
|
||||
execute_process(
|
||||
COMMAND
|
||||
"${Rust_RUSTUP}" toolchain list --verbose
|
||||
OUTPUT_VARIABLE _TOOLCHAINS_RAW
|
||||
)
|
||||
|
||||
if (NOT EXISTS "${Rust_CARGO_CACHED}")
|
||||
message(FATAL_ERROR "The cargo executable ${Rust_CARGO_CACHED} was not found. "
|
||||
"Consider setting `Rust_CARGO_CACHED` to the absolute path of `cargo`."
|
||||
)
|
||||
endif()
|
||||
string(REPLACE "\n" ";" _TOOLCHAINS_RAW "${_TOOLCHAINS_RAW}")
|
||||
set(_DISCOVERED_TOOLCHAINS "")
|
||||
set(_DISCOVERED_TOOLCHAINS_RUSTC_PATH "")
|
||||
set(_DISCOVERED_TOOLCHAINS_CARGO_PATH "")
|
||||
set(_DISCOVERED_TOOLCHAINS_VERSION "")
|
||||
|
||||
if (NOT DEFINED Rust_COMPILER_CACHED)
|
||||
find_program(Rust_COMPILER_CACHED rustc PATHS "$ENV{HOME}/.cargo/bin")
|
||||
endif()
|
||||
foreach(_TOOLCHAIN_RAW ${_TOOLCHAINS_RAW})
|
||||
if (_TOOLCHAIN_RAW MATCHES "([a-zA-Z0-9\\._\\-]+)[ \t\r\n]?(\\(default\\) \\(override\\)|\\(default\\)|\\(override\\))?[ \t\r\n]+(.+)")
|
||||
set(_TOOLCHAIN "${CMAKE_MATCH_1}")
|
||||
set(_TOOLCHAIN_TYPE "${CMAKE_MATCH_2}")
|
||||
|
||||
set(_TOOLCHAIN_PATH "${CMAKE_MATCH_3}")
|
||||
set(_TOOLCHAIN_${_TOOLCHAIN}_PATH "${CMAKE_MATCH_3}")
|
||||
|
||||
if (_TOOLCHAIN_TYPE MATCHES ".*\\(default\\).*")
|
||||
set(_TOOLCHAIN_DEFAULT "${_TOOLCHAIN}")
|
||||
endif()
|
||||
|
||||
if (_TOOLCHAIN_TYPE MATCHES ".*\\(override\\).*")
|
||||
set(_TOOLCHAIN_OVERRIDE "${_TOOLCHAIN}")
|
||||
endif()
|
||||
|
||||
execute_process(
|
||||
COMMAND
|
||||
"${_TOOLCHAIN_PATH}/bin/rustc" --version
|
||||
OUTPUT_VARIABLE _TOOLCHAIN_RAW_VERSION
|
||||
)
|
||||
if (_TOOLCHAIN_RAW_VERSION MATCHES "rustc ([0-9]+)\\.([0-9]+)\\.([0-9]+)(-nightly)?")
|
||||
list(APPEND _DISCOVERED_TOOLCHAINS "${_TOOLCHAIN}")
|
||||
list(APPEND _DISCOVERED_TOOLCHAINS_RUSTC_PATH "${_TOOLCHAIN_PATH}/bin/rustc")
|
||||
list(APPEND _DISCOVERED_TOOLCHAINS_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
|
||||
|
||||
# We need this variable to determine the default toolchain, since `foreach(... IN ZIP_LISTS ...)`
|
||||
# requires CMake 3.17. As a workaround we define this variable to lookup the version when iterating
|
||||
# through the `_DISCOVERED_TOOLCHAINS` lists.
|
||||
set(_TOOLCHAIN_${_TOOLCHAIN}_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
|
||||
if(CMAKE_MATCH_4)
|
||||
set(_TOOLCHAIN_${_TOOLCHAIN}_IS_NIGHTLY "TRUE")
|
||||
else()
|
||||
set(_TOOLCHAIN_${_TOOLCHAIN}_IS_NIGHTLY "FALSE")
|
||||
endif()
|
||||
if(EXISTS "${_TOOLCHAIN_PATH}/bin/cargo")
|
||||
list(APPEND _DISCOVERED_TOOLCHAINS_CARGO_PATH "${_TOOLCHAIN_PATH}/bin/cargo")
|
||||
else()
|
||||
list(APPEND _DISCOVERED_TOOLCHAINS_CARGO_PATH "NOTFOUND")
|
||||
endif()
|
||||
else()
|
||||
message(AUTHOR_WARNING "Unexpected output from `rustc --version` for Toolchain `${_TOOLCHAIN}`: "
|
||||
"`${_TOOLCHAIN_RAW_VERSION}`.\n"
|
||||
"Ignoring this toolchain."
|
||||
)
|
||||
endif()
|
||||
else()
|
||||
message(AUTHOR_WARNING "Didn't recognize toolchain: ${_TOOLCHAIN_RAW}. Ignoring this toolchain.\n"
|
||||
"Rustup toolchain list output( `${Rust_RUSTUP} toolchain list --verbose`):\n"
|
||||
"${_TOOLCHAINS_RAW}"
|
||||
)
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
# Expose a list of available rustup toolchains.
|
||||
list(LENGTH _DISCOVERED_TOOLCHAINS _toolchain_len)
|
||||
list(LENGTH _DISCOVERED_TOOLCHAINS_RUSTC_PATH _toolchain_rustc_len)
|
||||
list(LENGTH _DISCOVERED_TOOLCHAINS_CARGO_PATH _toolchain_cargo_len)
|
||||
list(LENGTH _DISCOVERED_TOOLCHAINS_VERSION _toolchain_version_len)
|
||||
if(NOT
|
||||
(_toolchain_len EQUAL _toolchain_rustc_len
|
||||
AND _toolchain_cargo_len EQUAL _toolchain_version_len
|
||||
AND _toolchain_len EQUAL _toolchain_cargo_len)
|
||||
)
|
||||
message(FATAL_ERROR "Internal error - list length mismatch."
|
||||
"List lengths: ${_toolchain_len} toolchains, ${_toolchain_rustc_len} rustc, ${_toolchain_cargo_len} cargo,"
|
||||
" ${_toolchain_version_len} version. The lengths should be the same."
|
||||
)
|
||||
endif()
|
||||
|
||||
set(Rust_RUSTUP_TOOLCHAINS CACHE INTERNAL "List of available Rustup toolchains" "${_DISCOVERED_TOOLCHAINS}")
|
||||
set(Rust_RUSTUP_TOOLCHAINS_RUSTC_PATH
|
||||
CACHE INTERNAL
|
||||
"List of the rustc paths corresponding to the toolchain at the same index in `Rust_RUSTUP_TOOLCHAINS`."
|
||||
"${_DISCOVERED_TOOLCHAINS_RUSTC_PATH}"
|
||||
)
|
||||
set(Rust_RUSTUP_TOOLCHAINS_CARGO_PATH
|
||||
CACHE INTERNAL
|
||||
"List of the cargo paths corresponding to the toolchain at the same index in `Rust_RUSTUP_TOOLCHAINS`. \
|
||||
May also be `NOTFOUND` if the toolchain does not have a cargo executable."
|
||||
"${_DISCOVERED_TOOLCHAINS_CARGO_PATH}"
|
||||
)
|
||||
set(Rust_RUSTUP_TOOLCHAINS_VERSION
|
||||
CACHE INTERNAL
|
||||
"List of the rust toolchain version corresponding to the toolchain at the same index in \
|
||||
`Rust_RUSTUP_TOOLCHAINS`."
|
||||
"${_DISCOVERED_TOOLCHAINS_VERSION}"
|
||||
)
|
||||
|
||||
# Rust_TOOLCHAIN is preferred over a requested version if it is set.
|
||||
if (NOT DEFINED Rust_TOOLCHAIN)
|
||||
if (NOT DEFINED _TOOLCHAIN_OVERRIDE)
|
||||
set(_TOOLCHAIN_SELECTED "${_TOOLCHAIN_DEFAULT}")
|
||||
else()
|
||||
set(_TOOLCHAIN_SELECTED "${_TOOLCHAIN_OVERRIDE}")
|
||||
endif()
|
||||
# Check default toolchain first.
|
||||
_findrust_version_ok("_TOOLCHAIN_${_TOOLCHAIN_SELECTED}_VERSION" _VERSION_OK)
|
||||
if(NOT "${_VERSION_OK}")
|
||||
foreach(_TOOLCHAIN "${_DISCOVERED_TOOLCHAINS}")
|
||||
_findrust_version_ok("_TOOLCHAIN_${_TOOLCHAIN}_VERSION" _VERSION_OK)
|
||||
if("${_VERSION_OK}")
|
||||
set(_TOOLCHAIN_SELECTED "${_TOOLCHAIN}")
|
||||
break()
|
||||
endif()
|
||||
endforeach()
|
||||
# Check if we found a suitable version in the for loop.
|
||||
if(NOT "${_VERSION_OK}")
|
||||
string(REPLACE ";" "\n" _DISCOVERED_TOOLCHAINS "${_DISCOVERED_TOOLCHAINS}")
|
||||
_findrust_failed("Failed to find a Rust toolchain matching the version requirements of "
|
||||
"${Rust_FIND_VERSION}. Available toolchains: ${_DISCOVERED_TOOLCHAINS}")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(Rust_TOOLCHAIN "${_TOOLCHAIN_SELECTED}" CACHE STRING "The rustup toolchain to use")
|
||||
set_property(CACHE Rust_TOOLCHAIN PROPERTY STRINGS "${_DISCOVERED_TOOLCHAINS}")
|
||||
|
||||
if(NOT Rust_FIND_QUIETLY)
|
||||
message(STATUS "Rust Toolchain: ${Rust_TOOLCHAIN}")
|
||||
endif()
|
||||
|
||||
if (NOT Rust_TOOLCHAIN IN_LIST _DISCOVERED_TOOLCHAINS)
|
||||
# If the precise toolchain wasn't found, try appending the default host
|
||||
execute_process(
|
||||
COMMAND
|
||||
"${Rust_RUSTUP}" show
|
||||
RESULT_VARIABLE _SHOW_RESULT
|
||||
OUTPUT_VARIABLE _SHOW_RAW
|
||||
)
|
||||
if(NOT "${_SHOW_RESULT}" EQUAL "0")
|
||||
_findrust_failed("Command `${Rust_RUSTUP} show` failed")
|
||||
endif()
|
||||
|
||||
if (_SHOW_RAW MATCHES "Default host: ([a-zA-Z0-9_\\-]*)\n")
|
||||
set(_DEFAULT_HOST "${CMAKE_MATCH_1}")
|
||||
else()
|
||||
_findrust_failed("Failed to parse \"Default host\" from `${Rust_RUSTUP} show`. Got: ${_SHOW_RAW}")
|
||||
endif()
|
||||
|
||||
if (NOT "${Rust_TOOLCHAIN}-${_DEFAULT_HOST}" IN_LIST _DISCOVERED_TOOLCHAINS)
|
||||
set(_NOT_FOUND_MESSAGE "Could not find toolchain '${Rust_TOOLCHAIN}'\n"
|
||||
"Available toolchains:\n"
|
||||
)
|
||||
foreach(_TOOLCHAIN ${_DISCOVERED_TOOLCHAINS})
|
||||
list(APPEND _NOT_FOUND_MESSAGE " `${_TOOLCHAIN}`\n")
|
||||
endforeach()
|
||||
_findrust_failed(${_NOT_FOUND_MESSAGE})
|
||||
endif()
|
||||
|
||||
set(_RUSTUP_TOOLCHAIN_FULL "${Rust_TOOLCHAIN}-${_DEFAULT_HOST}")
|
||||
else()
|
||||
set(_RUSTUP_TOOLCHAIN_FULL "${Rust_TOOLCHAIN}")
|
||||
endif()
|
||||
|
||||
set(_RUST_TOOLCHAIN_PATH "${_TOOLCHAIN_${_RUSTUP_TOOLCHAIN_FULL}_PATH}")
|
||||
if(NOT "${Rust_FIND_QUIETLY}")
|
||||
message(VERBOSE "Rust toolchain ${_RUSTUP_TOOLCHAIN_FULL}")
|
||||
message(VERBOSE "Rust toolchain path ${_RUST_TOOLCHAIN_PATH}")
|
||||
endif()
|
||||
|
||||
# Is overridden if the user specifies `Rust_COMPILER` explicitly.
|
||||
find_program(
|
||||
Rust_COMPILER_CACHED
|
||||
rustc
|
||||
HINTS "${_RUST_TOOLCHAIN_PATH}/bin"
|
||||
NO_DEFAULT_PATH)
|
||||
elseif (Rust_RUSTUP)
|
||||
get_filename_component(_RUST_TOOLCHAIN_PATH "${Rust_RUSTUP}" DIRECTORY)
|
||||
get_filename_component(_RUST_TOOLCHAIN_PATH "${_RUST_TOOLCHAIN_PATH}" DIRECTORY)
|
||||
find_program(
|
||||
Rust_COMPILER_CACHED
|
||||
rustc
|
||||
HINTS "${_RUST_TOOLCHAIN_PATH}/bin"
|
||||
NO_DEFAULT_PATH)
|
||||
else()
|
||||
find_program(Rust_COMPILER_CACHED rustc)
|
||||
if (EXISTS "${Rust_COMPILER_CACHED}")
|
||||
# rustc is expected to be at `<toolchain_path>/bin/rustc`.
|
||||
get_filename_component(_RUST_TOOLCHAIN_PATH "${Rust_COMPILER_CACHED}" DIRECTORY)
|
||||
get_filename_component(_RUST_TOOLCHAIN_PATH "${_RUST_TOOLCHAIN_PATH}" DIRECTORY)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (NOT EXISTS "${Rust_COMPILER_CACHED}")
|
||||
message(FATAL_ERROR "The rustc executable ${Rust_COMPILER} was not found. "
|
||||
"Consider setting `Rust_COMPILER` to the absolute path of `rustc`."
|
||||
set(_NOT_FOUND_MESSAGE "The rustc executable was not found. "
|
||||
"Rust not installed or ~/.cargo/bin not added to path?\n"
|
||||
"Hint: Consider setting `Rust_COMPILER` to the absolute path of `rustc`."
|
||||
)
|
||||
_findrust_failed(${_NOT_FOUND_MESSAGE})
|
||||
endif()
|
||||
|
||||
if (Rust_RESOLVE_RUSTUP_TOOLCHAINS)
|
||||
set(_NOT_FOUND_MESSAGE "Rust was detected to be managed by rustup, but failed to find `cargo` "
|
||||
"next to `rustc` in `${_RUST_TOOLCHAIN_PATH}/bin`. This can happen for custom toolchains, "
|
||||
"if cargo was not built. "
|
||||
"Please manually specify the path to a compatible `cargo` by setting `Rust_CARGO`."
|
||||
)
|
||||
find_program(
|
||||
Rust_CARGO_CACHED
|
||||
cargo
|
||||
HINTS "${_RUST_TOOLCHAIN_PATH}/bin"
|
||||
NO_DEFAULT_PATH
|
||||
)
|
||||
# note: maybe can use find_package_handle_standard_args here, if we remove the _CACHED postfix.
|
||||
# not sure why that is here...
|
||||
if(NOT EXISTS "${Rust_CARGO_CACHED}")
|
||||
_findrust_failed(${_NOT_FOUND_MESSAGE})
|
||||
endif()
|
||||
set(Rust_TOOLCHAIN_IS_RUSTUP_MANAGED TRUE CACHE INTERNAL "" FORCE)
|
||||
else()
|
||||
set(_NOT_FOUND_MESSAGE "Failed to find `cargo` in PATH and `${_RUST_TOOLCHAIN_PATH}/bin`.\n"
|
||||
"Please ensure cargo is in PATH or manually specify the path to a compatible `cargo` by "
|
||||
"setting `Rust_CARGO`."
|
||||
)
|
||||
# On some systems (e.g. NixOS) cargo is not managed by rustup and also not next to rustc.
|
||||
find_program(
|
||||
Rust_CARGO_CACHED
|
||||
cargo
|
||||
HINTS "${_RUST_TOOLCHAIN_PATH}/bin"
|
||||
)
|
||||
# note: maybe can use find_package_handle_standard_args here, if we remove the _CACHED postfix.
|
||||
# not sure why that is here...
|
||||
if(NOT EXISTS "${Rust_CARGO_CACHED}")
|
||||
_findrust_failed(${_NOT_FOUND_MESSAGE})
|
||||
endif()
|
||||
endif()
|
||||
|
||||
execute_process(
|
||||
COMMAND "${Rust_CARGO_CACHED}" --version --verbose
|
||||
OUTPUT_VARIABLE _CARGO_VERSION_RAW
|
||||
RESULT_VARIABLE _CARGO_VERSION_RESULT
|
||||
)
|
||||
# todo: check if cargo is a required component!
|
||||
if(NOT ( "${_CARGO_VERSION_RESULT}" EQUAL "0" ))
|
||||
_findrust_failed("Failed to get cargo version.\n"
|
||||
"`${Rust_CARGO_CACHED} --version` failed with error: `${_CARGO_VERSION_RESULT}"
|
||||
)
|
||||
endif()
|
||||
|
||||
# todo: don't set cache variables here, but let find_package_handle_standard_args do the promotion
|
||||
# later.
|
||||
if (_CARGO_VERSION_RAW MATCHES "cargo ([0-9]+)\\.([0-9]+)\\.([0-9]+)")
|
||||
set(Rust_CARGO_VERSION_MAJOR "${CMAKE_MATCH_1}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_CARGO_VERSION_MINOR "${CMAKE_MATCH_2}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_CARGO_VERSION_PATCH "${CMAKE_MATCH_3}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_CARGO_VERSION "${Rust_CARGO_VERSION_MAJOR}.${Rust_CARGO_VERSION_MINOR}.${Rust_CARGO_VERSION_PATCH}" CACHE INTERNAL "" FORCE)
|
||||
# Workaround for the version strings where the `cargo ` prefix is missing.
|
||||
elseif(_CARGO_VERSION_RAW MATCHES "([0-9]+)\\.([0-9]+)\\.([0-9]+)")
|
||||
set(Rust_CARGO_VERSION_MAJOR "${CMAKE_MATCH_1}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_CARGO_VERSION_MINOR "${CMAKE_MATCH_2}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_CARGO_VERSION_PATCH "${CMAKE_MATCH_3}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_CARGO_VERSION "${Rust_CARGO_VERSION_MAJOR}.${Rust_CARGO_VERSION_MINOR}.${Rust_CARGO_VERSION_PATCH}" CACHE INTERNAL "" FORCE)
|
||||
else()
|
||||
_findrust_failed(
|
||||
"Failed to parse cargo version. `cargo --version` evaluated to (${_CARGO_VERSION_RAW}). "
|
||||
"Expected a <Major>.<Minor>.<Patch> version triple."
|
||||
)
|
||||
endif()
|
||||
|
||||
# Figure out the target by just using the host target.
|
||||
# If you want to cross-compile, you'll have to set Rust_CARGO_TARGET
|
||||
if(NOT Rust_CARGO_TARGET_CACHED)
|
||||
execute_process(
|
||||
execute_process(
|
||||
COMMAND "${Rust_COMPILER_CACHED}" --version --verbose
|
||||
OUTPUT_VARIABLE _RUSTC_VERSION_RAW
|
||||
RESULT_VARIABLE _RUSTC_VERSION_RESULT
|
||||
)
|
||||
)
|
||||
|
||||
if(NOT ( "${_RUSTC_VERSION_RESULT}" EQUAL "0" ))
|
||||
message(FATAL_ERROR "Failed to get rustc version.\n"
|
||||
"${Rust_COMPILER} --version failed with error: `${_RUSTC_VERSION_RESULT}`")
|
||||
endif()
|
||||
if(NOT ( "${_RUSTC_VERSION_RESULT}" EQUAL "0" ))
|
||||
_findrust_failed("Failed to get rustc version.\n"
|
||||
"${Rust_COMPILER_CACHED} --version failed with error: `${_RUSTC_VERSION_RESULT}`")
|
||||
endif()
|
||||
|
||||
if (_RUSTC_VERSION_RAW MATCHES "host: ([a-zA-Z0-9_\\-]*)\n")
|
||||
if (_RUSTC_VERSION_RAW MATCHES "rustc ([0-9]+)\\.([0-9]+)\\.([0-9]+)(-nightly)?")
|
||||
set(Rust_VERSION_MAJOR "${CMAKE_MATCH_1}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_VERSION_MINOR "${CMAKE_MATCH_2}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_VERSION_PATCH "${CMAKE_MATCH_3}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_VERSION "${Rust_VERSION_MAJOR}.${Rust_VERSION_MINOR}.${Rust_VERSION_PATCH}" CACHE INTERNAL "" FORCE)
|
||||
if(CMAKE_MATCH_4)
|
||||
set(Rust_IS_NIGHTLY 1 CACHE INTERNAL "" FORCE)
|
||||
else()
|
||||
set(Rust_IS_NIGHTLY 0 CACHE INTERNAL "" FORCE)
|
||||
endif()
|
||||
else()
|
||||
_findrust_failed("Failed to parse rustc version. `${Rust_COMPILER_CACHED} --version --verbose` "
|
||||
"evaluated to:\n`${_RUSTC_VERSION_RAW}`"
|
||||
)
|
||||
endif()
|
||||
|
||||
if (_RUSTC_VERSION_RAW MATCHES "host: ([a-zA-Z0-9_\\-]*)\n")
|
||||
set(Rust_DEFAULT_HOST_TARGET "${CMAKE_MATCH_1}")
|
||||
else()
|
||||
message(FATAL_ERROR
|
||||
"Failed to parse rustc host target. `rustc --version --verbose` evaluated to:\n${_RUSTC_VERSION_RAW}"
|
||||
set(Rust_CARGO_HOST_TARGET_CACHED "${Rust_DEFAULT_HOST_TARGET}" CACHE STRING "Host triple")
|
||||
else()
|
||||
_findrust_failed(
|
||||
"Failed to parse rustc host target. `rustc --version --verbose` evaluated to:\n${_RUSTC_VERSION_RAW}"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(CMAKE_CROSSCOMPILING)
|
||||
message(FATAL_ERROR "CMake is in cross-compiling mode."
|
||||
"Manually set `Rust_CARGO_TARGET`."
|
||||
if (_RUSTC_VERSION_RAW MATCHES "LLVM version: ([0-9]+)\\.([0-9]+)(\\.([0-9]+))?")
|
||||
set(Rust_LLVM_VERSION_MAJOR "${CMAKE_MATCH_1}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_LLVM_VERSION_MINOR "${CMAKE_MATCH_2}" CACHE INTERNAL "" FORCE)
|
||||
# With the Rust toolchain 1.44.1 the reported LLVM version is 9.0, i.e. without a patch version.
|
||||
# Since cmake regex does not support non-capturing groups, just ignore Match 3.
|
||||
set(Rust_LLVM_VERSION_PATCH "${CMAKE_MATCH_4}" CACHE INTERNAL "" FORCE)
|
||||
set(Rust_LLVM_VERSION "${Rust_LLVM_VERSION_MAJOR}.${Rust_LLVM_VERSION_MINOR}.${Rust_LLVM_VERSION_PATCH}" CACHE INTERNAL "" FORCE)
|
||||
elseif(NOT Rust_FIND_QUIETLY)
|
||||
message(
|
||||
WARNING
|
||||
"Failed to parse rustc LLVM version. `rustc --version --verbose` evaluated to:\n${_RUSTC_VERSION_RAW}"
|
||||
)
|
||||
endif()
|
||||
set(Rust_CARGO_TARGET_CACHED "${Rust_DEFAULT_HOST_TARGET}" CACHE STRING "Target triple")
|
||||
endif()
|
||||
|
||||
if (NOT Rust_CARGO_TARGET_CACHED)
|
||||
unset(_CARGO_ARCH)
|
||||
unset(_CARGO_ABI)
|
||||
if (WIN32)
|
||||
if (CMAKE_VS_PLATFORM_NAME)
|
||||
string(TOLOWER "${CMAKE_VS_PLATFORM_NAME}" LOWER_VS_PLATFORM_NAME)
|
||||
if ("${LOWER_VS_PLATFORM_NAME}" STREQUAL "win32")
|
||||
set(_CARGO_ARCH i686)
|
||||
elseif("${LOWER_VS_PLATFORM_NAME}" STREQUAL "x64")
|
||||
set(_CARGO_ARCH x86_64)
|
||||
elseif("${LOWER_VS_PLATFORM_NAME}" STREQUAL "arm64")
|
||||
set(_CARGO_ARCH aarch64)
|
||||
else()
|
||||
message(WARNING "VS Platform '${CMAKE_VS_PLATFORM_NAME}' not recognized")
|
||||
endif()
|
||||
endif()
|
||||
# Fallback path
|
||||
if(NOT DEFINED _CARGO_ARCH)
|
||||
# Possible values for windows when not cross-compiling taken from here:
|
||||
# https://learn.microsoft.com/en-us/windows/win32/winprog64/wow64-implementation-details
|
||||
# When cross-compiling the user is expected to supply the value, so we match more variants.
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(AMD64|amd64|x86_64)$")
|
||||
set(_CARGO_ARCH x86_64)
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(ARM64|arm64|aarch64)$")
|
||||
set(_CARGO_ARCH aarch64)
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(X86|x86|i686)$")
|
||||
set(_CARGO_ARCH i686)
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "i586")
|
||||
set(_CARGO_ARCH i586)
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "IA64")
|
||||
message(FATAL_ERROR "No rust target for Intel Itanium.")
|
||||
elseif(NOT "${CMAKE_SYSTEM_PROCESSOR}")
|
||||
message(WARNING "Failed to detect target architecture. Please set `CMAKE_SYSTEM_PROCESSOR`"
|
||||
" to your target architecture or set `Rust_CARGO_TARGET` to your cargo target triple."
|
||||
)
|
||||
else()
|
||||
message(WARNING "Failed to detect target architecture. Please set "
|
||||
"`Rust_CARGO_TARGET` to your cargo target triple."
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(_CARGO_VENDOR "pc-windows")
|
||||
|
||||
# The MSVC Generators will always target the msvc ABI.
|
||||
# For other generators we check the compiler ID and compiler target (if present)
|
||||
# If no compiler is set and we are not cross-compiling then we just choose the
|
||||
# default rust host target.
|
||||
if(DEFINED MSVC
|
||||
OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC"
|
||||
OR "${CMAKE_C_COMPILER_ID}" STREQUAL "MSVC"
|
||||
OR "${CMAKE_CXX_COMPILER_TARGET}" MATCHES "-msvc$"
|
||||
OR "${CMAKE_C_COMPILER_TARGET}" MATCHES "-msvc$"
|
||||
)
|
||||
set(_CARGO_ABI msvc)
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU"
|
||||
OR "${CMAKE_C_COMPILER_ID}" STREQUAL "GNU"
|
||||
OR "${CMAKE_CXX_COMPILER_TARGET}" MATCHES "-gnu$"
|
||||
OR "${CMAKE_C_COMPILER_TARGET}" MATCHES "-gnu$"
|
||||
OR (NOT CMAKE_CROSSCOMPILING AND "${Rust_DEFAULT_HOST_TARGET}" MATCHES "-gnu$")
|
||||
)
|
||||
set(_CARGO_ABI gnu)
|
||||
elseif(NOT "${CMAKE_CROSSCOMPILING}" AND "${Rust_DEFAULT_HOST_TARGET}" MATCHES "-msvc$")
|
||||
# We first check if the gnu branch matches to ensure this fallback is only used
|
||||
# if no compiler is enabled.
|
||||
set(_CARGO_ABI msvc)
|
||||
else()
|
||||
message(WARNING "Could not determine the target ABI. Please specify `Rust_CARGO_TARGET` manually.")
|
||||
endif()
|
||||
|
||||
if(DEFINED _CARGO_ARCH AND DEFINED _CARGO_VENDOR AND DEFINED _CARGO_ABI)
|
||||
set(Rust_CARGO_TARGET_CACHED "${_CARGO_ARCH}-${_CARGO_VENDOR}-${_CARGO_ABI}"
|
||||
CACHE STRING "Target triple")
|
||||
endif()
|
||||
elseif (ANDROID)
|
||||
if (CMAKE_ANDROID_ARCH_ABI STREQUAL armeabi-v7a)
|
||||
if (CMAKE_ANDROID_ARM_MODE)
|
||||
set(_Rust_ANDROID_TARGET armv7-linux-androideabi)
|
||||
else ()
|
||||
set(_Rust_ANDROID_TARGET thumbv7neon-linux-androideabi)
|
||||
endif()
|
||||
elseif (CMAKE_ANDROID_ARCH_ABI STREQUAL arm64-v8a)
|
||||
set(_Rust_ANDROID_TARGET aarch64-linux-android)
|
||||
elseif (CMAKE_ANDROID_ARCH_ABI STREQUAL x86)
|
||||
set(_Rust_ANDROID_TARGET i686-linux-android)
|
||||
elseif (CMAKE_ANDROID_ARCH_ABI STREQUAL x86_64)
|
||||
set(_Rust_ANDROID_TARGET x86_64-linux-android)
|
||||
endif()
|
||||
|
||||
if (_Rust_ANDROID_TARGET)
|
||||
set(Rust_CARGO_TARGET_CACHED "${_Rust_ANDROID_TARGET}" CACHE STRING "Target triple")
|
||||
endif()
|
||||
endif()
|
||||
# Fallback to the default host target
|
||||
if(NOT Rust_CARGO_TARGET_CACHED)
|
||||
if(CMAKE_CROSSCOMPILING)
|
||||
message(WARNING "CMake is in cross-compiling mode, but the cargo target-triple could not be inferred."
|
||||
"Falling back to the default host target. Please consider manually setting `Rust_CARGO_TARGET`."
|
||||
)
|
||||
endif()
|
||||
set(Rust_CARGO_TARGET_CACHED "${Rust_DEFAULT_HOST_TARGET}" CACHE STRING "Target triple")
|
||||
endif()
|
||||
|
||||
message(STATUS "Rust Target: ${Rust_CARGO_TARGET_CACHED}")
|
||||
endif()
|
||||
|
||||
if(Rust_CARGO_TARGET_CACHED STREQUAL Rust_DEFAULT_HOST_TARGET)
|
||||
set(Rust_CROSSCOMPILING FALSE CACHE INTERNAL "Rust is configured for cross-compiling")
|
||||
else()
|
||||
set(Rust_CROSSCOMPILING TRUE CACHE INTERNAL "Rust is configured for cross-compiling")
|
||||
endif()
|
||||
|
||||
_corrosion_parse_target_triple("${Rust_CARGO_TARGET_CACHED}" rust_arch rust_vendor rust_os rust_env)
|
||||
_corrosion_parse_target_triple("${Rust_CARGO_HOST_TARGET_CACHED}" rust_host_arch rust_host_vendor rust_host_os rust_host_env)
|
||||
|
||||
set(Rust_CARGO_TARGET_ARCH "${rust_arch}" CACHE INTERNAL "Target architecture")
|
||||
set(Rust_CARGO_TARGET_VENDOR "${rust_vendor}" CACHE INTERNAL "Target vendor")
|
||||
set(Rust_CARGO_TARGET_OS "${rust_os}" CACHE INTERNAL "Target Operating System")
|
||||
set(Rust_CARGO_TARGET_ENV "${rust_env}" CACHE INTERNAL "Target environment")
|
||||
|
||||
set(Rust_CARGO_HOST_ARCH "${rust_host_arch}" CACHE INTERNAL "Host architecture")
|
||||
set(Rust_CARGO_HOST_VENDOR "${rust_host_vendor}" CACHE INTERNAL "Host vendor")
|
||||
set(Rust_CARGO_HOST_OS "${rust_host_os}" CACHE INTERNAL "Host Operating System")
|
||||
set(Rust_CARGO_HOST_ENV "${rust_host_env}" CACHE INTERNAL "Host environment")
|
||||
|
||||
if(NOT DEFINED CACHE{Rust_CARGO_TARGET_LINK_NATIVE_LIBS})
|
||||
message(STATUS "Determining required link libraries for target ${Rust_CARGO_TARGET_CACHED}")
|
||||
unset(required_native_libs)
|
||||
_corrosion_determine_libs_new("${Rust_CARGO_TARGET_CACHED}" required_native_libs)
|
||||
if(DEFINED required_native_libs)
|
||||
message(STATUS "Required static libs for target ${Rust_CARGO_TARGET_CACHED}: ${required_native_libs}" )
|
||||
endif()
|
||||
# In very recent corrosion versions it is possible to override the rust compiler version
|
||||
# per target, so to be totally correct we would need to determine the libraries for
|
||||
# every installed Rust version, that the user could choose from.
|
||||
# In practice there aren't likely going to be any major differences, so we just do it once
|
||||
# for the target and once for the host target (if cross-compiling).
|
||||
set(Rust_CARGO_TARGET_LINK_NATIVE_LIBS "${required_native_libs}" CACHE INTERNAL
|
||||
"Required native libraries when linking Rust static libraries")
|
||||
endif()
|
||||
|
||||
if(Rust_CROSSCOMPILING AND NOT DEFINED CACHE{Rust_CARGO_HOST_TARGET_LINK_NATIVE_LIBS})
|
||||
message(STATUS "Determining required link libraries for target ${Rust_CARGO_HOST_TARGET_CACHED}")
|
||||
unset(host_libs)
|
||||
_corrosion_determine_libs_new("${Rust_CARGO_HOST_TARGET_CACHED}" host_libs)
|
||||
if(DEFINED host_libs)
|
||||
message(STATUS "Required static libs for host target ${Rust_CARGO_HOST_TARGET_CACHED}: ${host_libs}" )
|
||||
endif()
|
||||
set(Rust_CARGO_HOST_TARGET_LINK_NATIVE_LIBS "${host_libs}" CACHE INTERNAL
|
||||
"Required native libraries when linking Rust static libraries for the host target")
|
||||
endif()
|
||||
|
||||
# Set the input variables as non-cache variables so that the variables are available after
|
||||
@@ -82,6 +789,24 @@ endforeach()
|
||||
|
||||
find_package_handle_standard_args(
|
||||
Rust
|
||||
REQUIRED_VARS Rust_COMPILER Rust_CARGO Rust_CARGO_TARGET
|
||||
REQUIRED_VARS Rust_COMPILER Rust_VERSION Rust_CARGO Rust_CARGO_VERSION Rust_CARGO_TARGET Rust_CARGO_HOST_TARGET
|
||||
VERSION_VAR Rust_VERSION
|
||||
)
|
||||
|
||||
|
||||
if(NOT TARGET Rust::Rustc)
|
||||
add_executable(Rust::Rustc IMPORTED GLOBAL)
|
||||
set_property(
|
||||
TARGET Rust::Rustc
|
||||
PROPERTY IMPORTED_LOCATION "${Rust_COMPILER_CACHED}"
|
||||
)
|
||||
|
||||
add_executable(Rust::Cargo IMPORTED GLOBAL)
|
||||
set_property(
|
||||
TARGET Rust::Cargo
|
||||
PROPERTY IMPORTED_LOCATION "${Rust_CARGO_CACHED}"
|
||||
)
|
||||
set(Rust_FOUND true)
|
||||
endif()
|
||||
|
||||
list(POP_BACK CMAKE_MESSAGE_CONTEXT)
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
set(CMAKE_INSTALL_MESSAGE NEVER)
|
||||
|
||||
set(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/fish ${CMAKE_CURRENT_BINARY_DIR}/fish_indent ${CMAKE_CURRENT_BINARY_DIR}/fish_key_reader)
|
||||
|
||||
set(prefix ${CMAKE_INSTALL_PREFIX})
|
||||
set(bindir ${CMAKE_INSTALL_BINDIR})
|
||||
set(sysconfdir ${CMAKE_INSTALL_SYSCONFDIR})
|
||||
@@ -28,19 +30,17 @@ set(extra_confdir
|
||||
|
||||
|
||||
# These are the man pages that go in system manpath; all manpages go in the fish-specific manpath.
|
||||
set(MANUALS ${SPHINX_OUTPUT_DIR}/man/man1/fish.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish_indent.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish_key_reader.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-doc.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-tutorial.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-language.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-interactive.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-terminal-compatibility.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-completions.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-prompt-tutorial.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-for-bash-users.1
|
||||
${SPHINX_OUTPUT_DIR}/man/man1/fish-faq.1
|
||||
)
|
||||
set(MANUALS ${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish_indent.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish_key_reader.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-doc.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-tutorial.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-language.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-interactive.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-completions.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-prompt-tutorial.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-for-bash-users.1
|
||||
${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/fish-faq.1)
|
||||
|
||||
# Determine which man page we don't want to install.
|
||||
# On OS X, don't install a man page for open, since we defeat fish's open
|
||||
@@ -73,30 +73,22 @@ function(FISH_TRY_CREATE_DIRS)
|
||||
endforeach()
|
||||
endfunction(FISH_TRY_CREATE_DIRS)
|
||||
|
||||
install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/fish
|
||||
install(PROGRAMS ${PROGRAMS}
|
||||
PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ
|
||||
GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
|
||||
DESTINATION ${bindir})
|
||||
|
||||
if(NOT IS_ABSOLUTE ${bindir})
|
||||
set(abs_bindir "\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${bindir}")
|
||||
else()
|
||||
set(abs_bindir "\$ENV{DESTDIR}${bindir}")
|
||||
endif()
|
||||
install(CODE "file(CREATE_LINK ${abs_bindir}/fish ${abs_bindir}/fish_indent)")
|
||||
install(CODE "file(CREATE_LINK ${abs_bindir}/fish ${abs_bindir}/fish_key_reader)")
|
||||
|
||||
fish_create_dirs(${sysconfdir}/fish/conf.d ${sysconfdir}/fish/completions
|
||||
${sysconfdir}/fish/functions)
|
||||
install(FILES etc/config.fish DESTINATION ${sysconfdir}/fish/)
|
||||
|
||||
fish_create_dirs(${rel_datadir}/fish ${rel_datadir}/fish/completions
|
||||
${rel_datadir}/fish/functions
|
||||
${rel_datadir}/fish/functions ${rel_datadir}/fish/groff
|
||||
${rel_datadir}/fish/man/man1 ${rel_datadir}/fish/tools
|
||||
${rel_datadir}/fish/tools/web_config
|
||||
${rel_datadir}/fish/tools/web_config/js
|
||||
${rel_datadir}/fish/prompts
|
||||
${rel_datadir}/fish/themes
|
||||
${rel_datadir}/fish/tools/web_config/sample_prompts
|
||||
${rel_datadir}/fish/tools/web_config/themes
|
||||
)
|
||||
|
||||
configure_file(share/__fish_build_paths.fish.in share/__fish_build_paths.fish)
|
||||
@@ -114,9 +106,9 @@ configure_file(fish.pc.in fish.pc.noversion @ONLY)
|
||||
add_custom_command(OUTPUT fish.pc
|
||||
COMMAND sed '/Version/d' fish.pc.noversion > fish.pc
|
||||
COMMAND printf "Version: " >> fish.pc
|
||||
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build_tools/git_version_gen.sh >> fish.pc
|
||||
COMMAND sed 's/FISH_BUILD_VERSION=//\;s/\"//g' ${FBVF} >> fish.pc
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
|
||||
DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/fish.pc.noversion)
|
||||
DEPENDS CHECK-FISH-BUILD-VERSION-FILE ${CMAKE_CURRENT_BINARY_DIR}/fish.pc.noversion)
|
||||
|
||||
add_custom_target(build_fish_pc ALL DEPENDS fish.pc)
|
||||
|
||||
@@ -131,23 +123,18 @@ install(DIRECTORY share/functions/
|
||||
DESTINATION ${rel_datadir}/fish/functions
|
||||
FILES_MATCHING PATTERN "*.fish")
|
||||
|
||||
install(DIRECTORY share/prompts/
|
||||
DESTINATION ${rel_datadir}/fish/prompts
|
||||
FILES_MATCHING PATTERN "*.fish")
|
||||
|
||||
install(DIRECTORY share/themes/
|
||||
DESTINATION ${rel_datadir}/fish/themes
|
||||
FILES_MATCHING PATTERN "*.theme")
|
||||
install(DIRECTORY share/groff
|
||||
DESTINATION ${rel_datadir}/fish)
|
||||
|
||||
# CONDEMNED_PAGE is managed by the conditional above
|
||||
# Building the man pages is optional: if sphinx isn't installed, they're not built
|
||||
install(DIRECTORY ${SPHINX_OUTPUT_DIR}/man/man1/
|
||||
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/user_doc/man/man1/
|
||||
DESTINATION ${rel_datadir}/fish/man/man1
|
||||
FILES_MATCHING
|
||||
PATTERN "*.1"
|
||||
PATTERN ${CONDEMNED_PAGE} EXCLUDE)
|
||||
|
||||
install(PROGRAMS share/tools/create_manpage_completions.py
|
||||
install(PROGRAMS share/tools/create_manpage_completions.py share/tools/deroff.py
|
||||
DESTINATION ${rel_datadir}/fish/tools/)
|
||||
|
||||
install(DIRECTORY share/tools/web_config
|
||||
@@ -157,16 +144,34 @@ install(DIRECTORY share/tools/web_config
|
||||
PATTERN "*.css"
|
||||
PATTERN "*.html"
|
||||
PATTERN "*.py"
|
||||
PATTERN "*.js")
|
||||
PATTERN "*.js"
|
||||
PATTERN "*.theme"
|
||||
PATTERN "*.fish")
|
||||
|
||||
# Building the man pages is optional: if Sphinx isn't installed, they're not built
|
||||
install(FILES ${MANUALS} DESTINATION ${mandir}/man1/ OPTIONAL)
|
||||
install(DIRECTORY ${SPHINX_OUTPUT_DIR}/html/ # Trailing slash is important!
|
||||
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/user_doc/html/ # Trailing slash is important!
|
||||
DESTINATION ${docdir} OPTIONAL)
|
||||
install(FILES CHANGELOG.rst DESTINATION ${docdir})
|
||||
|
||||
# These files are built by cmake/gettext.cmake, but using GETTEXT_PROCESS_PO_FILES's
|
||||
# INSTALL_DESTINATION leads to them being installed as ${lang}.gmo, not fish.mo
|
||||
# The ${languages} array comes from cmake/gettext.cmake
|
||||
if(GETTEXT_FOUND)
|
||||
foreach(lang ${languages})
|
||||
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${lang}.gmo DESTINATION
|
||||
${CMAKE_INSTALL_LOCALEDIR}/${lang}/LC_MESSAGES/ RENAME fish.mo)
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if (NOT APPLE)
|
||||
install(FILES fish.desktop DESTINATION ${rel_datadir}/applications)
|
||||
install(FILES ${SPHINX_SRC_DIR}/python_docs_theme/static/fish.png DESTINATION ${rel_datadir}/pixmaps)
|
||||
endif()
|
||||
|
||||
# Group install targets into a InstallTargets folder
|
||||
set_property(TARGET build_fish_pc
|
||||
set_property(TARGET build_fish_pc CHECK-FISH-BUILD-VERSION-FILE
|
||||
tests_buildroot_target
|
||||
PROPERTY FOLDER cmake/InstallTargets)
|
||||
|
||||
# Make a target build_root that installs into the buildroot directory, for testing.
|
||||
|
||||
@@ -24,15 +24,15 @@ add_executable(fish_macapp EXCLUDE_FROM_ALL
|
||||
|
||||
# Compute the version. Note this is done at generation time, not build time,
|
||||
# so cmake must be re-run after version changes for the app to be updated. But
|
||||
# generally this will be run by make_macos_pkg.sh which always re-runs cmake.
|
||||
# generally this will be run by make_pkg.sh which always re-runs cmake.
|
||||
execute_process(
|
||||
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build_tools/git_version_gen.sh
|
||||
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build_tools/git_version_gen.sh --stdout
|
||||
COMMAND cut -d- -f1
|
||||
OUTPUT_VARIABLE FISH_SHORT_VERSION
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
|
||||
|
||||
# Note CMake appends .app, so the real output name will be fish.app.
|
||||
# Note CMake appends .app, so the real output name will be fish.app.
|
||||
# This target does not include the 'base' resource.
|
||||
set_target_properties(fish_macapp PROPERTIES OUTPUT_NAME "fish")
|
||||
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
include(FeatureSummary)
|
||||
# Trying to build using the resolved toolchain causes all kinds of weird errors
|
||||
# Just let rustup do its job
|
||||
set(Rust_RESOLVE_RUSTUP_TOOLCHAINS Off)
|
||||
|
||||
include(FindRust)
|
||||
find_package(Rust REQUIRED)
|
||||
|
||||
set(FISH_RUST_BUILD_DIR "${CMAKE_BINARY_DIR}/cargo")
|
||||
set(FISH_RUST_BUILD_DIR "${CMAKE_BINARY_DIR}/cargo/build")
|
||||
|
||||
if(DEFINED ASAN)
|
||||
list(APPEND CARGO_FLAGS "-Z" "build-std")
|
||||
list(APPEND FISH_CRATE_FEATURES "asan")
|
||||
endif()
|
||||
if(DEFINED TSAN)
|
||||
list(APPEND CARGO_FLAGS "-Z" "build-std")
|
||||
list(APPEND FISH_CARGO_FEATURES_LIST "tsan")
|
||||
list(APPEND FISH_CRATE_FEATURES "tsan")
|
||||
endif()
|
||||
|
||||
if (Rust_CARGO_TARGET)
|
||||
@@ -18,19 +22,40 @@ else()
|
||||
set(rust_target_dir "${FISH_RUST_BUILD_DIR}/${Rust_CARGO_HOST_TARGET}")
|
||||
endif()
|
||||
|
||||
set(rust_profile $<IF:$<CONFIG:Debug>,debug,$<IF:$<CONFIG:RelWithDebInfo>,release-with-debug,release>>)
|
||||
set(rust_profile $<IF:$<CONFIG:Debug>,debug,release>)
|
||||
set(rust_debugflags "$<$<CONFIG:Debug>:-g>$<$<CONFIG:RelWithDebInfo>:-g>")
|
||||
|
||||
if (NOT DEFINED WITH_MESSAGE_LOCALIZATION) # Don't check for legacy options if the new one is defined, to help bisecting.
|
||||
if(DEFINED WITH_GETTEXT)
|
||||
message(FATAL_ERROR "the WITH_GETTEXT option is no longer supported, use -DWITH_MESSAGE_LOCALIZATION=ON|OFF")
|
||||
endif()
|
||||
endif()
|
||||
option(WITH_MESSAGE_LOCALIZATION "Build with localization support. Requires `msgfmt` to work." ON)
|
||||
# Enable gettext feature unless explicitly disabled.
|
||||
if(NOT DEFINED WITH_MESSAGE_LOCALIZATION OR "${WITH_MESSAGE_LOCALIZATION}")
|
||||
list(APPEND FISH_CARGO_FEATURES_LIST "localize-messages")
|
||||
|
||||
# Temporary hack to propogate CMake flags/options to build.rs. We need to get CMake to evaluate the
|
||||
# truthiness of the strings if they are set.
|
||||
set(CMAKE_WITH_GETTEXT "1")
|
||||
if(DEFINED WITH_GETTEXT AND NOT "${WITH_GETTEXT}")
|
||||
set(CMAKE_WITH_GETTEXT "0")
|
||||
endif()
|
||||
|
||||
add_feature_info(Translation WITH_MESSAGE_LOCALIZATION "message localization (requires gettext)")
|
||||
if(FISH_CRATE_FEATURES)
|
||||
set(FEATURES_ARG ${FISH_CRATE_FEATURES})
|
||||
list(PREPEND FEATURES_ARG "--features")
|
||||
endif()
|
||||
|
||||
list(JOIN FISH_CARGO_FEATURES_LIST , FISH_CARGO_FEATURES)
|
||||
get_property(
|
||||
RUSTC_EXECUTABLE
|
||||
TARGET Rust::Rustc PROPERTY IMPORTED_LOCATION
|
||||
)
|
||||
|
||||
# Tell Cargo where our build directory is so it can find Cargo.toml.
|
||||
set(VARS_FOR_CARGO
|
||||
"FISH_BUILD_DIR=${CMAKE_BINARY_DIR}"
|
||||
"PREFIX=${CMAKE_INSTALL_PREFIX}"
|
||||
# Temporary hack to propogate CMake flags/options to build.rs.
|
||||
"CMAKE_WITH_GETTEXT=${CMAKE_WITH_GETTEXT}"
|
||||
"DOCDIR=${CMAKE_INSTALL_FULL_DOCDIR}"
|
||||
"DATADIR=${CMAKE_INSTALL_FULL_DATADIR}"
|
||||
"SYSCONFDIR=${CMAKE_INSTALL_FULL_SYSCONFDIR}"
|
||||
"BINDIR=${CMAKE_INSTALL_FULL_BINDIR}"
|
||||
"LOCALEDIR=${CMAKE_INSTALL_FULL_LOCALEDIR}"
|
||||
"CARGO_TARGET_DIR=${FISH_RUST_BUILD_DIR}"
|
||||
"CARGO_BUILD_RUSTC=${RUSTC_EXECUTABLE}"
|
||||
"${FISH_PCRE2_BUILDFLAG}"
|
||||
"RUSTFLAGS=$ENV{RUSTFLAGS} ${rust_debugflags}"
|
||||
)
|
||||
|
||||
@@ -1,31 +1,136 @@
|
||||
# This adds ctest support to the project
|
||||
enable_testing()
|
||||
|
||||
# Put in a tests folder to reduce the top level targets in IDEs.
|
||||
set(CMAKE_FOLDER tests)
|
||||
|
||||
# We will use 125 as a reserved exit code to indicate that a test has been skipped, i.e. it did not
|
||||
# pass but it should not be considered a failed test run, either.
|
||||
set(SKIP_RETURN_CODE 125)
|
||||
|
||||
# Even though we are using CMake's ctest for testing, we still define our own `make test` target
|
||||
# rather than use its default for many reasons:
|
||||
# * CMake doesn't run tests in-proc or even add each tests as an individual node in the ninja
|
||||
# dependency tree, instead it just bundles all tests into a target called `test` that always just
|
||||
# shells out to `ctest`, so there are no build-related benefits to not doing that ourselves.
|
||||
# * CMake devs insist that it is appropriate for `make test` to never depend on `make all`, i.e.
|
||||
# running `make test` does not require any of the binaries to be built before testing.
|
||||
# * The only way to have a test depend on a binary is to add a fake test with a name like
|
||||
# "build_fish" that executes CMake recursively to build the `fish` target.
|
||||
# * Circling back to the point about individual tests not being actual Makefile targets, CMake does
|
||||
# not offer any way to execute a named test via the `make`/`ninja`/whatever interface; the only
|
||||
# way to manually invoke test `foo` is to to manually run `ctest` and specify a regex matching
|
||||
# `foo` as an argument, e.g. `ctest -R ^foo$`... which is really crazy.
|
||||
|
||||
# The top-level test target is "fish_run_tests".
|
||||
add_custom_target(fish_run_tests
|
||||
COMMAND env FISH_FORCE_COLOR=1
|
||||
FISH_SOURCE_DIR=${CMAKE_SOURCE_DIR}
|
||||
${CMAKE_CTEST_COMMAND} --force-new-ctest-process # --verbose
|
||||
--output-on-failure --progress
|
||||
DEPENDS tests_dir funcs_dir tests_buildroot_target
|
||||
USES_TERMINAL
|
||||
)
|
||||
|
||||
# If CMP0037 is available, also make an alias "test" target.
|
||||
# Note that this policy may not be available, in which case definining such a target silently fails.
|
||||
cmake_policy(PUSH)
|
||||
if(POLICY CMP0037)
|
||||
cmake_policy(SET CMP0037 OLD)
|
||||
add_custom_target(test DEPENDS fish_run_tests)
|
||||
endif()
|
||||
cmake_policy(POP)
|
||||
|
||||
# The "test" directory.
|
||||
set(TEST_DIR ${CMAKE_CURRENT_BINARY_DIR}/test)
|
||||
|
||||
# The directory into which fish is installed.
|
||||
set(TEST_INSTALL_DIR ${TEST_DIR}/buildroot)
|
||||
|
||||
# The directory where the tests expect to find the fish root (./bin, etc)
|
||||
set(TEST_ROOT_DIR ${TEST_DIR}/root)
|
||||
|
||||
# Copy needed directories for out-of-tree builds
|
||||
if(NOT FISH_IN_TREE_BUILD)
|
||||
add_custom_target(funcs_dir)
|
||||
add_custom_command(TARGET funcs_dir
|
||||
COMMAND mkdir -p ${CMAKE_BINARY_DIR}/share
|
||||
# Don't run ln twice or it will create a new link in the link.
|
||||
COMMAND test -e ${CMAKE_BINARY_DIR}/share/functions || ln -sf
|
||||
${CMAKE_SOURCE_DIR}/share/functions/ ${CMAKE_BINARY_DIR}/share/functions
|
||||
COMMENT "Symlinking fish functions to binary dir"
|
||||
VERBATIM)
|
||||
|
||||
add_custom_target(tests_dir DEPENDS tests)
|
||||
add_custom_command(TARGET tests_dir
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory
|
||||
${CMAKE_SOURCE_DIR}/tests/ ${CMAKE_BINARY_DIR}/tests/
|
||||
COMMENT "Copying test files to binary dir"
|
||||
VERBATIM)
|
||||
endif()
|
||||
|
||||
# Copy littlecheck.py
|
||||
configure_file(build_tools/littlecheck.py littlecheck.py COPYONLY)
|
||||
|
||||
# Copy pexpect_helper.py
|
||||
configure_file(build_tools/pexpect_helper.py pexpect_helper.py COPYONLY)
|
||||
|
||||
# Suppress generating Xcode schemes for all tests, there's too many.
|
||||
set(CMAKE_XCODE_GENERATE_SCHEME 0)
|
||||
|
||||
# CMake being CMake, you can't just add a DEPENDS argument to add_test to make it depend on any of
|
||||
# your binaries actually being built before `make test` is executed (requiring `make all` first),
|
||||
# and the only dependency a test can have is on another test. So we make building fish
|
||||
# prerequisites to our entire top-level `test` target.
|
||||
function(add_test_target NAME)
|
||||
string(REPLACE "/" "-" NAME ${NAME})
|
||||
add_custom_target("test_${NAME}" COMMAND ${CMAKE_CTEST_COMMAND} --output-on-failure -R "^${NAME}$$"
|
||||
DEPENDS tests_dir funcs_dir tests_buildroot_target USES_TERMINAL )
|
||||
endfunction()
|
||||
|
||||
add_custom_target(tests_buildroot_target
|
||||
# Make the directory in which to run tests:
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${TEST_INSTALL_DIR}
|
||||
COMMAND env DESTDIR=${TEST_INSTALL_DIR} ${CMAKE_COMMAND}
|
||||
--build ${CMAKE_CURRENT_BINARY_DIR} --target install
|
||||
# Put fish_test_helper there too:
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_BINARY_DIR}/fish_test_helper
|
||||
${TEST_INSTALL_DIR}/${CMAKE_INSTALL_PREFIX}/bin
|
||||
# Also symlink fish to where the tests expect it to be:
|
||||
COMMAND ${CMAKE_COMMAND} -E create_symlink
|
||||
${TEST_INSTALL_DIR}/${CMAKE_INSTALL_PREFIX}
|
||||
${TEST_ROOT_DIR}
|
||||
DEPENDS fish fish_test_helper)
|
||||
|
||||
FILE(GLOB FISH_CHECKS CONFIGURE_DEPENDS ${CMAKE_SOURCE_DIR}/tests/checks/*.fish)
|
||||
foreach(CHECK ${FISH_CHECKS})
|
||||
get_filename_component(CHECK_NAME ${CHECK} NAME)
|
||||
add_custom_target(
|
||||
test_${CHECK_NAME}
|
||||
COMMAND ${CMAKE_SOURCE_DIR}/tests/test_driver.py ${CMAKE_CURRENT_BINARY_DIR}
|
||||
checks/${CHECK_NAME}
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/tests
|
||||
DEPENDS fish fish_indent fish_key_reader
|
||||
USES_TERMINAL
|
||||
get_filename_component(CHECK ${CHECK} NAME_WE)
|
||||
add_test(NAME ${CHECK_NAME}
|
||||
COMMAND sh ${CMAKE_CURRENT_BINARY_DIR}/tests/test_driver.sh
|
||||
${CMAKE_CURRENT_BINARY_DIR}/tests/test.fish ${CHECK}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/tests
|
||||
)
|
||||
set_tests_properties(${CHECK_NAME} PROPERTIES SKIP_RETURN_CODE ${SKIP_RETURN_CODE})
|
||||
set_tests_properties(${CHECK_NAME} PROPERTIES ENVIRONMENT FISH_FORCE_COLOR=1)
|
||||
add_test_target("${CHECK_NAME}")
|
||||
endforeach(CHECK)
|
||||
|
||||
FILE(GLOB PEXPECTS CONFIGURE_DEPENDS ${CMAKE_SOURCE_DIR}/tests/pexpects/*.py)
|
||||
foreach(PEXPECT ${PEXPECTS})
|
||||
get_filename_component(PEXPECT ${PEXPECT} NAME)
|
||||
add_custom_target(
|
||||
test_${PEXPECT}
|
||||
COMMAND ${CMAKE_SOURCE_DIR}/tests/test_driver.py ${CMAKE_CURRENT_BINARY_DIR}
|
||||
pexpects/${PEXPECT}
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/tests
|
||||
DEPENDS fish fish_indent fish_key_reader
|
||||
USES_TERMINAL
|
||||
add_test(NAME ${PEXPECT}
|
||||
COMMAND sh ${CMAKE_CURRENT_BINARY_DIR}/tests/test_driver.sh
|
||||
${CMAKE_CURRENT_BINARY_DIR}/tests/interactive.fish ${PEXPECT}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/tests
|
||||
)
|
||||
set_tests_properties(${PEXPECT} PROPERTIES SKIP_RETURN_CODE ${SKIP_RETURN_CODE})
|
||||
set_tests_properties(${PEXPECT} PROPERTIES ENVIRONMENT FISH_FORCE_COLOR=1)
|
||||
add_test_target("${PEXPECT}")
|
||||
endforeach(PEXPECT)
|
||||
|
||||
# Rust stuff.
|
||||
set(cargo_test_flags)
|
||||
# Rust stuff.
|
||||
if(DEFINED ASAN)
|
||||
# Rust w/ -Zsanitizer=address requires explicitly specifying the --target triple or else linker
|
||||
# errors pertaining to asan symbols will ensue.
|
||||
@@ -46,25 +151,10 @@ if(DEFINED Rust_CARGO_TARGET)
|
||||
list(APPEND cargo_test_flags "--lib")
|
||||
endif()
|
||||
|
||||
set(max_concurrency_flag)
|
||||
if(DEFINED ENV{FISH_TEST_MAX_CONCURRENCY})
|
||||
list(APPEND max_concurrency_flag "--max-concurrency" $ENV{FISH_TEST_MAX_CONCURRENCY})
|
||||
endif()
|
||||
|
||||
# The top-level test target is "fish_run_tests".
|
||||
add_custom_target(fish_run_tests
|
||||
# TODO: This should be replaced with a unified solution, possibly build_tools/check.sh.
|
||||
COMMAND ${CMAKE_SOURCE_DIR}/tests/test_driver.py ${max_concurrency_flag} ${CMAKE_CURRENT_BINARY_DIR}
|
||||
COMMAND env ${VARS_FOR_CARGO}
|
||||
${Rust_CARGO}
|
||||
test
|
||||
--no-default-features
|
||||
--features=${FISH_CARGO_FEATURES}
|
||||
${CARGO_FLAGS}
|
||||
--workspace
|
||||
--target-dir ${rust_target_dir}
|
||||
${cargo_test_flags}
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
DEPENDS fish fish_indent fish_key_reader
|
||||
USES_TERMINAL
|
||||
add_test(
|
||||
NAME "cargo-test"
|
||||
COMMAND env ${VARS_FOR_CARGO} cargo test ${CARGO_FLAGS} --workspace --target-dir ${rust_target_dir} ${cargo_test_flags}
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
)
|
||||
set_tests_properties("cargo-test" PROPERTIES SKIP_RETURN_CODE ${SKIP_RETURN_CODE})
|
||||
add_test_target("cargo-test")
|
||||
|
||||
55
cmake/Version.cmake
Normal file
55
cmake/Version.cmake
Normal file
@@ -0,0 +1,55 @@
|
||||
# This file adds commands to manage the FISH-BUILD-VERSION-FILE (hereafter
|
||||
# FBVF). This file exists in the build directory and is used to populate the
|
||||
# documentation and also the version string in fish_version.o (printed with
|
||||
# `echo $version` and also fish --version). The essential idea is that we are
|
||||
# going to invoke git_version_gen.sh, which will update the
|
||||
# FISH-BUILD-VERSION-FILE only if it needs to change; this is what makes
|
||||
# incremental rebuilds fast.
|
||||
#
|
||||
# This code is delicate, with the chief subtlety revolving around Ninja. A
|
||||
# natural and naive approach would tell the generated build system that FBVF is
|
||||
# a dependency of fish_version.o, and that git_version_gen.sh updates it. Make
|
||||
# will then invoke the script, check the timestamp on fish_version.o and FBVF,
|
||||
# see that FBVF is earlier, and then not rebuild fish_version.o. Ninja,
|
||||
# however, decides what to build up-front and will unconditionally rebuild
|
||||
# fish_version.o.
|
||||
#
|
||||
# To avoid this with Ninja, we want to hook into its 'restat' option which we
|
||||
# can do through the BYPRODUCTS feature of CMake. See
|
||||
# https://cmake.org/cmake/help/latest/policy/CMP0058.html
|
||||
#
|
||||
# Unfortunately BYPRODUCTS behaves strangely with the Makefile generator: it
|
||||
# marks FBVF as generated and then CMake itself will `touch` it on every build,
|
||||
# meaning that using BYPRODUCTS will cause fish_version.o to be rebuilt
|
||||
# unconditionally with the Makefile generator. Thus we want to use the
|
||||
# natural-and-naive approach for Makefiles.
|
||||
|
||||
# **IMPORTANT** If you touch these build rules, please test both Ninja and
|
||||
# Makefile generators with both a clean and dirty git tree. Verify that both
|
||||
# generated build systems rebuild fish when the git tree goes from dirty to
|
||||
# clean (and vice versa), and verify they do NOT rebuild it when the git tree
|
||||
# stays the same (incremental builds must be fast).
|
||||
|
||||
# Just a handy abbreviation.
|
||||
set(FBVF FISH-BUILD-VERSION-FILE)
|
||||
|
||||
# TODO: find a cleaner way to do this.
|
||||
IF (${CMAKE_GENERATOR} STREQUAL Ninja)
|
||||
set(FBVF-OUTPUT fish-build-version-witness.txt)
|
||||
set(CFBVF-BYPRODUCTS ${FBVF})
|
||||
else(${CMAKE_GENERATOR} STREQUAL Ninja)
|
||||
set(FBVF-OUTPUT ${FBVF})
|
||||
set(CFBVF-BYPRODUCTS)
|
||||
endif(${CMAKE_GENERATOR} STREQUAL Ninja)
|
||||
|
||||
# Set up the version targets
|
||||
add_custom_target(CHECK-FISH-BUILD-VERSION-FILE
|
||||
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build_tools/git_version_gen.sh ${CMAKE_CURRENT_BINARY_DIR}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
BYPRODUCTS ${CFBVF-BYPRODUCTS})
|
||||
|
||||
add_custom_command(OUTPUT ${FBVF-OUTPUT}
|
||||
DEPENDS CHECK-FISH-BUILD-VERSION-FILE)
|
||||
|
||||
# Abbreviation for the target.
|
||||
set(CFBVF CHECK-FISH-BUILD-VERSION-FILE)
|
||||
22
cmake/gettext.cmake
Normal file
22
cmake/gettext.cmake
Normal file
@@ -0,0 +1,22 @@
|
||||
set(languages de en fr pl pt_BR sv zh_CN)
|
||||
|
||||
include(FeatureSummary)
|
||||
|
||||
option(WITH_GETTEXT "translate messages if gettext is available" ON)
|
||||
if(WITH_GETTEXT)
|
||||
find_package(Gettext)
|
||||
endif()
|
||||
add_feature_info(gettext GETTEXT_FOUND "translate messages with gettext")
|
||||
|
||||
# Define translations
|
||||
if(GETTEXT_FOUND)
|
||||
# Group pofile targets into their own folder, as there's a lot of them.
|
||||
set(CMAKE_FOLDER pofiles)
|
||||
foreach(lang ${languages})
|
||||
# Our translations aren't set up entirely as CMake expects, so installation is done in
|
||||
# cmake/Install.cmake instead of using INSTALL_DESTINATION
|
||||
gettext_process_po_files(${lang} ALL
|
||||
PO_FILES po/${lang}.po)
|
||||
endforeach()
|
||||
set(CMAKE_FOLDER)
|
||||
endif()
|
||||
@@ -1,504 +0,0 @@
|
||||
fish (4.5.0-1) stable; urgency=medium
|
||||
|
||||
* Release of new version 4.5.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.5.0 for details.
|
||||
|
||||
-- Johannes Altmanninger <aclopte@gmail.com> Tue, 17 Feb 2026 11:32:33 +1100
|
||||
|
||||
fish (4.4.0-1) stable; urgency=medium
|
||||
|
||||
* Release of new version 4.4.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.4.0 for details.
|
||||
|
||||
-- Johannes Altmanninger <aclopte@gmail.com> Tue, 03 Feb 2026 12:11:51 +1100
|
||||
|
||||
fish (4.3.3-1) stable; urgency=medium
|
||||
|
||||
* Release of new version 4.3.3.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.3.3 for details.
|
||||
|
||||
-- Johannes Altmanninger <aclopte@gmail.com> Wed, 07 Jan 2026 08:34:20 +0100
|
||||
|
||||
fish (4.3.2-1) stable; urgency=medium
|
||||
|
||||
* Release of new version 4.3.2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.3.2 for details.
|
||||
|
||||
-- Johannes Altmanninger <aclopte@gmail.com> Tue, 30 Dec 2025 17:21:04 +0100
|
||||
|
||||
fish (4.3.1-1) stable; urgency=medium
|
||||
|
||||
* Release of new version 4.3.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.3.1 for details.
|
||||
|
||||
-- Johannes Altmanninger <aclopte@gmail.com> Sun, 28 Dec 2025 16:54:44 +0100
|
||||
|
||||
fish (4.3.0-1) stable; urgency=medium
|
||||
|
||||
* Release of new version 4.3.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.3.0 for details.
|
||||
|
||||
-- Johannes Altmanninger <aclopte@gmail.com> Sun, 28 Dec 2025 10:20:47 +0100
|
||||
|
||||
fish (4.2.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.2.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.2.1 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 13 Nov 2025 20:42:43 +0800
|
||||
|
||||
fish (4.2.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.2.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.2.0 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 10 Nov 2025 19:29:03 +0800
|
||||
|
||||
fish (4.1.2-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.1.2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.1.2 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 08 Oct 2025 13:46:45 +0800
|
||||
|
||||
fish (4.1.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.1.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.1.1 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Fri, 03 Oct 2025 16:43:43 +0800
|
||||
|
||||
fish (4.0.8-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.0.8.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.0.8 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 18 Sep 2025 22:17:43 +0800
|
||||
|
||||
fish (4.0.6-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.0.6.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.0.6 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 17 Sep 2025 12:27:09 +0800
|
||||
|
||||
fish (4.0.2-2) testing; urgency=medium
|
||||
|
||||
* Fix tests on Debian.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 20 Apr 2025 23:08:14 +0800
|
||||
|
||||
fish (4.0.2-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.0.2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.0.2 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 20 Apr 2025 21:24:18 +0800
|
||||
|
||||
fish (4.0.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.0.1.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 13 Mar 2025 11:30:21 +0800
|
||||
|
||||
fish (4.0.0-2) testing; urgency=medium
|
||||
|
||||
* Fix tests on Debian.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 27 Feb 2025 21:50:33 +0800
|
||||
|
||||
fish (4.0.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 4.0.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.0.0 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 27 Feb 2025 19:22:30 +0800
|
||||
|
||||
fish (4.0.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 4.0b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/4.0b1 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 17 Dec 2024 23:42:25 +0800
|
||||
|
||||
fish (3.7.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.7.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.7.1 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 19 Mar 2024 13:26:22 +0800
|
||||
|
||||
fish (3.7.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.7.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.7.0 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 01 Jan 2024 23:32:55 +0800
|
||||
|
||||
fish (3.6.4-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.6.4.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.6.4 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 05 Dec 2023 22:34:09 +0800
|
||||
|
||||
fish (3.6.3-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.6.3.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.6.3 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 05 Dec 2023 00:11:12 +0800
|
||||
|
||||
fish (3.6.2-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.6.2.
|
||||
* Includes a fix for CVE-2023-49284.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.6.2 for details.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 04 Dec 2023 23:16:42 +0800
|
||||
|
||||
fish (3.6.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.6.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.6.1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 25 Mar 2023 17:22:12 +0800
|
||||
|
||||
fish (3.6.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.6.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.6.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 07 Jan 2023 22:41:32 +0800
|
||||
|
||||
fish (3.5.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.5.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.5.1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 20 Jul 2022 21:54:09 +0800
|
||||
|
||||
fish (3.5.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.5.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.5.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 16 Jun 2022 19:45:33 +0800
|
||||
|
||||
fish (3.4.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.4.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.4.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 12 Mar 2022 23:24:22 +0800
|
||||
|
||||
fish (3.3.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.3.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.3.1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 06 Jul 2021 23:22:36 +0800
|
||||
|
||||
fish (3.3.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.3.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.3.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 28 Jun 2021 23:06:36 +0800
|
||||
|
||||
fish (3.2.2-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.2.2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.2.2 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 07 Apr 2021 21:10:54 +0800
|
||||
|
||||
fish (3.2.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.2.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.2.1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 18 Mar 2021 12:08:13 +0800
|
||||
|
||||
fish (3.2.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.2.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.2.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 01 Mar 2021 21:22:39 +0800
|
||||
|
||||
fish (3.1.2-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.1.2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.1.2 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 29 Apr 2020 11:22:35 +0800
|
||||
|
||||
fish (3.1.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.1.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.1.1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 27 Apr 2020 22:45:35 +0800
|
||||
|
||||
fish (3.1.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.1.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.1.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 12 Feb 2020 22:34:53 +0800
|
||||
|
||||
fish (3.1.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 3.1b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.1b1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 26 Jan 2020 21:42:46 +0800
|
||||
|
||||
fish (3.0.2-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.0.2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.0.2 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 19 Feb 2019 21:45:05 +0800
|
||||
|
||||
fish (3.0.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.0.1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.0.1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 11 Feb 2019 20:23:55 +0800
|
||||
|
||||
fish (3.0.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 3.0.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.0.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Fri, 28 Dec 2018 21:10:28 +0800
|
||||
|
||||
fish (3.0.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 3.0b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/3.0b1 for
|
||||
significant changes, which includes backward incompatibility.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 11 Dec 2018 22:59:15 +0800
|
||||
|
||||
fish (2.7.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new bug fix version 2.7.1. On all Linux platforms, this is
|
||||
release will behave identically to 2.7.0.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 23 Dec 2017 00:43:12 +0800
|
||||
|
||||
fish (2.7.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 2.7.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.7.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 23 Nov 2017 18:38:21 +0800
|
||||
|
||||
fish (2.7.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 2.7b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.7b1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 31 Oct 2017 20:32:29 +0800
|
||||
|
||||
fish (2.6.0-1) testing; urgency=medium
|
||||
|
||||
* Relase of new version 2.6.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.6.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 03 Jun 2017 20:51:50 +0800
|
||||
|
||||
fish (2.6b1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 2.6b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.6b1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 14 May 2017 10:47:39 +0800
|
||||
|
||||
fish (2.5.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 2.5.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.5.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Fri, 03 Feb 2017 09:52:57 +0800
|
||||
|
||||
fish (2.5b1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 2.5b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.5b1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 14 Jan 2017 08:49:34 +0800
|
||||
|
||||
fish (2.4.0-2) testing; urgency=medium
|
||||
|
||||
* Change recommendation of xdg-utils to suggestion (closes
|
||||
https://github.com/fish-shell/fish-shell/issues/3534).
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Wed, 09 Nov 2016 22:56:16 +0800
|
||||
|
||||
fish (2.4.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 2.4.0.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.4.0 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 08 Nov 2016 11:29:57 +0800
|
||||
|
||||
fish (2.4b1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 2.4b1.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.4b1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 18 Oct 2016 22:25:26 +0800
|
||||
|
||||
fish (2.3.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 2.3.1.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 03 Jul 2016 21:21:51 +0800
|
||||
|
||||
fish (2.3.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 2.3.0.
|
||||
|
||||
See http://fishshell.com/release_notes.html for significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 21 May 2016 06:59:54 +0800
|
||||
|
||||
fish (2.3b2-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 2.3b2.
|
||||
|
||||
See https://github.com/fish-shell/fish-shell/releases/tag/2.3b2 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 05 May 2016 06:22:37 +0800
|
||||
|
||||
fish (2.3.1-1) testing; urgency=medium
|
||||
|
||||
* Release of new beta version 2.3b1.
|
||||
|
||||
See http://github.com/fish-shell/fish-shell/releases/tag/2.3b1 for
|
||||
significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Tue, 19 Apr 2016 21:07:17 +0800
|
||||
|
||||
fish (2.2.0-2) testing; urgency=medium
|
||||
|
||||
* Binary rebuild only to resynchronise repository state.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Mon, 15 Feb 2016 22:45:05 +0800
|
||||
|
||||
fish (2.2.0-1) testing; urgency=medium
|
||||
|
||||
* Release of new version 2.2.0.
|
||||
|
||||
See http://fishshell.com/release_notes.html for significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 12 Jul 2015 18:52:29 +0800
|
||||
|
||||
fish (2.2b1-1) testing; urgency=low
|
||||
|
||||
* Release of new beta version 2.2b1.
|
||||
|
||||
See http://fishshell.com/staging/release_notes.html for significant
|
||||
changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 07 May 2015 14:48:21 +0800
|
||||
|
||||
fish (2.1.1-1) unstable; urgency=high
|
||||
|
||||
* Release of new version 2.1.1.
|
||||
|
||||
See http://fishshell.com/release_notes.html for significant changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sun, 07 Sep 2014 17:06:31 +0800
|
||||
|
||||
fish (2.1.0-1) unstable; urgency=low
|
||||
|
||||
* Release of new version 2.1.0.
|
||||
|
||||
See http://fishshell.com/staging/release_notes.html for significant
|
||||
changes.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Sat, 19 Oct 2013 14:35:23 +0800
|
||||
|
||||
fish (2.0.0-0) unstable; urgency=low
|
||||
|
||||
* Initial release of fish 2.0.0.
|
||||
|
||||
-- David Adam <zanchey@ucc.gu.uwa.edu.au> Thu, 19 Jul 2012 23:17:58 +0800
|
||||
@@ -1,28 +0,0 @@
|
||||
#!/usr/bin/make -f
|
||||
# -*- makefile -*-
|
||||
|
||||
ifeq (,$(filter terse,$(DEB_BUILD_OPTIONS)))
|
||||
export DH_VERBOSE=1
|
||||
# VERBOSE to satisfy Debian policy 4.9, introduced in version 4.2.0
|
||||
export CARGO_TERM_VERBOSE=true
|
||||
endif
|
||||
# The LTO profile sets CFLAGS/CXXFLAGS which confuse the compilation process; disable it
|
||||
# LTO is still performed by rustc based on Cargo.toml
|
||||
export DEB_BUILD_MAINT_OPTIONS=optimize=-lto
|
||||
|
||||
%:
|
||||
dh $@ --buildsystem=cmake --builddirectory=build
|
||||
|
||||
# Setting the build system is still required, because otherwise the GNUmakefile gets picked up
|
||||
override_dh_auto_configure:
|
||||
ln -s cargo-vendor/vendor vendor
|
||||
dh_auto_configure -- -DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DRust_CARGO=$$(command -v cargo-1.85 || command -v cargo) \
|
||||
-DRust_COMPILER=$$(command -v rustc-1.85 || command -v rustc)
|
||||
|
||||
override_dh_clean:
|
||||
dh_clean --exclude=Cargo.toml.orig
|
||||
-unlink vendor
|
||||
|
||||
override_dh_auto_test:
|
||||
cd build && make fish_run_tests
|
||||
@@ -1,3 +0,0 @@
|
||||
# The vendor tarball drops a new version of .cargo/config into place. Representing this as a patch
|
||||
# in automated workflows is tricky, so for our purposes auto-commit is fine.
|
||||
auto-commit
|
||||
@@ -1,13 +0,0 @@
|
||||
[package]
|
||||
name = "fish-build-helper"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
rsconf.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,104 +0,0 @@
|
||||
use std::{borrow::Cow, env, os::unix::ffi::OsStrExt as _, path::Path};
|
||||
|
||||
pub fn env_var(name: &str) -> Option<String> {
|
||||
let err = match env::var(name) {
|
||||
Ok(p) => return Some(p),
|
||||
Err(err) => err,
|
||||
};
|
||||
use env::VarError::*;
|
||||
match err {
|
||||
NotPresent => None,
|
||||
NotUnicode(os_string) => {
|
||||
panic!(
|
||||
"Environment variable {name} is not valid Unicode: {:?}",
|
||||
os_string.as_bytes()
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn workspace_root() -> &'static Path {
|
||||
let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
|
||||
manifest_dir.ancestors().nth(2).unwrap()
|
||||
}
|
||||
|
||||
fn cargo_target_dir() -> Cow<'static, Path> {
|
||||
option_env!("CARGO_TARGET_DIR")
|
||||
.map(|d| Cow::Borrowed(Path::new(d)))
|
||||
.unwrap_or(Cow::Owned(workspace_root().join("target")))
|
||||
}
|
||||
|
||||
pub fn fish_build_dir() -> Cow<'static, Path> {
|
||||
option_env!("FISH_CMAKE_BINARY_DIR")
|
||||
.map(|d| Cow::Borrowed(Path::new(d)))
|
||||
.unwrap_or(cargo_target_dir())
|
||||
}
|
||||
|
||||
pub fn fish_doc_dir() -> Cow<'static, Path> {
|
||||
cargo_target_dir().join("fish-docs").into()
|
||||
}
|
||||
|
||||
// TODO Move this to rsconf
|
||||
pub fn rebuild_if_path_changed<P: AsRef<Path>>(path: P) {
|
||||
rsconf::rebuild_if_path_changed(path.as_ref().to_str().unwrap());
|
||||
}
|
||||
|
||||
// TODO Move this to rsconf
|
||||
pub fn rebuild_if_paths_changed<P: AsRef<Path>, I: IntoIterator<Item = P>>(paths: I) {
|
||||
for path in paths {
|
||||
rsconf::rebuild_if_path_changed(path.as_ref().to_str().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rebuild_if_embedded_path_changed<P: AsRef<Path>>(path: P) {
|
||||
// Not necessary in debug builds, where rust-embed reads from the filesystem.
|
||||
if cfg!(any(not(debug_assertions), windows)) {
|
||||
rebuild_if_path_changed(path);
|
||||
}
|
||||
}
|
||||
|
||||
// Target OS for compiling our crates, as opposed to the build script.
|
||||
pub fn target_os() -> String {
|
||||
env_var("CARGO_CFG_TARGET_OS").unwrap()
|
||||
}
|
||||
|
||||
pub fn target_os_is_apple() -> bool {
|
||||
matches!(target_os().as_str(), "ios" | "macos")
|
||||
}
|
||||
|
||||
/// Detect if we're being compiled for a BSD-derived OS, allowing targeting code conditionally with
|
||||
/// `#[cfg(bsd)]`.
|
||||
///
|
||||
/// Rust offers fine-grained conditional compilation per-os for the popular operating systems, but
|
||||
/// doesn't necessarily include less-popular forks nor does it group them into families more
|
||||
/// specific than "windows" vs "unix" so we can conditionally compile code for BSD systems.
|
||||
pub fn target_os_is_bsd() -> bool {
|
||||
let target_os = target_os();
|
||||
let is_bsd = target_os.ends_with("bsd") || target_os == "dragonfly";
|
||||
if matches!(
|
||||
target_os.as_str(),
|
||||
"dragonfly" | "freebsd" | "netbsd" | "openbsd"
|
||||
) {
|
||||
assert!(is_bsd, "Target incorrectly detected as not BSD!");
|
||||
}
|
||||
is_bsd
|
||||
}
|
||||
|
||||
pub fn target_os_is_cygwin() -> bool {
|
||||
target_os() == "cygwin"
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! as_os_strs {
|
||||
[ $( $x:expr, )* ] => {
|
||||
{
|
||||
use std::ffi::OsStr;
|
||||
fn as_os_str<S: AsRef<OsStr> + ?Sized>(s: &S) -> &OsStr {
|
||||
s.as_ref()
|
||||
}
|
||||
&[
|
||||
$( as_os_str($x), )*
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
[package]
|
||||
name = "fish-build-man-pages"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
fish-build-helper.workspace = true
|
||||
rsconf.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,101 +0,0 @@
|
||||
use std::path::Path;
|
||||
|
||||
use fish_build_helper::{as_os_strs, fish_doc_dir};
|
||||
|
||||
fn main() {
|
||||
let sec1_dir = fish_doc_dir().join("man").join("man1");
|
||||
// Running `cargo clippy` on a clean build directory panics, because when rust-embed
|
||||
// tries to embed a directory which does not exist it will panic.
|
||||
let _ = std::fs::create_dir_all(&sec1_dir);
|
||||
if !cfg!(clippy) {
|
||||
build_man(&sec1_dir);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_man(sec1_dir: &Path) {
|
||||
use fish_build_helper::{env_var, workspace_root};
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
let workspace_root = workspace_root();
|
||||
let doc_src_dir = workspace_root.join("doc_src");
|
||||
let doctrees_dir = fish_doc_dir().join(".doctrees-man");
|
||||
|
||||
fish_build_helper::rebuild_if_paths_changed([
|
||||
&workspace_root.join("CHANGELOG.rst"),
|
||||
&workspace_root.join("CONTRIBUTING.rst"),
|
||||
&doc_src_dir,
|
||||
]);
|
||||
|
||||
let args = as_os_strs![
|
||||
"-j",
|
||||
"auto",
|
||||
"-q",
|
||||
"-b",
|
||||
"man",
|
||||
"-c",
|
||||
&doc_src_dir,
|
||||
// doctree path - put this *above* the man1 dir to exclude it.
|
||||
// this is ~6M
|
||||
"-d",
|
||||
&doctrees_dir,
|
||||
&doc_src_dir,
|
||||
&sec1_dir,
|
||||
];
|
||||
|
||||
rsconf::rebuild_if_env_changed("FISH_BUILD_DOCS");
|
||||
if env_var("FISH_BUILD_DOCS") == Some("0".to_owned()) {
|
||||
rsconf::warn!("Skipping man pages because $FISH_BUILD_DOCS is set to 0");
|
||||
return;
|
||||
}
|
||||
|
||||
// We run sphinx to build the man pages.
|
||||
// Every error here is fatal so cargo doesn't cache the result
|
||||
// - if we skipped the docs with sphinx not installed, installing it would not then build the docs.
|
||||
// That means you need to explicitly set $FISH_BUILD_DOCS=0 (`FISH_BUILD_DOCS=0 cargo install --path .`),
|
||||
// which is unfortunate - but the docs are pretty important because they're also used for --help.
|
||||
let sphinx_build = match Command::new(option_env!("FISH_SPHINX").unwrap_or("sphinx-build"))
|
||||
.args(args)
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
{
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
assert_ne!(
|
||||
env_var("FISH_BUILD_DOCS"),
|
||||
Some("1".to_owned()),
|
||||
"Could not find sphinx-build required to build man pages.\n\
|
||||
Install Sphinx or disable building the docs by setting $FISH_BUILD_DOCS=0."
|
||||
);
|
||||
rsconf::warn!(
|
||||
"Could not find sphinx-build required to build man pages. \
|
||||
If you install Sphinx now, you need to trigger a rebuild to include man pages. \
|
||||
For example by running `touch doc_src` followed by the build command."
|
||||
);
|
||||
return;
|
||||
}
|
||||
Err(e) => {
|
||||
// Another error - permissions wrong etc
|
||||
panic!("Error starting sphinx-build to build man pages: {:?}", e);
|
||||
}
|
||||
Ok(sphinx_build) => sphinx_build,
|
||||
};
|
||||
|
||||
match sphinx_build.wait_with_output() {
|
||||
Err(err) => {
|
||||
panic!(
|
||||
"Error waiting for sphinx-build to build man pages: {:?}",
|
||||
err
|
||||
);
|
||||
}
|
||||
Ok(out) => {
|
||||
if !out.stderr.is_empty() {
|
||||
rsconf::warn!("sphinx-build: {}", String::from_utf8_lossy(&out.stderr));
|
||||
}
|
||||
assert_eq!(&String::from_utf8_lossy(&out.stdout), "");
|
||||
assert!(
|
||||
out.status.success(),
|
||||
"sphinx-build failed to build the man pages."
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
[package]
|
||||
name = "fish-color"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
fish-common.workspace = true
|
||||
fish-widestring.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,16 +0,0 @@
|
||||
[package]
|
||||
name = "fish-common"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
bitflags.workspace = true
|
||||
fish-widestring.workspace = true
|
||||
libc.workspace = true
|
||||
nix.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,709 +0,0 @@
|
||||
use bitflags::bitflags;
|
||||
use fish_widestring::{L, char_offset, wstr};
|
||||
use libc::{SIG_IGN, SIGTTOU, STDIN_FILENO};
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::io::Read;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::os::fd::{AsRawFd, BorrowedFd, RawFd};
|
||||
use std::os::unix::ffi::OsStrExt as _;
|
||||
use std::sync::OnceLock;
|
||||
use std::sync::atomic::{AtomicI32, AtomicU32, Ordering};
|
||||
use std::{env, mem, time};
|
||||
|
||||
/// Crate/package name as baked in at compile time by Cargo.
pub const PACKAGE_NAME: &str = env!("CARGO_PKG_NAME");

// Highest legal ASCII value (DEL, 0x7F).
pub const ASCII_MAX: char = 127 as char;

// Highest legal 16-bit Unicode value (end of the Basic Multilingual Plane).
pub const UCS2_MAX: char = '\u{FFFF}';

// Highest legal byte value, as a char (U+00FF).
pub const BYTE_MAX: char = 0xFF as char;

// Unicode BOM value (ZERO WIDTH NO-BREAK SPACE, stripped from UTF-8 input).
pub const UTF8_BOM_WCHAR: char = '\u{FEFF}';

// Use Unicode "non-characters" for internal characters as much as we can. This
// gives us 32 "characters" for internal use that we can guarantee should not
// appear in our input stream. See http://www.unicode.org/faq/private_use.html.
pub const RESERVED_CHAR_BASE: char = '\u{FDD0}';
pub const RESERVED_CHAR_END: char = '\u{FDF0}';
// Split the available non-character values into two ranges to ensure there are
// no conflicts among the places we use these special characters.
pub const EXPAND_RESERVED_BASE: char = RESERVED_CHAR_BASE;
pub const EXPAND_RESERVED_END: char = char_offset(EXPAND_RESERVED_BASE, 16);
pub const WILDCARD_RESERVED_BASE: char = EXPAND_RESERVED_END;
pub const WILDCARD_RESERVED_END: char = char_offset(WILDCARD_RESERVED_BASE, 16);
// Make sure the ranges defined above don't exceed the range for non-characters.
// This is to make sure we didn't do something stupid in subdividing the
// Unicode range for our needs. (Compile-time assertion.)
const _: () = assert!(WILDCARD_RESERVED_END <= RESERVED_CHAR_END);
|
||||
|
||||
/// Which escaping convention to apply when escaping a string.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum EscapeStringStyle {
    /// Escape for fish script syntax, tunable via [`EscapeFlags`].
    Script(EscapeFlags),
    /// URL-style escaping.
    Url,
    /// Escaping suitable for variable names.
    Var,
    /// Escaping of regex metacharacters.
    Regex,
}

impl Default for EscapeStringStyle {
    // The default is script-style escaping with default flags.
    fn default() -> Self {
        Self::Script(EscapeFlags::default())
    }
}

impl TryFrom<&wstr> for EscapeStringStyle {
    type Error = &'static wstr;

    /// Parse a style name ("script", "var", "url", "regex") into a style.
    /// Returns an error message for any other input.
    fn try_from(s: &wstr) -> Result<Self, Self::Error> {
        use EscapeStringStyle::*;
        match s {
            s if s == "script" => Ok(Self::default()),
            s if s == "var" => Ok(Var),
            s if s == "url" => Ok(Url),
            s if s == "regex" => Ok(Regex),
            _ => Err(L!("Invalid escape style")),
        }
    }
}
|
||||
|
||||
bitflags! {
    /// Flags for the [`escape_string()`] function. These are only applicable when the escape style is
    /// [`EscapeStringStyle::Script`].
    #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
    pub struct EscapeFlags: u32 {
        /// Do not escape special fish syntax characters like the semicolon. Only escape non-printable
        /// characters and backslashes.
        const NO_PRINTABLES = 1 << 0;
        /// Do not try to use 'simplified' quoted escapes, and do not use empty quotes as the empty
        /// string.
        const NO_QUOTED = 1 << 1;
        /// Do not escape tildes.
        const NO_TILDE = 1 << 2;
        /// Replace non-printable control characters with Unicode symbols.
        const SYMBOLIC = 1 << 3;
        /// Escape the comma character `,`.
        const COMMA = 1 << 4;
    }
}
|
||||
|
||||
/// Which unescaping convention to apply when unescaping a string.
/// Mirrors [`EscapeStringStyle`] minus `Regex` (regex escapes are one-way here).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UnescapeStringStyle {
    /// Unescape fish script syntax, tunable via [`UnescapeFlags`].
    Script(UnescapeFlags),
    /// URL-style unescaping.
    Url,
    /// Unescaping of variable-name escapes.
    Var,
}

impl Default for UnescapeStringStyle {
    // The default is script-style unescaping with default flags.
    fn default() -> Self {
        Self::Script(UnescapeFlags::default())
    }
}

impl TryFrom<&wstr> for UnescapeStringStyle {
    type Error = &'static wstr;

    /// Parse a style name ("script", "var", "url") into a style.
    /// Returns an error message for any other input.
    fn try_from(s: &wstr) -> Result<Self, Self::Error> {
        use UnescapeStringStyle::*;
        match s {
            s if s == "script" => Ok(Self::default()),
            s if s == "var" => Ok(Var),
            s if s == "url" => Ok(Url),
            _ => Err(L!("Invalid escape style")),
        }
    }
}
|
||||
|
||||
bitflags! {
    /// Flags for unescape_string functions.
    #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
    pub struct UnescapeFlags: u32 {
        /// escape special fish syntax characters like the semicolon
        const SPECIAL = 1 << 0;
        /// allow incomplete escape sequences
        const INCOMPLETE = 1 << 1;
        /// don't handle backslash escapes
        const NO_BACKSLASHES = 1 << 2;
    }
}
|
||||
|
||||
/// This function attempts to distinguish between a console session (at the actual login vty) and a
/// session within a terminal emulator inside a desktop environment or over SSH. Unfortunately
/// there are few values of $TERM that we can interpret as being exclusively console sessions, and
/// most common operating systems do not use them. The value is cached for the duration of the fish
/// session. We err on the side of assuming it's not a console session. This approach isn't
/// bullet-proof and that's OK.
pub fn is_console_session() -> bool {
    static IS_CONSOLE_SESSION: OnceLock<bool> = OnceLock::new();
    // TODO(terminal-workaround)
    // Computed once and cached for the life of the process.
    *IS_CONSOLE_SESSION.get_or_init(|| {
        // SAFETY-relevant: borrow_raw requires STDIN_FILENO to be a valid open fd;
        // ttyname fails gracefully (is_ok_and -> false) if stdin is not a tty.
        nix::unistd::ttyname(unsafe { std::os::fd::BorrowedFd::borrow_raw(STDIN_FILENO) })
            .is_ok_and(|buf| {
                // Check if the tty matches /dev/(console|dcons|tty[uv\d])
                let is_console_tty = match buf.as_os_str().as_bytes() {
                    b"/dev/console" => true,
                    b"/dev/dcons" => true,
                    // /dev/ttyu*, /dev/ttyv* (BSD) or /dev/tty0../dev/tty9.. (Linux VTs).
                    bytes => bytes.strip_prefix(b"/dev/tty").is_some_and(|rest| {
                        matches!(rest.first(), Some(b'u' | b'v' | b'0'..=b'9'))
                    }),
                };

                // and that $TERM is simple, e.g. `xterm` or `vt100`, not `xterm-something` or `sun-color`.
                // An unset $TERM also counts as "simple" here (is_none_or).
                is_console_tty && env::var_os("TERM").is_none_or(|t| !t.as_bytes().contains(&b'-'))
            })
    })
}
|
||||
|
||||
/// Exits without invoking destructors (via _exit), useful for code after fork.
///
/// `code` is the process exit status. This never returns.
pub fn exit_without_destructors(code: libc::c_int) -> ! {
    // SAFETY: _exit terminates the process immediately, skipping atexit
    // handlers and Rust destructors; it is async-signal-safe (fork-safe).
    unsafe { libc::_exit(code) };
}
|
||||
|
||||
/// The character to use where the text has been truncated.
///
/// Always HORIZONTAL ELLIPSIS (U+2026).
pub fn get_ellipsis_char() -> char {
    '…'
}
|
||||
|
||||
/// The character or string to use where text has been truncated (ellipsis if possible, otherwise
/// ...)
///
/// Currently always returns the single-character ellipsis string ("…");
/// see also [`get_ellipsis_char`].
pub fn get_ellipsis_str() -> &'static wstr {
    L!("\u{2026}")
}
|
||||
|
||||
// Only pub for `src/common.rs`
pub static OBFUSCATION_READ_CHAR: AtomicU32 = AtomicU32::new(0);

/// The obfuscation character, read from the shared atomic.
///
/// Stored as a u32 so it fits in an atomic; 0 (NUL) until a writer sets it.
/// Assumes writers only ever store valid Unicode scalar values — `from_u32`
/// panics otherwise (TODO confirm against the writers in `src/common.rs`).
pub fn get_obfuscation_read_char() -> char {
    let bits = OBFUSCATION_READ_CHAR.load(Ordering::Relaxed);
    char::from_u32(bits).unwrap()
}
|
||||
|
||||
/// Call read, blocking and repeating on EINTR. Exits on EAGAIN.
|
||||
/// Return the number of bytes read, or 0 on EOF, or an error.
|
||||
pub fn read_blocked(fd: RawFd, buf: &mut [u8]) -> nix::Result<usize> {
|
||||
loop {
|
||||
let res = nix::unistd::read(unsafe { BorrowedFd::borrow_raw(fd) }, buf);
|
||||
if let Err(nix::Error::EINTR) = res {
|
||||
continue;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ReadExt {
    /// Like [`std::io::Read::read_to_end`], but does not retry on EINTR.
    fn read_to_end_interruptible(&mut self, buf: &mut Vec<u8>) -> std::io::Result<()>;
}

impl<T: Read + ?Sized> ReadExt for T {
    fn read_to_end_interruptible(&mut self, buf: &mut Vec<u8>) -> std::io::Result<()> {
        // Read fixed-size chunks, appending each to `buf`. A zero-length read
        // signals EOF. Errors (including EINTR) propagate immediately via `?`.
        let mut scratch = [0_u8; 4096];
        loop {
            let count = self.read(&mut scratch)?;
            if count == 0 {
                return Ok(());
            }
            buf.extend_from_slice(&scratch[..count]);
        }
    }
}
|
||||
|
||||
/// A rusty port of the C++ `write_loop()` function from `common.cpp`. This should be deprecated in
/// favor of native rust read/write methods at some point.
///
/// Writes all of `buf` to `fd`, retrying on EINTR and EAGAIN, returning the
/// first other error encountered.
pub fn safe_write_loop<Fd: AsRawFd>(fd: &Fd, buf: &[u8]) -> std::io::Result<()> {
    let fd = fd.as_raw_fd();
    // Number of bytes written so far; partial writes advance this cursor.
    let mut total = 0;
    while total < buf.len() {
        match nix::unistd::write(unsafe { BorrowedFd::borrow_raw(fd) }, &buf[total..]) {
            Ok(written) => {
                total += written;
            }
            Err(err) => {
                // NOTE(review): retrying on EAGAIN busy-loops if the fd is
                // non-blocking and stays full — presumably callers only pass
                // blocking fds; confirm before reusing elsewhere.
                if matches!(err, nix::Error::EAGAIN | nix::Error::EINTR) {
                    continue;
                }
                return Err(std::io::Error::from(err));
            }
        }
    }
    Ok(())
}

// Alias kept for parity with the C++ name.
pub use safe_write_loop as write_loop;
|
||||
|
||||
/// Return true if `section` appears as a complete line in the compiled-in
/// `share/help_sections` file.
///
/// This is a `const fn` so it can be evaluated at compile time (see the
/// [`help_section!`] macro); that is why the search is a manual byte loop
/// rather than iterator/str methods, which are not const-callable.
pub const fn help_section_exists(section: &str) -> bool {
    let haystack = include_str!("../../../share/help_sections");
    let needle = section;

    let needle = needle.as_bytes();
    let haystack = haystack.as_bytes();
    let nlen = needle.len();
    // Scan line by line; `line_start` marks the beginning of the current line.
    let mut line_start = 0;
    let mut i = 0;
    // `i == haystack.len()` handles a final line with no trailing newline.
    while i <= haystack.len() {
        if i == haystack.len() || haystack[i] == b'\n' {
            let line_len = i - line_start;

            // Only an exact, full-line match counts.
            if line_len == nlen {
                let mut j = 0;
                while j < nlen && haystack[line_start + j] == needle[j] {
                    j += 1;
                }
                if j == nlen {
                    return true;
                }
            }
            line_start = i + 1;
        }
        i += 1;
    }
    false
}
|
||||
|
||||
/// Pass a help-section name through, while asserting at compile time that the
/// section actually exists in `share/help_sections`. A typo in the section
/// name fails the build instead of failing at runtime.
#[macro_export]
macro_rules! help_section {
    ($section:expr) => {{
        // Const block forces compile-time evaluation of the lookup.
        const {
            assert!($crate::help_section_exists($section));
        }

        $section
    }};
}
|
||||
|
||||
/// A point in time, as seconds since the UNIX epoch.
pub type Timepoint = f64;

/// Return the number of seconds from the UNIX epoch, with subsecond precision. This function uses
/// the gettimeofday function and will have the same precision as that function.
pub fn timef() -> Timepoint {
    match time::SystemTime::now().duration_since(time::UNIX_EPOCH) {
        // Clock is at or after the epoch: positive offset.
        // Use as_secs_f64() to keep the documented subsecond precision;
        // `as_secs() as f64` would truncate to whole seconds.
        Ok(difference) => difference.as_secs_f64(),
        // Clock set before the epoch: report a negative offset.
        Err(until_epoch) => -until_epoch.duration().as_secs_f64(),
    }
}
|
||||
|
||||
/// Be able to restore the term's foreground process group.
/// This is set during startup and not modified after.
static INITIAL_FG_PROCESS_GROUP: AtomicI32 = AtomicI32::new(-1); // HACK, should be pid_t
// Compile-time check that pid_t values fit in the i32 atomic above.
const _: () = assert!(mem::size_of::<i32>() >= mem::size_of::<libc::pid_t>());

/// Save the value of tcgetpgrp so we can restore it on exit.
///
/// Queries the foreground process group of the controlling terminal (stdin)
/// and stashes it for [`restore_term_foreground_process_group_for_exit`].
pub fn save_term_foreground_process_group() {
    // tcgetpgrp returns -1 on error (e.g. stdin is not a tty); the restore
    // path ignores non-positive values, so no error handling is needed here.
    INITIAL_FG_PROCESS_GROUP.store(unsafe { libc::tcgetpgrp(STDIN_FILENO) }, Ordering::Relaxed);
}
|
||||
|
||||
/// Restore the terminal's foreground process group to the value saved at
/// startup by [`save_term_foreground_process_group`]. Safe to call from a
/// signal handler.
pub fn restore_term_foreground_process_group_for_exit() {
    // We wish to restore the tty to the initial owner. There's two ways this can go wrong:
    // 1. We may steal the tty from someone else (#7060).
    // 2. The call to tcsetpgrp may deliver SIGSTOP to us, and we will not exit.
    // Hanging on exit seems worse, so ensure that SIGTTOU is ignored so we do not get SIGSTOP.
    // Note initial_fg_process_group == 0 is possible with Linux pid namespaces.
    // This is called during shutdown and from a signal handler. We don't bother to complain on
    // failure because doing so is unlikely to be noticed.
    // Safety: All of getpgrp, signal, and tcsetpgrp are async-signal-safe.
    let initial_fg_process_group = INITIAL_FG_PROCESS_GROUP.load(Ordering::Relaxed);
    // Skip if never saved (-1), invalid (0), or we already own the terminal.
    if initial_fg_process_group > 0 && initial_fg_process_group != unsafe { libc::getpgrp() } {
        unsafe {
            libc::signal(SIGTTOU, SIG_IGN);
            libc::tcsetpgrp(STDIN_FILENO, initial_fg_process_group);
        }
    }
}
|
||||
|
||||
/// A wrapper around Cell which supports modifying the contents, scoped to a region of code.
/// This provides a somewhat nicer API than ScopedRefCell because you can directly modify the value,
/// instead of requiring an accessor function which returns a mutable reference to a field.
pub struct ScopedCell<T>(Cell<T>);

// Deref/DerefMut expose the full Cell API (get/set/replace/...) directly.
impl<T> Deref for ScopedCell<T> {
    type Target = Cell<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> DerefMut for ScopedCell<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<T: Copy> ScopedCell<T> {
    /// Create a new ScopedCell holding `value`.
    pub fn new(value: T) -> Self {
        Self(Cell::new(value))
    }

    /// Temporarily modify a value in the ScopedCell, restoring it when the returned object is dropped.
    ///
    /// This is useful when you want to apply a change for the duration of a scope
    /// without having to manually restore the previous value.
    ///
    /// # Example
    ///
    /// ```
    /// use fish_common::ScopedCell;
    ///
    /// let cell = ScopedCell::new(5);
    /// assert_eq!(cell.get(), 5);
    ///
    /// {
    ///     let _guard = cell.scoped_mod(|v| *v += 10);
    ///     assert_eq!(cell.get(), 15);
    /// }
    ///
    /// // Restored after scope
    /// assert_eq!(cell.get(), 5);
    /// ```
    pub fn scoped_mod<'a, Modifier: FnOnce(&mut T)>(
        &'a self,
        modifier: Modifier,
    ) -> impl ScopeGuarding + 'a {
        // Copy out, apply the modification, and swap the new value in;
        // `saved` is the pre-modification value the guard restores on drop.
        let mut val = self.get();
        modifier(&mut val);
        let saved = self.replace(val);
        ScopeGuard::new(self, move |cell| cell.set(saved))
    }
}
|
||||
|
||||
/// A wrapper around RefCell which supports modifying the contents, scoped to a region of code.
pub struct ScopedRefCell<T>(RefCell<T>);

// Deref/DerefMut expose the full RefCell API (borrow/borrow_mut/...) directly.
impl<T> Deref for ScopedRefCell<T> {
    type Target = RefCell<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> DerefMut for ScopedRefCell<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<T> ScopedRefCell<T> {
    /// Create a new ScopedRefCell holding `value`.
    pub fn new(value: T) -> Self {
        Self(RefCell::new(value))
    }

    /// Temporarily modify a field in the ScopedRefCell, restoring it when the returned guard is dropped.
    ///
    /// This is useful when you want to change part of a data structure for the duration of a scope,
    /// and automatically restore the original value afterward.
    ///
    /// The `accessor` function selects the field to modify by returning a mutable reference to it.
    ///
    /// # Example
    /// ```
    /// use fish_common::ScopedRefCell;
    ///
    /// struct State { flag: bool }
    ///
    /// let cell = ScopedRefCell::new(State { flag: false });
    /// assert_eq!(cell.borrow().flag, false);
    ///
    /// {
    ///     let _guard = cell.scoped_set(true, |s| &mut s.flag);
    ///     assert_eq!(cell.borrow().flag, true);
    /// }
    ///
    /// // Restored after scope
    /// assert_eq!(cell.borrow().flag, false);
    /// ```
    pub fn scoped_set<'a, Accessor, Value: 'a>(
        &'a self,
        value: Value,
        accessor: Accessor,
    ) -> impl ScopeGuarding + 'a
    where
        Accessor: Fn(&mut T) -> &mut Value + 'a,
    {
        // Swap the new value in now, keeping the old one in `saved`.
        // Note the borrow_mut() is dropped before the guard closure runs,
        // which takes its own borrow_mut() at restore time.
        let mut data = self.borrow_mut();
        let mut saved = std::mem::replace(accessor(&mut data), value);
        ScopeGuard::new(self, move |cell| {
            let mut data = cell.borrow_mut();
            std::mem::swap((accessor)(&mut data), &mut saved);
        })
    }

    /// Convenience method for replacing the entire contents of the ScopedRefCell, restoring it when dropped.
    ///
    /// Equivalent to `scoped_set(value, |s| s)`.
    ///
    /// # Example
    /// ```
    /// use fish_common::ScopedRefCell;
    ///
    /// let cell = ScopedRefCell::new(10);
    /// assert_eq!(*cell.borrow(), 10);
    ///
    /// {
    ///     let _guard = cell.scoped_replace(99);
    ///     assert_eq!(*cell.borrow(), 99);
    /// }
    ///
    /// assert_eq!(*cell.borrow(), 10);
    /// ```
    pub fn scoped_replace<'a>(&'a self, value: T) -> impl ScopeGuarding + 'a {
        self.scoped_set(value, |s| s)
    }
}
|
||||
|
||||
/// A RAII cleanup object. Unlike in C++ where there is no borrow checker, we can't just provide a
/// callback that modifies live objects willy-nilly because then there would be two &mut references
/// to the same object - the original variables we keep around to use and their captured references
/// held by the closure until its scope expires.
///
/// Instead a `ScopeGuard` takes exclusive ownership of the value to be managed and runs the
/// callback on it when the guard is dropped. While the guard is alive, the value is reachable
/// through the guard's [`Deref`]/[`DerefMut`] impls, so the idiomatic pattern is to shadow the
/// original binding with the guard:
///
/// ```rust
/// use std::io::prelude::*;
/// use fish_common::ScopeGuard;
///
/// let file = std::fs::File::create("/dev/null").unwrap();
/// // Shadow `file` with the guard; writes go "through" it via DerefMut.
/// let mut file = ScopeGuard::new(file, |mut file| file.write_all(b"goodbye\n").unwrap());
/// file.write_all(b"hello\n").unwrap();
/// // hello is written first, then goodbye when the guard drops.
/// ```
///
/// Use [`ScopeGuard::commit`] to run the callback eagerly, or [`ScopeGuard::cancel`] to recover
/// the value without running the callback at all.
// Invariant: the Option is Some until drop/cancel consumes it.
pub struct ScopeGuard<T, F: FnOnce(T)>(Option<(T, F)>);

impl<T, F: FnOnce(T)> ScopeGuard<T, F> {
    /// Creates a new `ScopeGuard` wrapping `value`. The `on_drop` callback is executed when the
    /// ScopeGuard's lifetime expires or when it is manually dropped.
    pub fn new(value: T, on_drop: F) -> Self {
        Self(Some((value, on_drop)))
    }

    /// Invokes the callback, consuming the ScopeGuard.
    pub fn commit(guard: Self) {
        // Dropping triggers the Drop impl, which runs the callback.
        drop(guard);
    }

    /// Cancels the invocation of the callback, returning the original wrapped value.
    pub fn cancel(mut guard: Self) -> T {
        // Taking the contents means Drop sees None and does nothing.
        let (value, _callback) = guard.0.take().expect("Should always have Some value");
        value
    }
}

impl<T, F: FnOnce(T)> Deref for ScopeGuard<T, F> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        let (value, _) = self.0.as_ref().unwrap();
        value
    }
}

impl<T, F: FnOnce(T)> DerefMut for ScopeGuard<T, F> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let (value, _) = self.0.as_mut().unwrap();
        value
    }
}

impl<T, F: FnOnce(T)> Drop for ScopeGuard<T, F> {
    fn drop(&mut self) {
        // None here means cancel() already ran; otherwise run the callback.
        let Some((value, on_drop)) = self.0.take() else {
            return;
        };
        on_drop(value);
    }
}
|
||||
|
||||
/// A trait expressing what ScopeGuard can do. This is necessary because our scoped cells return an
/// `impl Trait` object and therefore methods on ScopeGuard which take a self parameter cannot be
/// used.
pub trait ScopeGuarding: DerefMut + Sized {
    /// Invokes the callback, consuming the guard.
    fn commit(guard: Self) {
        // Dropping the guard runs its cleanup callback.
        std::mem::drop(guard);
    }
}

impl<T, F: FnOnce(T)> ScopeGuarding for ScopeGuard<T, F> {}
|
||||
|
||||
// Compile-time bound checks: instantiating e.g. `assert_send::<MyType>()` in a
// const context fails the build if the type does not satisfy the bound.
pub const fn assert_send<T: Send>() {}
pub const fn assert_sync<T: Sync>() {}
|
||||
|
||||
/// Asserts that a slice is alphabetically sorted by a <code>&[wstr]</code> `name` field.
///
/// Mainly useful for static asserts/const eval.
///
/// # Panics
///
/// This function panics if the given slice is unsorted.
///
/// # Examples
///
/// ```
/// use fish_widestring::{L, wstr};
/// use fish_common::assert_sorted_by_name;
///
/// const COLORS: &[(&wstr, u32)] = &[
///     // must be in alphabetical order
///     (L!("blue"), 0x0000ff),
///     (L!("green"), 0x00ff00),
///     (L!("red"), 0xff0000),
/// ];
///
/// assert_sorted_by_name!(COLORS, 0);
/// ```
///
/// While this example would fail to compile:
///
/// ```compile_fail
/// use fish_widestring::{L, wstr};
/// use fish_common::assert_sorted_by_name;
///
/// const COLORS: &[(&wstr, u32)] = &[
///     // not in alphabetical order
///     (L!("green"), 0x00ff00),
///     (L!("blue"), 0x0000ff),
///     (L!("red"), 0xff0000),
/// ];
///
/// assert_sorted_by_name!(COLORS, 0);
/// ```
#[macro_export]
macro_rules! assert_sorted_by_name {
    ($slice:expr, $field:tt) => {
        // Whole check runs at compile time inside an anonymous const.
        const _: () = {
            use std::cmp::Ordering;

            // ugly const eval workarounds below.
            // (cmp/PartialOrd methods are not const-callable, so chars are
            // compared as i32 code points by hand.)
            const fn cmp_i32(lhs: i32, rhs: i32) -> Ordering {
                match lhs - rhs {
                    ..=-1 => Ordering::Less,
                    0 => Ordering::Equal,
                    1.. => Ordering::Greater,
                }
            }

            // Lexicographic comparison of two char slices; shorter prefix
            // compares Less, matching standard slice ordering.
            const fn cmp_slice(s1: &[char], s2: &[char]) -> Ordering {
                let mut i = 0;
                while i < s1.len() && i < s2.len() {
                    match cmp_i32(s1[i] as i32, s2[i] as i32) {
                        Ordering::Equal => i += 1,
                        other => return other,
                    }
                }
                cmp_i32(s1.len() as i32, s2.len() as i32)
            }

            // Verify each adjacent pair is in non-decreasing order.
            let mut i = 1;
            while i < $slice.len() {
                let prev = $slice[i - 1].$field.as_char_slice();
                let cur = $slice[i].$field.as_char_slice();
                if matches!(cmp_slice(prev, cur), Ordering::Greater) {
                    panic!("array must be sorted");
                }
                i += 1;
            }
        };
    };
    // Default to a field literally named `name`.
    ($slice:expr) => {
        assert_sorted_by_name!($slice, name);
    };
}
|
||||
|
||||
/// A value with a static name, used with [`get_by_sorted_name`].
pub trait Named {
    fn name(&self) -> &'static wstr;
}

/// Return a reference to the first entry with the given name, assuming the entries are sorted by
/// name. Return None if not found.
///
/// NOTE(review): binary_search_by_key returns an unspecified match when
/// duplicates exist, so "first" assumes names are unique — confirm callers
/// never pass duplicate names.
pub fn get_by_sorted_name<T: Named>(name: &wstr, vals: &'static [T]) -> Option<&'static T> {
    match vals.binary_search_by_key(&name, |val| val.name()) {
        Ok(index) => Some(&vals[index]),
        Err(_) => None,
    }
}
|
||||
|
||||
/// Given an input string, return a prefix of the string up to the first NUL character,
|
||||
/// or the entire string if there is no NUL character.
|
||||
pub fn truncate_at_nul(input: &wstr) -> &wstr {
|
||||
match input.chars().position(|c| c == '\0') {
|
||||
Some(nul_pos) => &input[..nul_pos],
|
||||
None => input,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // ScopedCell: scoped_mod applies the change while the guard lives and
    // restores the original value when it drops.
    #[test]
    fn test_scoped_cell() {
        let cell = ScopedCell::new(42);

        {
            let _guard = cell.scoped_mod(|x| *x += 1);
            assert_eq!(cell.get(), 43);
        }

        assert_eq!(cell.get(), 42);
    }

    // ScopedRefCell: scoped_set restores a single field; scoped_replace
    // restores the whole value.
    #[test]
    fn test_scoped_refcell() {
        #[derive(Debug, PartialEq, Clone)]
        struct Data {
            x: i32,
            y: i32,
        }

        let cell = ScopedRefCell::new(Data { x: 1, y: 2 });

        {
            let _guard = cell.scoped_set(10, |d| &mut d.x);
            assert_eq!(cell.borrow().x, 10);
        }
        assert_eq!(cell.borrow().x, 1);

        {
            let _guard = cell.scoped_replace(Data { x: 42, y: 99 });
            assert_eq!(*cell.borrow(), Data { x: 42, y: 99 });
        }
        assert_eq!(*cell.borrow(), Data { x: 1, y: 2 });
    }

    // ScopeGuard: the callback runs exactly once, on drop or on commit.
    #[test]
    fn test_scope_guard() {
        let relaxed = std::sync::atomic::Ordering::Relaxed;
        let counter = std::sync::atomic::AtomicUsize::new(0);
        {
            let guard = ScopeGuard::new(123, |arg| {
                assert_eq!(arg, 123);
                counter.fetch_add(1, relaxed);
            });
            assert_eq!(counter.load(relaxed), 0);
            std::mem::drop(guard);
            assert_eq!(counter.load(relaxed), 1);
        }
        // commit also invokes the callback.
        {
            let guard = ScopeGuard::new(123, |arg| {
                assert_eq!(arg, 123);
                counter.fetch_add(1, relaxed);
            });
            assert_eq!(counter.load(relaxed), 1);
            ScopeGuard::commit(guard);
            assert_eq!(counter.load(relaxed), 2);
        }
    }

    // truncate_at_nul: cut at the first NUL; pass through NUL-free strings.
    #[test]
    fn test_truncate_at_nul() {
        assert_eq!(truncate_at_nul(L!("abc\0def")), L!("abc"));
        assert_eq!(truncate_at_nul(L!("abc")), L!("abc"));
        assert_eq!(truncate_at_nul(L!("\0abc")), L!(""));
    }
}
|
||||
@@ -1,21 +0,0 @@
|
||||
[package]
|
||||
name = "fish-fallback"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
fish-common.workspace = true
|
||||
fish-widecharwidth.workspace = true
|
||||
fish-widestring.workspace = true
|
||||
libc.workspace = true
|
||||
widestring.workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
fish-build-helper.workspace = true
|
||||
rsconf.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,5 +0,0 @@
|
||||
use fish_build_helper::target_os_is_cygwin;

// Build script: wires up a custom `cygwin` cfg flag for this crate.
fn main() {
    // NOTE(review): declare_cfg presumably both registers the custom cfg name
    // (for the unexpected-cfgs lint) and enables it when the condition is
    // true — confirm against the rsconf documentation.
    rsconf::declare_cfg("cygwin", target_os_is_cygwin());
}
|
||||
@@ -1,21 +0,0 @@
|
||||
[package]
|
||||
name = "fish-gettext-extraction"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
description = "proc-macro for extracting strings for gettext translation"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
fish-tempfile.workspace = true
|
||||
proc-macro2.workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
rsconf.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,3 +0,0 @@
|
||||
// Build script: recompile the proc-macro crate whenever the extraction
// directory setting changes, since the macro's output depends on it.
fn main() {
    rsconf::rebuild_if_env_changed("FISH_GETTEXT_EXTRACTION_DIR");
}
|
||||
@@ -1,122 +0,0 @@
|
||||
extern crate proc_macro;
|
||||
use fish_tempfile::random_filename;
|
||||
use proc_macro::TokenStream;
|
||||
use std::{ffi::OsString, io::Write as _, path::PathBuf};
|
||||
|
||||
/// Undo the Rust-source escaping of a multi-line string literal: a backslash
/// followed by a newline plus the continuation line's leading spaces/tabs is
/// removed, and `\\` becomes a single backslash. Strings without a newline
/// are returned unchanged.
///
/// # Panics
///
/// Panics on any other backslash escape sequence.
fn unescape_multiline_rust_string(s: String) -> String {
    // Fast path: single-line strings need no processing.
    if !s.contains('\n') {
        return s;
    }
    // Three-state machine over the characters of `s`.
    enum Mode {
        // Copying characters through verbatim.
        Plain,
        // Just saw a backslash; deciding what the escape means.
        AfterBackslash,
        // Saw "\<newline>"; dropping the continuation line's indentation.
        SkipIndent,
    }
    let mut unescaped = String::new();
    let mut mode = Mode::Plain;
    for c in s.chars() {
        mode = match mode {
            Mode::Plain if c == '\\' => Mode::AfterBackslash,
            Mode::Plain => {
                unescaped.push(c);
                Mode::Plain
            }
            Mode::AfterBackslash if c == '\\' => {
                unescaped.push('\\');
                Mode::Plain
            }
            Mode::AfterBackslash if c == '\n' => Mode::SkipIndent,
            Mode::AfterBackslash => {
                panic!("Unsupported escape sequence '\\{c}' in message string '{s}'")
            }
            Mode::SkipIndent if c == ' ' || c == '\t' => Mode::SkipIndent,
            Mode::SkipIndent => {
                unescaped.push(c);
                Mode::Plain
            }
        };
    }
    unescaped
}
|
||||
|
||||
// Each entry is written to a fresh file to avoid race conditions arising when there are multiple
// unsynchronized writers to the same file.
//
// `message` is the raw token stream of a string literal; `dir` is the
// extraction output directory (FISH_GETTEXT_EXTRACTION_DIR). Panics if the
// message violates the formatting rules enforced below.
fn write_po_entry_to_file(message: &TokenStream, dir: &OsString) {
    // Collapse escaped-newline continuations back into a single line.
    let message_string = unescape_multiline_rust_string(message.to_string());
    assert!(
        !message_string.contains('\n'),
        "Gettext strings may not contain unescaped newlines. Unescaped newline found in '{message_string}'"
    );
    // The token stream renders the literal with surrounding double quotes; strip them.
    let msgid_without_quotes = &message_string[1..(message_string.len() - 1)];
    // We don't want leading or trailing whitespace in our messages.
    let trimmed_msgid = msgid_without_quotes.trim();
    assert_eq!(msgid_without_quotes, trimmed_msgid);
    // Also reject escaped whitespace (\n, \t) at either end of the message.
    assert!(!trimmed_msgid.starts_with("\\n"));
    assert!(!trimmed_msgid.ends_with("\\n"));
    assert!(!trimmed_msgid.starts_with("\\t"));
    assert!(!trimmed_msgid.ends_with("\\t"));
    // Crude check for format strings. This might result in false positives.
    let format_string_annotation = if message_string.contains('%') {
        "#, c-format\n"
    } else {
        ""
    };
    let po_entry = format!("{format_string_annotation}msgid {message_string}\nmsgstr \"\"\n\n");

    // Write the entry to a uniquely named file in the extraction directory.
    let dir = PathBuf::from(dir);
    let (path, result) =
        fish_tempfile::create_file_with_retry(|| dir.join(random_filename(OsString::new())));
    let mut file = result.unwrap_or_else(|e| {
        panic!("Failed to create temporary file {path:?}:\n{e}");
    });
    file.write_all(po_entry.as_bytes()).unwrap();
}
|
||||
|
||||
/// The `message` is passed through unmodified.
/// If `FISH_GETTEXT_EXTRACTION_DIR` is defined in the environment,
/// the message ID is written into a new file in this directory,
/// so that it can then be used for generating gettext PO files.
/// The `message` must be a string literal.
///
/// # Panics
///
/// This macro panics if the `FISH_GETTEXT_EXTRACTION_DIR` variable is set and `message` has an
/// unexpected format.
/// Note that for example `concat!(...)` cannot be passed to this macro, because expansion works
/// outside in, meaning this macro would still see the `concat!` macro invocation, instead of a
/// string literal.
#[proc_macro]
pub fn gettext_extract(message: TokenStream) -> TokenStream {
    // Extraction only happens when the env var is set (i.e. during the
    // dedicated extraction build); normal builds skip straight to returning
    // the input unchanged.
    if let Some(dir_path) = std::env::var_os("FISH_GETTEXT_EXTRACTION_DIR") {
        // Validate the input shape: exactly one group token...
        let pm2_message = proc_macro2::TokenStream::from(message.clone());
        let mut token_trees = pm2_message.into_iter();
        let first_token = token_trees
            .next()
            .expect("gettext_extract got empty token stream. Expected one token.");
        assert!(
            token_trees.next().is_none(),
            "Invalid number of tokens passed to gettext_extract. Expected one token, but got more."
        );
        let proc_macro2::TokenTree::Group(group) = first_token else {
            panic!("Expected group in gettext_extract, but got: {first_token:?}");
        };
        // ...containing exactly one literal (the string to extract).
        let mut group_tokens = group.stream().into_iter();
        let first_group_token = group_tokens
            .next()
            .expect("gettext_extract expected one group token but got none.");
        assert!(
            group_tokens.next().is_none(),
            "Invalid number of tokens in group passed to gettext_extract. Expected one token, but got more."
        );
        if let proc_macro2::TokenTree::Literal(_) = first_group_token {
            write_po_entry_to_file(&message, &dir_path);
        } else {
            panic!("Expected literal in gettext_extract, but got: {first_group_token:?}");
        }
    }
    // Always emit the original tokens so the macro is a no-op at the call site.
    message
}
|
||||
@@ -1,19 +0,0 @@
|
||||
[package]
|
||||
name = "fish-gettext-maps"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
phf.workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
fish-build-helper.workspace = true
|
||||
fish-gettext-mo-file-parser.workspace = true
|
||||
phf_codegen.workspace = true
|
||||
rsconf.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,166 +0,0 @@
|
||||
use std::{
|
||||
ffi::OsStr,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use fish_build_helper::env_var;
|
||||
|
||||
fn main() {
|
||||
let cache_dir =
|
||||
PathBuf::from(fish_build_helper::fish_build_dir()).join("fish-localization-map-cache");
|
||||
embed_localizations(&cache_dir);
|
||||
|
||||
fish_build_helper::rebuild_if_path_changed(
|
||||
fish_build_helper::workspace_root()
|
||||
.join("localization")
|
||||
.join("po"),
|
||||
);
|
||||
}
|
||||
|
||||
/// Compile every `.po` file under `localization/po` into generated Rust source
/// containing `phf` maps, and write a top-level `CATALOGS` map (language id ->
/// per-language message map) into `OUT_DIR/localization_maps.rs`.
///
/// Per-language map files are cached in `cache_dir` and only regenerated when
/// the corresponding PO file's mtime is newer than the cached file's.
/// Requires the gettext `msgfmt` tool; when it is missing, a warning is
/// emitted and an empty `CATALOGS` map is generated (no localizations).
///
/// Panics on I/O errors, on unexpected `msgfmt` failures, and on PO data that
/// cannot be converted (non-UTF-8 content, or content containing `"###`).
fn embed_localizations(cache_dir: &Path) {
    use fish_gettext_mo_file_parser::parse_mo_file;
    use std::{
        fs::File,
        io::{BufWriter, Write as _},
    };

    let po_dir = fish_build_helper::workspace_root()
        .join("localization")
        .join("po");

    // Ensure that the directory is created, because clippy cannot compile the code if the
    // directory does not exist.
    std::fs::create_dir_all(cache_dir).unwrap();

    // The main generated file; it `include!`s one cached map file per language.
    let localization_map_path =
        Path::new(&env_var("OUT_DIR").unwrap()).join("localization_maps.rs");
    let mut localization_map_file = BufWriter::new(File::create(&localization_map_path).unwrap());

    // This will become a map which maps from language identifiers to maps containing localizations
    // for the respective language.
    let mut catalogs = phf_codegen::Map::new();

    // Probe for msgfmt (and its --check-format flag) by running `msgfmt -h`.
    match Command::new("msgfmt").arg("-h").output() {
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            // msgfmt is absent: warn and fall through, leaving `catalogs` empty.
            rsconf::warn!(
                "Could not find msgfmt required to build message catalogs. \
                Localization will not work. \
                If you install gettext now, you need to trigger a rebuild to include localization support. \
                For example by running `touch localization/po` followed by the build command."
            );
        }
        Err(e) => {
            // Any other spawn failure is unexpected; abort the build.
            panic!("Error when trying to run `msgfmt -h`: {e:?}");
        }
        Ok(output) => {
            // Detect --check-format support from the help text.
            let has_check_format =
                String::from_utf8_lossy(&output.stdout).contains("--check-format");
            for dir_entry_result in po_dir.read_dir().unwrap() {
                let dir_entry = dir_entry_result.unwrap();
                let po_file_path = dir_entry.path();
                // Skip anything that is not a PO file (e.g. README files).
                if po_file_path.extension() != Some(OsStr::new("po")) {
                    continue;
                }
                let lang = po_file_path
                    .file_stem()
                    .expect("All entries in the po directory must be regular files.");
                let language = lang.to_str().unwrap().to_owned();

                // Each language gets its own static map for the mapping from message in the source code to
                // the localized version.
                let map_name = format!("LANG_MAP_{language}");

                let cached_map_path = cache_dir.join(lang);

                // Include the file containing the map for this language in the main generated file.
                // This is written unconditionally, before the cache check, since the include is
                // needed whether or not the cached file is regenerated below.
                writeln!(
                    &mut localization_map_file,
                    "include!(\"{}\");",
                    cached_map_path.display()
                )
                .unwrap();
                // Map from the language identifier to the map containing the localizations for this
                // language.
                catalogs.entry(language, format!("&{map_name}"));

                if let Ok(metadata) = std::fs::metadata(&cached_map_path) {
                    // Cached map file exists, but might be outdated.
                    let cached_map_mtime = metadata.modified().unwrap();
                    let po_mtime = dir_entry.metadata().unwrap().modified().unwrap();
                    if cached_map_mtime > po_mtime {
                        // Cached map file is considered up-to-date.
                        continue;
                    }
                }

                // Generate the map file.

                // Try to create new MO data and load it into `mo_data`.
                // NOTE(review): when --check-format is unsupported, msgfmt output is routed
                // through a temp file instead of stdout ("-") — presumably a workaround for
                // such msgfmt versions; confirm before changing.
                let mut tmp_mo_file = None;
                let output = {
                    let mut cmd = &mut Command::new("msgfmt");
                    if has_check_format {
                        cmd = cmd.arg("--check-format");
                    } else {
                        tmp_mo_file = Some(cache_dir.join("messages.mo"));
                    }
                    cmd.arg(format!(
                        "--output-file={}",
                        tmp_mo_file
                            .as_ref()
                            .map_or("-", |path| path.to_str().unwrap())
                    ))
                    .arg(&po_file_path)
                    .output()
                    .unwrap()
                };
                assert!(
                    output.status.success(),
                    "msgfmt failed:\n{}",
                    String::from_utf8(output.stderr).unwrap()
                );
                // MO data comes from stdout, or from the temp file when one was used.
                let mo_data =
                    tmp_mo_file.map_or(output.stdout, |path| std::fs::read(path).unwrap());

                // Extract map from MO data.
                let language_localizations = parse_mo_file(&mo_data).unwrap();

                // This file will contain the localization map for the current language.
                let mut cached_map_file = File::create(&cached_map_path).unwrap();
                let mut single_language_localization_map = phf_codegen::Map::new();

                // The values will be written into the source code as is, meaning escape sequences and
                // double quotes in the data will be interpreted by the Rust compiler, which is undesirable.
                // Converting them to raw strings prevents this. (As long as no input data contains `"###`.)
                fn to_raw_str(s: &str) -> String {
                    assert!(!s.contains("\"###"));
                    format!("r###\"{s}\"###")
                }
                for (msgid, msgstr) in language_localizations {
                    single_language_localization_map.entry(
                        String::from_utf8(msgid.into()).unwrap(),
                        to_raw_str(&String::from_utf8(msgstr.into()).unwrap()),
                    );
                }
                // Suppress the naming lint: map names embed the (lowercase) language id.
                writeln!(&mut cached_map_file, "#[allow(non_upper_case_globals)]").unwrap();
                write!(
                    &mut cached_map_file,
                    "static {}: phf::Map<&'static str, &'static str> = {}",
                    &map_name,
                    single_language_localization_map.build()
                )
                .unwrap();
                writeln!(&mut cached_map_file, ";").unwrap();
            }
        }
    }

    // Finally, emit the top-level catalog map referencing all per-language maps.
    write!(
        &mut localization_map_file,
        "pub static CATALOGS: phf::Map<&str, &phf::Map<&str, &str>> = {}",
        catalogs.build()
    )
    .unwrap();
    writeln!(&mut localization_map_file, ";").unwrap();
}
|
||||
@@ -1 +0,0 @@
|
||||
// Pull in the build-script-generated source (see the crate's build.rs),
// which defines the `CATALOGS` map and one static map per language.
include!(concat!(env!("OUT_DIR"), "/localization_maps.rs"));
|
||||
@@ -1,10 +0,0 @@
|
||||
[package]
|
||||
name = "fish-gettext-mo-file-parser"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
@@ -1,134 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Number of bytes in a `u32`; all MO-file header and table fields are `u32`s.
const U32_SIZE: usize = size_of::<u32>();

/// Decode the first `U32_SIZE` bytes of `bytes` as a little-endian `u32`.
/// Panics if `bytes` holds fewer than `U32_SIZE` bytes.
fn read_le_u32(bytes: &[u8]) -> u32 {
    let word: [u8; U32_SIZE] = bytes[..U32_SIZE].try_into().unwrap();
    u32::from_le_bytes(word)
}

/// Decode the first `U32_SIZE` bytes of `bytes` as a big-endian `u32`.
/// Panics if `bytes` holds fewer than `U32_SIZE` bytes.
fn read_be_u32(bytes: &[u8]) -> u32 {
    let word: [u8; U32_SIZE] = bytes[..U32_SIZE].try_into().unwrap();
    u32::from_be_bytes(word)
}
|
||||
|
||||
/// Select the `u32` decoder matching the byte order of a MO file, based on its
/// magic number (the first four bytes of the file).
///
/// The magic value 0x950412de is stored in the producer's native byte order,
/// so its on-disk byte sequence reveals whether the rest of the file's `u32`
/// fields are big- or little-endian.
///
/// Returns an `InvalidData` error unless `magic_number` is exactly the four
/// magic bytes in one of the two orders (the slice patterns below only match
/// slices of length 4).
fn get_u32_reader_from_magic_number(magic_number: &[u8]) -> std::io::Result<fn(&[u8]) -> u32> {
    match magic_number {
        // Magic stored big-endian: decode all fields as big-endian.
        [0x95, 0x04, 0x12, 0xde] => Ok(read_be_u32),
        // Magic stored little-endian: decode all fields as little-endian.
        [0xde, 0x12, 0x04, 0x95] => Ok(read_le_u32),
        _ => Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "First 4 bytes of MO file must correspond to magic number 0x950412de, either big or little endian.",
        )),
    }
}
|
||||
|
||||
/// Returns an error if an unknown major revision is detected.
/// There are no relevant differences between supported revisions.
fn check_if_revision_is_supported(revision: u32) -> std::io::Result<()> {
    // The major revision lives in the upper 16 bits of the field.
    //
    // From the reference: a program seeing an unexpected major revision number
    // should stop reading the MO file entirely, whereas an unexpected minor
    // revision number only means the file will not reveal its full contents to
    // a parser supporting smaller minor revision numbers.
    let major_revision = revision >> 16;
    // At the time of writing, 0 and 1 are the only existing major revisions.
    // There is no documented difference between them, and the GNU gettext code
    // does not seem to differentiate either. Everything we rely on exists in
    // minor revision 0, so the minor revision is ignored entirely.
    if major_revision <= 1 {
        Ok(())
    } else {
        Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "Major revision must be 0 or 1",
        ))
    }
}
|
||||
|
||||
/// Losslessly convert a `u32` (the MO file's field type) into a `usize`.
///
/// The conversion is guaranteed not to truncate on any supported target, which
/// is enforced at compile time by the `const` assertion below.
fn as_usize(value: u32) -> usize {
    // Compile-time proof that `usize` is at least as wide as `u32`, so the
    // cast below can never lose information.
    const {
        assert!(size_of::<u32>() <= size_of::<usize>());
    }

    // Plain widening cast — lossless given the assertion above. This replaces
    // the previous `unsafe { usize::try_from(value).unwrap_unchecked() }`:
    // the safe cast compiles to the same zero-extension, so the `unsafe`
    // block bought nothing.
    value as usize
}
|
||||
|
||||
fn parse_strings(
|
||||
file_content: &[u8],
|
||||
num_strings: usize,
|
||||
table_offset: usize,
|
||||
read_u32: fn(&[u8]) -> u32,
|
||||
) -> std::io::Result<Vec<&[u8]>> {
|
||||
let file_too_short_error = || {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"MO file is too short.",
|
||||
))
|
||||
};
|
||||
if table_offset + num_strings * 2 * U32_SIZE > file_content.len() {
|
||||
return file_too_short_error();
|
||||
}
|
||||
let mut strings = Vec::with_capacity(num_strings);
|
||||
let mut offset = table_offset;
|
||||
let mut get_next_u32 = || {
|
||||
let val = read_u32(&file_content[offset..]);
|
||||
offset += U32_SIZE;
|
||||
val
|
||||
};
|
||||
for _ in 0..num_strings {
|
||||
// not including NUL terminator
|
||||
let string_length = as_usize(get_next_u32());
|
||||
let string_offset = as_usize(get_next_u32());
|
||||
let string_end = string_offset.checked_add(string_length).unwrap();
|
||||
if string_end > file_content.len() {
|
||||
return file_too_short_error();
|
||||
}
|
||||
// Contexts are stored by storing the concatenation of the context, a EOT byte, and the original string, instead of the original string.
|
||||
// Contexts are not supported by this implementation.
|
||||
// The format allows plural forms to appear behind singular forms, separated by a NUL byte,
|
||||
// where `string_length` includes the length of both.
|
||||
// This is not supported here.
|
||||
// Do not include the NUL terminator in the slice.
|
||||
strings.push(&file_content[string_offset..string_end]);
|
||||
}
|
||||
Ok(strings)
|
||||
}
|
||||
|
||||
/// Parse a MO file.
|
||||
/// Format reference used: <https://www.gnu.org/software/gettext/manual/html_node/MO-Files.html>
|
||||
pub fn parse_mo_file(file_content: &[u8]) -> std::io::Result<HashMap<&[u8], &[u8]>> {
|
||||
if file_content.len() < 7 * U32_SIZE {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"File too short to contain header.",
|
||||
));
|
||||
}
|
||||
// The first 4 bytes are a magic number, from which the endianness can be determined.
|
||||
let read_u32 = get_u32_reader_from_magic_number(&file_content[0..U32_SIZE])?;
|
||||
let mut offset = U32_SIZE;
|
||||
let mut get_next_u32 = || {
|
||||
let val = read_u32(&file_content[offset..]);
|
||||
offset += U32_SIZE;
|
||||
val
|
||||
};
|
||||
let file_format_revision = get_next_u32();
|
||||
check_if_revision_is_supported(file_format_revision)?;
|
||||
let num_strings = as_usize(get_next_u32());
|
||||
let original_strings_offset = as_usize(get_next_u32());
|
||||
let translation_strings_offset = as_usize(get_next_u32());
|
||||
let original_strings =
|
||||
parse_strings(file_content, num_strings, original_strings_offset, read_u32)?;
|
||||
let translated_strings = parse_strings(
|
||||
file_content,
|
||||
num_strings,
|
||||
translation_strings_offset,
|
||||
read_u32,
|
||||
)?;
|
||||
let mut translation_map = HashMap::with_capacity(num_strings);
|
||||
for i in 0..num_strings {
|
||||
translation_map.insert(original_strings[i], translated_strings[i]);
|
||||
}
|
||||
Ok(translation_map)
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
[package]
|
||||
name = "fish-gettext"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
version = "0.0.0"
|
||||
repository.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
fish-gettext-maps.workspace = true
|
||||
phf.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user