# spec file for package ghostty
Name: ghostty
Version: 1.2.3
Release: 0
Summary: Ghostty terminal emulator
License: MIT
URL: https://ghostty.org/
Source0: %{name}-%{version}.tar.gz
Source1: vendor.tar.zst
Source2: zig-linux-x86_64-0.14.0.tar.xz
Source3: zig-linux-aarch64-0.14.0.tar.xz
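# vendor.tar.zst bundles the upstream dependency tarballs referenced by the
# build.zig.zon files (one archive per URL; git dependencies are named after
# their commit hash). Source2/Source3 provide the matching Zig 0.14.0 toolchain
# per architecture so the build needs no network access.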
BuildRequires: zig
BuildRequires: zstd
BuildRequires: pkgconfig(gtk4)
BuildRequires: pkgconfig(libadwaita-1)
BuildRequires: blueprint-compiler
BuildRequires: pkgconfig(wayland-client)
BuildRequires: pkgconfig(xkbcommon)
BuildRequires: pkgconfig(vulkan)
BuildRequires: gcc
BuildRequires: make
BuildRequires: ncurses-devel
BuildRequires: libxml2-tools
BuildRequires: gtk4-layer-shell-devel
BuildRequires: chrpath
BuildRequires: fdupes
BuildRequires: pkgconfig
BuildRequires: pkgconfig(x11)
BuildRequires: pkgconfig(xcb)
BuildRequires: pkgconfig(xrandr)
BuildRequires: pkgconfig(xrender)
BuildRequires: pkgconfig(xext)
BuildRequires: pkgconfig(xfixes)
BuildRequires: pkgconfig(xcursor)
BuildRequires: pkgconfig(xi)
BuildRequires: pkgconfig(xinerama)
BuildRequires: pkgconfig(xdamage)
# (Optional) If rpmlint complains later, we can tighten files/desktop/appstream bits.
%description
Ghostty is a fast, feature-rich terminal emulator that uses platform-native UI
(GTK4 and libadwaita on Linux) and GPU acceleration.
%prep
%autosetup -n %{name}-%{version}
# Unpack vendored tarballs (no network in OBS builds)
mkdir -p vendor
tar -I zstd -xf %{SOURCE1}
# Extract the correct Zig compiler for the architecture
%ifarch x86_64
tar -xf %{SOURCE2}
%endif
%ifarch aarch64
tar -xf %{SOURCE3}
%endif
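# The toolchain tarballs extract to zig-linux-<arch>-0.14.0/; %build below
# prepends the matching directory to PATH.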
echo "Listing vendor directory contents:"
ls -R vendor
# Rewrite ALL dependency URLs in all *.zon files to local file:// URLs.
# This keeps Zig's dependency hashes intact, but prevents network access.
export VENDOR_DIR="$PWD/vendor"
# Replace: .url = "https://.../something.tar.gz"
# With: .path = "../vendor_unpacked/something_unpacked/..."
# across root and nested pkg/* zon files.
# This script runs in multiple passes to handle nested dependencies.
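# Illustrative example of the rewrite (placeholder names; the relative path
# depends on where the .zon file lives):
#   .url = "https://example.com/somelib-1.0.0.tar.gz",
#   .hash = "1220aaaa...",
# becomes
#   .path = "../vendor_unpacked/somelib-1.0.0.tar.gz_unpacked/somelib-1.0.0",
#   // .hash = "1220aaaa...",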
cat > patch_zon.py << 'EOF'
import os
import re
import sys
import shutil
import subprocess
from urllib.parse import urlparse

vendor_dir = os.environ.get('VENDOR_DIR')
if not vendor_dir:
    sys.exit("VENDOR_DIR not set")
vendor_dir = os.path.abspath(vendor_dir)

# Sibling to vendor
unpacked_root = os.path.join(os.path.dirname(vendor_dir), 'vendor_unpacked')
os.makedirs(unpacked_root, exist_ok=True)

print(f"Patching .zon files. VENDOR_DIR: {vendor_dir}")
print(f"Unpacking to: {unpacked_root}")

# Mapping of git commit hashes to their actual vendor filenames
# Note: With the fix to build-vendor.sh, all git dependencies now use
# their commit hash as filename, so no special mapping is needed.
GIT_COMMIT_TO_FILE = {}

unpacked_cache = {}


def patch_all_zons():
    """Patch all .zon files found in current directory tree"""
    patched_any = False
    for root, dirs, files in os.walk('.'):
        # Skip the local Zig compiler directory
        if "zig-linux" in root:
            continue
        for file in files:
            if file.endswith('.zon'):
                path = os.path.join(root, file)
                with open(path, 'r') as f:
                    content = f.read()
                abs_zon_dir = os.path.dirname(os.path.abspath(path))

                def repl(m):
                    nonlocal patched_any
                    git_prefix = m.group(1) or ''  # 'git+' or empty string
                    url_part = m.group(2)          # The https://... part
                    full_url = git_prefix + url_part
                    # Handle git URLs with commit hashes
                    if git_prefix == 'git+':
                        # git+https://github.com/user/repo#commithash
                        # The vendored file may use a mapped filename
                        if '#' in url_part:
                            commit_hash = url_part.split('#')[1]
                            # Check if this commit hash has a known mapping
                            if commit_hash in GIT_COMMIT_TO_FILE:
                                filename = GIT_COMMIT_TO_FILE[commit_hash]
                            else:
                                # Try common patterns: commit.tar.gz or reponame-commit.tar.gz
                                filename = f"{commit_hash}.tar.gz"
                                abs_vendor_file = os.path.join(vendor_dir, filename)
                                # If not found, try with repo name prefix
                                if not os.path.exists(abs_vendor_file):
                                    # Extract repo name from URL
                                    parsed = urlparse(url_part.split('#')[0])
                                    repo_name = os.path.basename(parsed.path)
                                    filename = f"{repo_name}-{commit_hash}.tar.gz"
                        else:
                            parsed = urlparse(url_part)
                            filename = os.path.basename(parsed.path)
                    else:
                        parsed = urlparse(url_part)
                        filename = os.path.basename(parsed.path)
                    abs_vendor_file = os.path.join(vendor_dir, filename)
                    if not os.path.exists(abs_vendor_file):
                        # Try removing version query/hash if present in url but not filename?
                        # make-vendor.sh uses simple basename.
                        print(f"Warning: {filename} not found in vendor for url {full_url}")
                        return m.group(0)
                    key = filename
                    if key not in unpacked_cache:
                        dest_dir_name = filename + "_unpacked"
                        dest_abs = os.path.join(unpacked_root, dest_dir_name)
                        if not os.path.exists(dest_abs):
                            os.makedirs(dest_abs)
                            print(f"Unpacking {filename} ...")
                            try:
                                subprocess.check_call(['tar', '-xf', abs_vendor_file, '-C', dest_abs])
                            except subprocess.CalledProcessError as e:
                                print(f"Failed to unpack {filename}: {e}")
                                return m.group(0)
                        # Heuristic: if unpacking resulted in a single directory, use that as root
                        entries = os.listdir(dest_abs)
                        if len(entries) == 1 and os.path.isdir(os.path.join(dest_abs, entries[0])):
                            final_path = os.path.join(dest_abs, entries[0])
                        else:
                            final_path = dest_abs
                        unpacked_cache[key] = final_path
                    target_path = unpacked_cache[key]
                    rel_path = os.path.relpath(target_path, abs_zon_dir)
                    # Debug output to help diagnose path issues
                    print(f" {os.path.basename(path)}: {full_url} -> {rel_path}")
                    patched_any = True
                    return f'.path = "{rel_path}"'

                # Replace .url - now capturing git+ prefix separately
                new_content = re.sub(r'\.url\s*=\s*"(git\+)?(https?://[^"]+)"', repl, content)
                # Comment out .hash lines
                new_content = re.sub(r'(\.hash\s*=\s*"[^"]+",?)', r'// \1', new_content)
                if new_content != content:
                    print(f"Patched {path}")
                    with open(path, 'w') as f:
                        f.write(new_content)
    return patched_any


# Run multiple passes to handle nested dependencies
# Each pass may unpack new tarballs with their own .zon files
max_passes = 5
for pass_num in range(1, max_passes + 1):
    print(f"\n=== Pass {pass_num} ===")
    if not patch_all_zons():
        print(f"No changes in pass {pass_num}, stopping.")
        break
else:
    print(f"Warning: Reached maximum passes ({max_passes})")
EOF
python3 patch_zon.py
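# Sanity check (illustrative): report any dependency URLs that were not rewritten
grep -rn --include='*.zon' '\.url = "' build.zig.zon pkg vendor_unpacked 2>/dev/null \
    || echo "All .zon dependency URLs rewritten."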
%build
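# Keep Zig's cache writes inside the build tree rather than the build user's
# home, so the build is self-contained and reproducible.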
export HOME="%{_builddir}/%{name}-%{version}-build/.home"
mkdir -p "$HOME"
export ZIG_GLOBAL_CACHE_DIR="%{_builddir}/%{name}-%{version}-build/.zig-global-cache"
export ZIG_LOCAL_CACHE_DIR="%{_builddir}/%{name}-%{version}-build/.zig-local-cache"
mkdir -p "$ZIG_GLOBAL_CACHE_DIR" "$ZIG_LOCAL_CACHE_DIR"
# Use the local Zig 0.14.0 matching the build architecture
%ifarch x86_64
export PATH="$PWD/zig-linux-x86_64-0.14.0:$PATH"
%endif
%ifarch aarch64
export PATH="$PWD/zig-linux-aarch64-0.14.0:$PATH"
%endif
zig version
# Tell Zig to skip URL validation (helps with offline builds)
export ZIG_SKIP_VALIDATE_URLS=1
# Build an optimized binary for the package; switch to -Doptimize=Debug for
# faster local verification builds.
zig build -Doptimize=ReleaseFast -Demit-themes=false
%install
rm -rf %{buildroot}
install -D -m 0755 zig-out/bin/ghostty %{buildroot}%{_bindir}/ghostty
# Remove RPATH
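# (The built binary may carry an RPATH that rpmlint flags; chrpath strips it in place.)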
chrpath --delete %{buildroot}%{_bindir}/ghostty
if [ -d "zig-out/share" ]; then
    mkdir -p %{buildroot}%{_datadir}
    cp -a zig-out/share/* %{buildroot}%{_datadir}/
fi
%fdupes %{buildroot}%{_datadir}
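# Minimal smoke test (illustrative; assumes Ghostty's "+version" CLI action can
# run without a display inside the build chroot):
%check
%{buildroot}%{_bindir}/ghostty +version || :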
%files
%license LICENSE
%{_bindir}/ghostty
%{_datadir}/*
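# If rpmlint later asks for tighter ownership, the wildcard above can be split
# into explicit entries (illustrative; check zig-out/share for the actual layout):
#   %%{_datadir}/applications/*.desktop
#   %%{_datadir}/icons/hicolor/
#   %%{_datadir}/terminfo/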
%changelog