File rpds.py-0.22.3.obscpio of Package python-rpds-py

07070100000000000041ED0000000000000000000000026750735100000000000000000000000000000000000000000000001700000000rpds.py-0.22.3/.github07070100000001000081A40000000000000000000000016750735100000270000000000000000000000000000000000000002300000000rpds.py-0.22.3/.github/SECURITY.md# Security Policy

## Supported Versions

In general, only the latest released `rpds-py` version is supported and will receive updates.

## Reporting a Vulnerability

To report a security vulnerability, please send an email to `Julian+Security` at `GrayVines.com` with subject line `SECURITY (rpds-py)`.

I will do my best to respond within 48 hours to acknowledge the message and discuss further steps.

If the vulnerability is accepted, an advisory will be sent out via GitHub's security advisory functionality.

For non-sensitive discussion related to this policy itself, feel free to open an issue on the issue tracker.
07070100000002000081A400000000000000000000000167507351000000D0000000000000000000000000000000000000002600000000rpds.py-0.22.3/.github/dependabot.ymlversion: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
07070100000003000081A4000000000000000000000001675073510000004C000000000000000000000000000000000000002300000000rpds.py-0.22.3/.github/release.ymlchangelog:
  exclude:
    authors:
      - dependabot
      - pre-commit-ci
07070100000004000041ED0000000000000000000000026750735100000000000000000000000000000000000000000000002100000000rpds.py-0.22.3/.github/workflows07070100000005000081A40000000000000000000000016750735100001E27000000000000000000000000000000000000002800000000rpds.py-0.22.3/.github/workflows/CI.ymlname: CI

on:
  push:
    branches-ignore:
      - "wip*"
    tags:
      - "v[0-9].*"
  pull_request:
  schedule:
    # Daily at 5:33
    - cron: "33 5 * * *"
  workflow_dispatch:

jobs:
  list:
    runs-on: ubuntu-latest
    outputs:
      noxenvs: ${{ steps.noxenvs-matrix.outputs.noxenvs }}
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Set up nox
        uses: wntrblm/nox@2024.10.09
      - id: noxenvs-matrix
        run: |
          echo >>$GITHUB_OUTPUT noxenvs=$(
            nox --list-sessions --json | jq '[.[].session]'
          )

  test:
    needs: list
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        noxenv: ${{ fromJson(needs.list.outputs.noxenvs) }}

    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Install dependencies
        run: sudo apt-get update && sudo apt-get install -y libenchant-2-dev
        if: runner.os == 'Linux' && startsWith(matrix.noxenv, 'docs')
      - name: Install dependencies
        run: brew install enchant
        if: runner.os == 'macOS' && startsWith(matrix.noxenv, 'docs')
      - name: Set up Python
        uses: quansight-labs/setup-python@v5
        with:
          python-version: |
            3.9
            3.10
            3.11
            3.12
            3.13
            3.13t
            pypy3.9
            pypy3.10
          allow-prereleases: true

      - name: Set up uv
        uses: hynek/setup-cached-uv@v2
      - name: Set up nox
        uses: wntrblm/nox@2024.10.09

      - name: Run nox
        run: nox -s "${{ matrix.noxenv }}"

  manylinux:
    needs: test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [x86_64, x86, aarch64, armv7, s390x, ppc64le]
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: quansight-labs/setup-python@v5
        with:
          python-version: |
            3.9
            3.10
            3.11
            3.12
            3.13
            3.13t
            pypy3.9
            pypy3.10
          allow-prereleases: true
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t pypy3.9 pypy3.10'
          sccache: "true"
          manylinux: auto
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: dist-${{ github.job }}-${{ matrix.target }}
          path: dist

  musllinux:
    needs: test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - aarch64-unknown-linux-musl
          - i686-unknown-linux-musl
          - x86_64-unknown-linux-musl
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: quansight-labs/setup-python@v5
        with:
          python-version: |
            3.9
            3.10
            3.11
            3.12
            3.13
            3.13t
            pypy3.9
            pypy3.10
          allow-prereleases: true
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t pypy3.9 pypy3.10'
          manylinux: musllinux_1_2
          sccache: "true"
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: dist-${{ github.job }}-${{ matrix.target }}
          path: dist

  windows:
    needs: test
    runs-on: windows-latest
    strategy:
      matrix:
        target: [x64, x86] # x86 is not supported by pypy
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: quansight-labs/setup-python@v5
        with:
          python-version: |
            3.9
            3.10
            3.11
            3.12
            3.13
            ${{ matrix.target == 'x64' && 'pypy3.9' || '' }}
            ${{ matrix.target == 'x64' && 'pypy3.10' || '' }}
          allow-prereleases: true
          architecture: ${{ matrix.target }}
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13' --interpreter ${{ matrix.target == 'x64' && 'pypy3.9 pypy3.10' || '' }}
          sccache: "true"
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: dist-${{ github.job }}-${{ matrix.target }}
          path: dist

  windows-free-threaded:
    needs: test
    runs-on: windows-latest
    strategy:
      matrix:
        target: [x64, x86] # x86 is not supported by pypy
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: quansight-labs/setup-python@v5
        with:
          python-version: 3.13t
          allow-prereleases: true
          architecture: ${{ matrix.target }}
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist --interpreter '3.13t'
          sccache: "true"
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: dist-${{ github.job }}-${{ matrix.target }}-free-threaded
          path: dist

  macos:
    needs: test
    runs-on: macos-latest
    strategy:
      matrix:
        target: [x86_64, aarch64]
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: quansight-labs/setup-python@v5
        with:
          python-version: |
            3.9
            3.10
            3.11
            3.12
            3.13
            3.13t
            pypy3.9
            pypy3.10
          allow-prereleases: true
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t pypy3.9 pypy3.10'
          sccache: "true"
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: dist-${{ github.job }}-${{ matrix.target }}
          path: dist

  sdist:
    needs: test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: actions/setup-python@v5
        with:
          python-version: 3.13
      - name: Build an sdist
        uses: PyO3/maturin-action@v1
        with:
          command: sdist
          args: --out dist
      - name: Upload sdist
        uses: actions/upload-artifact@v4
        with:
          name: dist-${{ github.job }}
          path: dist

  release:
    needs: [manylinux, musllinux, windows, windows-free-threaded, macos, sdist]
    runs-on: ubuntu-latest
    if: "startsWith(github.ref, 'refs/tags/')"
    environment:
      name: PyPI
      url: https://pypi.org/p/rpds-py
    permissions:
      contents: write
      id-token: write

    steps:
      - uses: actions/download-artifact@v4
        with:
          pattern: dist-*
          merge-multiple: true
      - name: Publish to PyPI
        uses: PyO3/maturin-action@v1
        with:
          command: upload
          args: --non-interactive --skip-existing *
      - name: Create a GitHub Release
        if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
        uses: softprops/action-gh-release@v2
        with:
          files: |
            *
          generate_release_notes: true
07070100000006000081A40000000000000000000000016750735100000320000000000000000000000000000000000000002C00000000rpds.py-0.22.3/.github/workflows/zizmor.ymlname: GitHub Actions Security Analysis with zizmor 🌈

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["**"]

jobs:
  zizmor:
    runs-on: ubuntu-latest

    permissions:
      security-events: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1
      - name: Install zizmor
        run: cargo install zizmor
      - name: Run zizmor 🌈
        run: zizmor --format sarif . > results.sarif
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Upload SARIF file
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: results.sarif
          category: zizmor
07070100000007000081A4000000000000000000000001675073510000008D000000000000000000000000000000000000002200000000rpds.py-0.22.3/.github/zizmor.ymlrules:
  template-injection:
    ignore:
      # our matrix is dynamically generated via `nox -l` but with no user input
      - CI.yml:71:9
07070100000008000081A400000000000000000000000167507351000002D4000000000000000000000000000000000000001A00000000rpds.py-0.22.3/.gitignore/target

# Byte-compiled / optimized / DLL files
__pycache__/
.pytest_cache/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
.venv/
env/
bin/
build/
develop-eggs/
dist/
eggs/
lib/
lib64/
parts/
sdist/
var/
include/
man/
venv/
*.egg-info/
.installed.cfg
*.egg

# Installer logs
pip-log.txt
pip-delete-this-directory.txt
pip-selfcheck.json

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml

# Translations
*.mo

# Mr Developer
.mr.developer.cfg
.project
.pydevproject

# Rope
.ropeproject

# Django stuff:
*.log
*.pot

.DS_Store

# Sphinx documentation
docs/_build/

# PyCharm
.idea/

# VSCode
.vscode/

# Pyenv
.python-version

# User defined
/dirhtml
_cache

TODO
07070100000009000081A4000000000000000000000001675073510000036E000000000000000000000000000000000000002700000000rpds.py-0.22.3/.pre-commit-config.yamlci:
  skip:
    # pre-commit.ci doesn't have Rust installed
    - fmt
    - clippy
    - zizmor

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-ast
      - id: check-docstring-first
      - id: check-toml
      - id: check-vcs-permalinks
      - id: check-yaml
      - id: debug-statements
      - id: end-of-file-fixer
      - id: mixed-line-ending
        args: [--fix, lf]
      - id: trailing-whitespace
  - repo: https://github.com/doublify/pre-commit-rust
    rev: "v1.0"
    hooks:
      - id: fmt
      - id: clippy
  - repo: https://github.com/psf/black
    rev: 24.10.0
    hooks:
      - id: black
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: "v4.0.0-alpha.8"
    hooks:
      - id: prettier
  - repo: https://github.com/woodruffw/zizmor
    rev: v0.6.0
    hooks:
      - id: zizmor
0707010000000A000081A400000000000000000000000167507351000000F1000000000000000000000000000000000000002000000000rpds.py-0.22.3/.readthedocs.ymlversion: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.11"
    rust: "1.70"

sphinx:
  builder: dirhtml
  configuration: docs/conf.py
  fail_on_warning: true

formats: all

python:
  install:
    - requirements: docs/requirements.txt
0707010000000B000081A400000000000000000000000167507351000015A1000000000000000000000000000000000000001A00000000rpds.py-0.22.3/Cargo.lock# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "archery"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eae2ed21cd55021f05707a807a5fc85695dafb98832921f6cfa06db67ca5b869"
dependencies = [
 "triomphe",
]

[[package]]
name = "autocfg"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"

[[package]]
name = "cc"
version = "1.0.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5"

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"

[[package]]
name = "indoc"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"

[[package]]
name = "libc"
version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"

[[package]]
name = "memoffset"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
dependencies = [
 "autocfg",
]

[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"

[[package]]
name = "portable-atomic"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"

[[package]]
name = "proc-macro2"
version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = [
 "unicode-ident",
]

[[package]]
name = "pyo3"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e484fd2c8b4cb67ab05a318f1fd6fa8f199fcc30819f08f07d200809dba26c15"
dependencies = [
 "cfg-if",
 "indoc",
 "libc",
 "memoffset",
 "once_cell",
 "portable-atomic",
 "pyo3-build-config",
 "pyo3-ffi",
 "pyo3-macros",
 "unindent",
]

[[package]]
name = "pyo3-build-config"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc0e0469a84f208e20044b98965e1561028180219e35352a2afaf2b942beff3b"
dependencies = [
 "once_cell",
 "python3-dll-a",
 "target-lexicon",
]

[[package]]
name = "pyo3-ffi"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb1547a7f9966f6f1a0f0227564a9945fe36b90da5a93b3933fc3dc03fae372d"
dependencies = [
 "libc",
 "pyo3-build-config",
]

[[package]]
name = "pyo3-macros"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdb6da8ec6fa5cedd1626c886fc8749bdcbb09424a86461eb8cdf096b7c33257"
dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
 "quote",
 "syn",
]

[[package]]
name = "pyo3-macros-backend"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38a385202ff5a92791168b1136afae5059d3ac118457bb7bc304c197c2d33e7d"
dependencies = [
 "heck",
 "proc-macro2",
 "pyo3-build-config",
 "quote",
 "syn",
]

[[package]]
name = "python3-dll-a"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b9e268ee1be609e93a13eb06839f68f67e5fe0fb4049834d261c2d5091c1b6d"
dependencies = [
 "cc",
]

[[package]]
name = "quote"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
 "proc-macro2",
]

[[package]]
name = "rpds"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0e15515d3ce3313324d842629ea4905c25a13f81953eadb88f85516f59290a4"
dependencies = [
 "archery",
]

[[package]]
name = "rpds-py"
version = "0.22.3"
dependencies = [
 "archery",
 "pyo3",
 "rpds",
]

[[package]]
name = "syn"
version = "2.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "201fcda3845c23e8212cd466bfebf0bd20694490fc0356ae8e428e0824a915a6"
dependencies = [
 "proc-macro2",
 "quote",
 "unicode-ident",
]

[[package]]
name = "target-lexicon"
version = "0.12.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f"

[[package]]
name = "triomphe"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369"

[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

[[package]]
name = "unindent"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
0707010000000C000081A40000000000000000000000016750735100000193000000000000000000000000000000000000001A00000000rpds.py-0.22.3/Cargo.toml[package]
name = "rpds-py"
version = "0.22.3"
edition = "2021"

[lib]
name = "rpds"
crate-type = ["cdylib"]

[dependencies]
rpds = "1.1.0"
archery = "1.2.1"

[dependencies.pyo3]
version = "0.23.3"
# To build extension for PyPy on Windows, "generate-import-lib" is needed:
# https://github.com/PyO3/maturin-action/issues/267#issuecomment-2106844429
features = ["extension-module", "generate-import-lib"]
0707010000000D000081A40000000000000000000000016750735100000421000000000000000000000000000000000000001700000000rpds.py-0.22.3/LICENSECopyright (c) 2023 Julian Berman

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
0707010000000E000081A40000000000000000000000016750735100000A8E000000000000000000000000000000000000001A00000000rpds.py-0.22.3/README.rst===========
``rpds.py``
===========

|PyPI| |Pythons| |CI|

.. |PyPI| image:: https://img.shields.io/pypi/v/rpds-py.svg
  :alt: PyPI version
  :target: https://pypi.org/project/rpds-py/

.. |Pythons| image:: https://img.shields.io/pypi/pyversions/rpds-py.svg
  :alt: Supported Python versions
  :target: https://pypi.org/project/rpds-py/

.. |CI| image:: https://github.com/crate-py/rpds/workflows/CI/badge.svg
  :alt: Build status
  :target: https://github.com/crate-py/rpds/actions?query=workflow%3ACI

.. |ReadTheDocs| image:: https://readthedocs.org/projects/rpds/badge/?version=stable&style=flat
   :alt: ReadTheDocs status
   :target: https://rpds.readthedocs.io/en/stable/


Python bindings to the `Rust rpds crate <https://docs.rs/rpds/>`_ for persistent data structures.

What's here is quite minimal (in full transparency, it was written initially to support replacing ``pyrsistent`` in the `referencing library <https://github.com/python-jsonschema/referencing>`_).
If you see something missing (which is very likely), a PR is definitely welcome to add it.

Installation
------------

The distribution on PyPI is named ``rpds.py`` (equivalently ``rpds-py``), and thus can be installed via e.g.:

.. code:: sh

    $ pip install rpds-py

Note that if you install ``rpds-py`` from source, you will need a Rust toolchain installed, as it is a build-time dependency.
An example of how to do so in a ``Dockerfile`` can be found `here <https://github.com/bowtie-json-schema/bowtie/blob/e77fd93598cb6e7dc1b8b1f53c00e5aa410c201a/implementations/python-jsonschema/Dockerfile#L1-L8>`_.

If you believe you are on a common platform which should have wheels built (i.e. one which should not need to compile from source), feel free to file an issue or pull request modifying the GitHub Actions workflow used here to build wheels via ``maturin``.

Usage
-----

Methods in general are named similarly to their ``rpds`` counterparts (rather than ``pyrsistent``\ 's conventions, though probably a full drop-in ``pyrsistent``\ -compatible wrapper module is a good addition at some point).

.. code:: python

    >>> from rpds import HashTrieMap, HashTrieSet, List

    >>> m = HashTrieMap({"foo": "bar", "baz": "quux"})
    >>> m.insert("spam", 37) == HashTrieMap({"foo": "bar", "baz": "quux", "spam": 37})
    True
    >>> m.remove("foo") == HashTrieMap({"baz": "quux"})
    True

    >>> s = HashTrieSet({"foo", "bar", "baz", "quux"})
    >>> s.insert("spam") == HashTrieSet({"foo", "bar", "baz", "quux", "spam"})
    True
    >>> s.remove("foo") == HashTrieSet({"bar", "baz", "quux"})
    True

    >>> L = List([1, 3, 5])
    >>> L.push_front(-1) == List([-1, 1, 3, 5])
    True
    >>> L.rest == List([3, 5])
    True
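
There is also a ``Queue`` (declared in ``rpds.pyi``).
A minimal sketch of its use, assuming FIFO semantics matching the underlying ``rpds`` queue (``peek`` shows the front element, ``enqueue`` appends to the back):

.. code:: python

    >>> from rpds import Queue

    >>> q = Queue([1, 2, 3])
    >>> q.peek
    1
    >>> q.enqueue(4).peek
    1
    >>> q.is_empty
    False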
0707010000000F000041ED0000000000000000000000026750735100000000000000000000000000000000000000000000001400000000rpds.py-0.22.3/docs07070100000010000081A400000000000000000000000167507351000000A9000000000000000000000000000000000000001C00000000rpds.py-0.22.3/docs/api.rstAPI Reference
=============

.. automodule:: rpds
   :members:
   :undoc-members:
   :imported-members:
   :special-members: __iter__, __getitem__, __len__, __rmatmul__
07070100000011000081A4000000000000000000000001675073510000068D000000000000000000000000000000000000001C00000000rpds.py-0.22.3/docs/conf.pyimport importlib.metadata
import re

from url import URL

GITHUB = URL.parse("https://github.com/")
HOMEPAGE = GITHUB / "crate-py/rpds"

project = "rpds.py"
author = "Julian Berman"
copyright = f"2023, {author}"

release = importlib.metadata.version("rpds.py")
version = release.partition("-")[0]

language = "en"
default_role = "any"

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosectionlabel",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.extlinks",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",
    "sphinx.ext.todo",
    "sphinx.ext.viewcode",
    "sphinx_copybutton",
    "sphinxcontrib.spelling",
    "sphinxext.opengraph",
]

pygments_style = "lovelace"
pygments_dark_style = "one-dark"

html_theme = "furo"


def entire_domain(host):
    return r"http.?://" + re.escape(host) + r"($|/.*)"


linkcheck_ignore = [
    entire_domain("img.shields.io"),
    f"{GITHUB}.*#.*",
    str(HOMEPAGE / "actions"),
    str(HOMEPAGE / "workflows/CI/badge.svg"),
]

# = Extensions =

# -- autodoc --

autodoc_default_options = {
    "members": True,
    "member-order": "bysource",
}

# -- autosectionlabel --

autosectionlabel_prefix_document = True

# -- intersphinx --

intersphinx_mapping = {
    "python": ("https://docs.python.org/", None),
}

# -- extlinks --

extlinks = {
    "gh": (str(HOMEPAGE) + "/%s", None),
    "github": (str(GITHUB) + "/%s", None),
}
extlinks_detect_hardcoded_links = True

# -- sphinx-copybutton --

copybutton_prompt_text = r">>> |\.\.\. |\$"
copybutton_prompt_is_regexp = True

# -- sphinxcontrib-spelling --

spelling_word_list_filename = "spelling-wordlist.txt"
spelling_show_suggestions = True
07070100000012000081A40000000000000000000000016750735100000806000000000000000000000000000000000000001E00000000rpds.py-0.22.3/docs/index.rstPython bindings to the `Rust rpds crate <https://docs.rs/rpds/>`_ for persistent data structures.

What's here is quite minimal (in full transparency, it was written initially to support replacing ``pyrsistent`` in the `referencing library <https://github.com/python-jsonschema/referencing>`_).
If you see something missing (which is very likely), a PR is definitely welcome to add it.

Installation
------------

The distribution on PyPI is named ``rpds.py`` (equivalently ``rpds-py``), and thus can be installed via e.g.:

.. code:: sh

    $ pip install rpds-py

Note that if you install ``rpds-py`` from source, you will need a Rust toolchain installed, as it is a build-time dependency.
An example of how to do so in a ``Dockerfile`` can be found `here <https://github.com/bowtie-json-schema/bowtie/blob/e77fd93598cb6e7dc1b8b1f53c00e5aa410c201a/implementations/python-jsonschema/Dockerfile#L1-L8>`_.

If you believe you are on a common platform which should have wheels built (i.e. one which should not need to compile from source), feel free to file an issue or pull request modifying the GitHub Actions workflow used here to build wheels via ``maturin``.

Usage
-----

Methods in general are named similarly to their ``rpds`` counterparts (rather than ``pyrsistent``\ 's conventions, though probably a full drop-in ``pyrsistent``\ -compatible wrapper module is a good addition at some point).

.. code:: python

    >>> from rpds import HashTrieMap, HashTrieSet, List

    >>> m = HashTrieMap({"foo": "bar", "baz": "quux"})
    >>> m.insert("spam", 37) == HashTrieMap({"foo": "bar", "baz": "quux", "spam": 37})
    True
    >>> m.remove("foo") == HashTrieMap({"baz": "quux"})
    True

    >>> s = HashTrieSet({"foo", "bar", "baz", "quux"})
    >>> s.insert("spam") == HashTrieSet({"foo", "bar", "baz", "quux", "spam"})
    True
    >>> s.remove("foo") == HashTrieSet({"bar", "baz", "quux"})
    True

    >>> L = List([1, 3, 5])
    >>> L.push_front(-1) == List([-1, 1, 3, 5])
    True
    >>> L.rest == List([3, 5])
    True


.. toctree::
    :glob:
    :hidden:

    api
07070100000013000081A4000000000000000000000001675073510000007E000000000000000000000000000000000000002400000000rpds.py-0.22.3/docs/requirements.infile:.#egg=rpds-py
furo
pygments-github-lexers
sphinx-copybutton
sphinx>5
sphinxcontrib-spelling>5
sphinxext-opengraph
url.py
07070100000014000081A400000000000000000000000167507351000006CF000000000000000000000000000000000000002500000000rpds.py-0.22.3/docs/requirements.txt# This file was autogenerated by uv via the following command:
#    uv pip compile --output-file /Users/julian/Development/rpds.py/docs/requirements.txt docs/requirements.in
alabaster==1.0.0
    # via sphinx
babel==2.16.0
    # via sphinx
beautifulsoup4==4.12.3
    # via furo
certifi==2024.8.30
    # via requests
charset-normalizer==3.4.0
    # via requests
docutils==0.21.2
    # via sphinx
furo==2024.8.6
    # via -r docs/requirements.in
idna==3.10
    # via requests
imagesize==1.4.1
    # via sphinx
jinja2==3.1.4
    # via sphinx
markupsafe==3.0.2
    # via jinja2
packaging==24.1
    # via sphinx
pyenchant==3.2.2
    # via sphinxcontrib-spelling
pygments==2.18.0
    # via
    #   furo
    #   pygments-github-lexers
    #   sphinx
pygments-github-lexers==0.0.5
    # via -r docs/requirements.in
requests==2.32.3
    # via sphinx
file:.#egg=rpds-py
    # via -r docs/requirements.in
snowballstemmer==2.2.0
    # via sphinx
soupsieve==2.6
    # via beautifulsoup4
sphinx==8.1.3
    # via
    #   -r docs/requirements.in
    #   furo
    #   sphinx-basic-ng
    #   sphinx-copybutton
    #   sphinxcontrib-spelling
    #   sphinxext-opengraph
sphinx-basic-ng==1.0.0b2
    # via furo
sphinx-copybutton==0.5.2
    # via -r docs/requirements.in
sphinxcontrib-applehelp==2.0.0
    # via sphinx
sphinxcontrib-devhelp==2.0.0
    # via sphinx
sphinxcontrib-htmlhelp==2.1.0
    # via sphinx
sphinxcontrib-jsmath==1.0.1
    # via sphinx
sphinxcontrib-qthelp==2.0.0
    # via sphinx
sphinxcontrib-serializinghtml==2.0.0
    # via sphinx
sphinxcontrib-spelling==8.0.0
    # via -r docs/requirements.in
sphinxext-opengraph==0.9.1
    # via -r docs/requirements.in
url-py==0.13.0
    # via -r docs/requirements.in
urllib3==2.2.3
    # via requests
07070100000015000081A40000000000000000000000016750735100000013000000000000000000000000000000000000002A00000000rpds.py-0.22.3/docs/spelling-wordlist.txtiter
len
toolchain
07070100000016000081A4000000000000000000000001675073510000134D000000000000000000000000000000000000001A00000000rpds.py-0.22.3/noxfile.pyfrom pathlib import Path
from tempfile import TemporaryDirectory
import os

import nox

ROOT = Path(__file__).parent
PYPROJECT = ROOT / "pyproject.toml"
DOCS = ROOT / "docs"
TESTS = ROOT / "tests"

REQUIREMENTS = dict(
    docs=DOCS / "requirements.txt",
    tests=TESTS / "requirements.txt",
)
REQUIREMENTS_IN = [  # this is actually ordered, as files depend on each other
    (path.parent / f"{path.stem}.in", path) for path in REQUIREMENTS.values()
]

SUPPORTED = ["3.9", "3.10", "pypy3.10", "3.11", "3.12", "3.13", "3.13t"]
LATEST = "3.13"

nox.options.default_venv_backend = "uv|virtualenv"
nox.options.sessions = []


def session(default=True, python=LATEST, **kwargs):  # noqa: D103
    def _session(fn):
        if default:
            nox.options.sessions.append(kwargs.get("name", fn.__name__))
        return nox.session(python=python, **kwargs)(fn)

    return _session


@session(python=SUPPORTED)
def tests(session):
    """
    Run the test suite with a corresponding Python version.
    """
    # Really we want --profile=test here (for
    # https://github.com/crate-py/rpds/pull/87#issuecomment-2291409297)
    # but it produces strange symbol errors saying:
    #   dynamic module does not define module export function (PyInit_rpds)
    # so OK, dev it is.
    session.install(
        "--config-settings",
        "build-args=--profile=dev",
        "--no-cache",
        "-r",
        REQUIREMENTS["tests"],
    )

    if session.posargs and session.posargs[0] == "coverage":
        if len(session.posargs) > 1 and session.posargs[1] == "github":
            github = Path(os.environ["GITHUB_STEP_SUMMARY"])
        else:
            github = None

        session.install("coverage[toml]")
        session.run("coverage", "run", "-m", "pytest", TESTS)
        if github is None:
            session.run("coverage", "report")
        else:
            with github.open("a") as summary:
                summary.write("### Coverage\n\n")
                summary.flush()  # without a flush, output seems out of order.
                session.run(
                    "coverage",
                    "report",
                    "--format=markdown",
                    stdout=summary,
                )
    else:
        session.run("pytest", "--parallel-threads=10", *session.posargs, TESTS)


@session()
def audit(session):
    """
    Audit dependencies for vulnerabilities.
    """
    session.install("pip-audit", ROOT)
    session.run("python", "-m", "pip_audit")


@session(tags=["build"])
def build(session):
    """
    Build a distribution suitable for PyPI and check its validity.
    """
    session.install("build", "twine")
    with TemporaryDirectory() as tmpdir:
        session.run("python", "-m", "build", ROOT, "--outdir", tmpdir)
        session.run("twine", "check", "--strict", tmpdir + "/*")


@session(tags=["style"])
def style(session):
    """
    Check Python code style.
    """
    session.install("ruff")
    session.run("ruff", "check", TESTS, __file__)


@session()
def typing(session):
    """
    Check the codebase using pyright by type checking the test suite.
    """
    session.install("pyright", ROOT, "-r", REQUIREMENTS["tests"])
    session.run("pyright", TESTS)


@session(tags=["docs"])
@nox.parametrize(
    "builder",
    [
        nox.param(name, id=name)
        for name in [
            "dirhtml",
            "doctest",
            "linkcheck",
            "man",
            "spelling",
        ]
    ],
)
def docs(session, builder):
    """
    Build the documentation using a specific Sphinx builder.
    """
    session.install("-r", REQUIREMENTS["docs"])
    with TemporaryDirectory() as tmpdir_str:
        tmpdir = Path(tmpdir_str)
        argv = ["-n", "-T", "-W"]
        if builder != "spelling":
            argv += ["-q"]
        posargs = session.posargs or [tmpdir / builder]
        session.run(
            "python",
            "-m",
            "sphinx",
            "-b",
            builder,
            DOCS,
            *argv,
            *posargs,
        )


@session(tags=["docs", "style"], name="docs(style)")
def docs_style(session):
    """
    Check the documentation style.
    """
    session.install(
        "doc8",
        "pygments",
        "pygments-github-lexers",
    )
    session.run("python", "-m", "doc8", "--config", PYPROJECT, DOCS)


@session(default=False)
def requirements(session):
    """
    Update the project's pinned requirements.

    You should commit the result afterwards.
    """
    if session.venv_backend == "uv":
        cmd = ["uv", "pip", "compile"]
    else:
        session.install("pip-tools")
        cmd = ["pip-compile", "--resolver", "backtracking", "--strip-extras"]

    for each, out in REQUIREMENTS_IN:
        # otherwise output files end up with silly absolute path comments...
        relative = each.relative_to(ROOT)
        session.run(*cmd, "--upgrade", "--output-file", out, relative)
07070100000017000081A400000000000000000000000167507351000011F5000000000000000000000000000000000000001E00000000rpds.py-0.22.3/pyproject.toml[build-system]
requires = ["maturin>=1.2,<2.0"]
build-backend = "maturin"

[project]
name = "rpds-py"
description = "Python bindings to Rust's persistent data structures (rpds)"
requires-python = ">=3.9"
readme = "README.rst"
keywords = ["data structures", "rust", "persistent"]
authors = [
  { name = "Julian Berman", email = "Julian+rpds@GrayVines.com" },
]
classifiers = [
  "Development Status :: 3 - Alpha",
  "Intended Audience :: Developers",
  "License :: OSI Approved :: MIT License",
  "Operating System :: OS Independent",
  "Programming Language :: Rust",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  "Programming Language :: Python :: 3.13",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: Implementation :: CPython",
  "Programming Language :: Python :: Implementation :: PyPy",
]
dynamic = ["version"]

[project.urls]
Documentation = "https://rpds.readthedocs.io/"
Homepage = "https://github.com/crate-py/rpds"
Issues = "https://github.com/crate-py/rpds/issues/"
Funding = "https://github.com/sponsors/Julian"
Tidelift = "https://tidelift.com/subscription/pkg/pypi-rpds-py?utm_source=pypi-rpds-py&utm_medium=referral&utm_campaign=pypi-link"
Source = "https://github.com/crate-py/rpds"
Upstream = "https://github.com/orium/rpds"

[tool.black]
line-length = 79

[tool.coverage.html]
show_contexts = true
skip_covered = false

[tool.coverage.run]
branch = true
dynamic_context = "test_function"

[tool.coverage.report]
exclude_also = [
  "if TYPE_CHECKING:",
  "\\s*\\.\\.\\.\\s*",
]
fail_under = 100
show_missing = true
skip_covered = true

[tool.doc8]
ignore = [
    "D000",  # see PyCQA/doc8#125
    "D001",  # one sentence per line, so max length doesn't make sense
]

[tool.maturin]
features = ["pyo3/extension-module"]

[tool.pyright]
reportUnnecessaryTypeIgnoreComment = true
strict = ["**/*"]
exclude = [
    "**/tests/__init__.py",
    "**/tests/test_*.py",
]

[tool.ruff]
line-length = 79

[tool.ruff.lint]
select = ["ALL"]
ignore = [
  "A001",  # It's fine to shadow builtins
  "A002",
  "A003",
  "ARG",  # This is all wrong whenever an interface is involved
  "ANN",  # Just let the type checker do this
  "B006",  # Mutable arguments require care but are OK if you don't abuse them
  "B008",  # It's totally OK to call functions for default arguments.
  "B904",  # raise SomeException(...) is fine.
  "B905",  # No need for explicit strict, this is simply zip's default behavior
  "C408",  # Calling dict is fine when it saves quoting the keys
  "C901",  # Not really something to focus on
  "D105",  # It's fine to not have docstrings for magic methods.
  "D107",  # __init__ especially doesn't need a docstring
  "D200",  # This rule makes diffs uglier when expanding docstrings
  "D203",  # No blank lines before docstrings.
  "D212",  # Start docstrings on the second line.
  "D400",  # This rule misses sassy docstrings ending with ! or ?
  "D401",  # This rule is too flaky.
  "D406",  # Section headers should end with a colon not a newline
  "D407",  # Underlines aren't needed
  "D412",  # Plz spaces after section headers
  "EM101",  # These don't bother me, it's fine there's some duplication.
  "EM102",
  "FBT",  # It's worth avoiding boolean args but I don't care to enforce it
  "FIX",  # Yes thanks, if I could it wouldn't be there
  "N",  # These naming rules are silly
  "PLR0912",  # These metrics are fine to be aware of but not to enforce
  "PLR0913",
  "PLR0915",
  "PLW2901",  # Shadowing for loop variables is occasionally fine.
  "PT006",  # pytest parametrize takes strings as well
  "PYI025",  # wat, I'm not confused, thanks.
  "RET502",  # Returning None implicitly is fine
  "RET503",
  "RET505",  # These push you to use `if` instead of `elif`, but for no reason
  "RET506",
  "RSE102",  # Ha, what, who even knew you could leave the parens off. But no.
  "SIM300", # Not sure what heuristic this uses, but it's easily incorrect
  "SLF001",  # Private usage within this package itself is fine
  "TD",  # These TODO style rules are also silly
  "UP007",  # We support 3.9
]

[tool.ruff.lint.flake8-pytest-style]
mark-parentheses = false

[tool.ruff.lint.flake8-quotes]
docstring-quotes = "double"

[tool.ruff.lint.isort]
combine-as-imports = true
from-first = true
known-first-party = ["rpds"]

[tool.ruff.lint.per-file-ignores]
"noxfile.py" = ["ANN", "D100", "S101", "T201"]
"docs/*" = ["ANN", "D", "INP001"]
"tests/*" = ["ANN", "B018", "D", "PLR", "RUF012", "S", "SIM", "TRY"]
07070100000018000081A40000000000000000000000016750735100000A0A000000000000000000000000000000000000001800000000rpds.py-0.22.3/rpds.pyifrom typing import (
    ItemsView,
    Iterable,
    Iterator,
    KeysView,
    Mapping,
    TypeVar,
    ValuesView,
)

_T = TypeVar("_T")
_KT_co = TypeVar("_KT_co", covariant=True)
_VT_co = TypeVar("_VT_co", covariant=True)
_KU_co = TypeVar("_KU_co", covariant=True)
_VU_co = TypeVar("_VU_co", covariant=True)

class HashTrieMap(Mapping[_KT_co, _VT_co]):
    def __init__(
        self,
        value: Mapping[_KT_co, _VT_co] | Iterable[tuple[_KT_co, _VT_co]] = {},
        **kwds: Mapping[_KT_co, _VT_co],
    ): ...
    def __getitem__(self, key: _KT_co) -> _VT_co: ...
    def __iter__(self) -> Iterator[_KT_co]: ...
    def __len__(self) -> int: ...
    def discard(self, key: _KT_co) -> HashTrieMap[_KT_co, _VT_co]: ...
    def items(self) -> ItemsView[_KT_co, _VT_co]: ...
    def keys(self) -> KeysView[_KT_co]: ...
    def values(self) -> ValuesView[_VT_co]: ...
    def remove(self, key: _KT_co) -> HashTrieMap[_KT_co, _VT_co]: ...
    def insert(
        self,
        key: _KT_co,
        val: _VT_co,
    ) -> HashTrieMap[_KT_co, _VT_co]: ...
    def update(
        self,
        *args: Mapping[_KU_co, _VU_co] | Iterable[tuple[_KU_co, _VU_co]],
    ) -> HashTrieMap[_KT_co | _KU_co, _VT_co | _VU_co]: ...
    @classmethod
    def convert(
        cls,
        value: Mapping[_KT_co, _VT_co] | Iterable[tuple[_KT_co, _VT_co]],
    ) -> HashTrieMap[_KT_co, _VT_co]: ...
    @classmethod
    def fromkeys(
        cls,
        keys: Iterable[_KT_co],
        value: _VT_co = None,
    ) -> HashTrieMap[_KT_co, _VT_co]: ...

class HashTrieSet(frozenset[_T]):
    def __init__(self, value: Iterable[_T] = ()): ...
    def __iter__(self) -> Iterator[_T]: ...
    def __len__(self) -> int: ...
    def discard(self, value: _T) -> HashTrieSet[_T]: ...
    def remove(self, value: _T) -> HashTrieSet[_T]: ...
    def insert(self, value: _T) -> HashTrieSet[_T]: ...
    def update(self, *args: Iterable[_T]) -> HashTrieSet[_T]: ...

class List(Iterable[_T]):
    def __init__(self, value: Iterable[_T] = (), *more: _T): ...
    def __iter__(self) -> Iterator[_T]: ...
    def __len__(self) -> int: ...
    def push_front(self, value: _T) -> List[_T]: ...
    def drop_first(self) -> List[_T]: ...

class Queue(Iterable[_T]):
    def __init__(self, value: Iterable[_T] = (), *more: _T): ...
    def __iter__(self) -> Iterator[_T]: ...
    def __len__(self) -> int: ...
    def enqueue(self, value: _T) -> Queue[_T]: ...
    def dequeue(self) -> Queue[_T]: ...
    @property
    def is_empty(self) -> bool: ...
    @property
    def peek(self) -> _T: ...
07070100000019000041ED0000000000000000000000026750735100000000000000000000000000000000000000000000001300000000rpds.py-0.22.3/src0707010000001A000081A4000000000000000000000001675073510000A8B5000000000000000000000000000000000000001A00000000rpds.py-0.22.3/src/lib.rsuse pyo3::exceptions::{PyIndexError, PyTypeError};
use pyo3::pyclass::CompareOp;
use pyo3::types::{PyDict, PyIterator, PyTuple, PyType};
use pyo3::{exceptions::PyKeyError, types::PyMapping, types::PyTupleMethods};
use pyo3::{prelude::*, AsPyPointer, BoundObject, PyTypeInfo};
use rpds::{
    HashTrieMap, HashTrieMapSync, HashTrieSet, HashTrieSetSync, List, ListSync, Queue, QueueSync,
};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

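// Mix the bits of a single entry hash before it is XOR-folded into the
// collection hash; this mirrors the per-element scrambling CPython uses for
// frozenset hashing (see the link in `__hash__` below), so that similar
// element hashes don't simply cancel out under XOR.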
fn hash_shuffle_bits(h: usize) -> usize {
    ((h ^ 89869747) ^ (h << 16)).wrapping_mul(3644798167)
}

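// A Python object wrapped for use as a map/set key: the Python hash is
// computed once at extraction time and cached in `hash`, while equality is
// delegated back to the wrapped object's `__eq__`.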
#[derive(Debug)]
struct Key {
    hash: isize,
    inner: PyObject,
}

impl<'py> IntoPyObject<'py> for Key {
    type Target = PyAny;
    type Output = Bound<'py, Self::Target>;
    type Error = std::convert::Infallible;

    fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
        Ok(self.inner.into_bound(py))
    }
}

impl<'a, 'py> IntoPyObject<'py> for &'a Key {
    type Target = PyAny;
    type Output = Borrowed<'a, 'py, Self::Target>;
    type Error = std::convert::Infallible;

    fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
        Ok(self.inner.bind_borrowed(py))
    }
}

impl Hash for Key {
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write_isize(self.hash);
    }
}

impl Eq for Key {}

impl PartialEq for Key {
    fn eq(&self, other: &Self) -> bool {
        Python::with_gil(|py| {
            self.inner
                .call_method1(py, "__eq__", (&other.inner,))
                .and_then(|value| value.extract(py))
                .expect("__eq__ failed!")
        })
    }
}

impl Key {
    fn clone_ref(&self, py: Python<'_>) -> Self {
        Key {
            hash: self.hash,
            inner: self.inner.clone_ref(py),
        }
    }
}

unsafe impl AsPyPointer for Key {
    fn as_ptr(&self) -> *mut pyo3::ffi::PyObject {
        self.inner.as_ptr()
    }
}

impl<'source> FromPyObject<'source> for Key {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        Ok(Key {
            hash: ob.hash()?,
            inner: ob.clone().unbind(),
        })
    }
}

#[repr(transparent)]
#[pyclass(name = "HashTrieMap", module = "rpds", frozen, mapping)]
struct HashTrieMapPy {
    inner: HashTrieMapSync<Key, PyObject>,
}

impl From<HashTrieMapSync<Key, PyObject>> for HashTrieMapPy {
    fn from(map: HashTrieMapSync<Key, PyObject>) -> Self {
        HashTrieMapPy { inner: map }
    }
}

impl<'source> FromPyObject<'source> for HashTrieMapPy {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        let mut ret = HashTrieMap::new_sync();
        if let Ok(mapping) = ob.downcast::<PyMapping>() {
            for each in mapping.items()?.iter() {
                let (k, v): (Key, PyObject) = each.extract()?;
                ret.insert_mut(k, v);
            }
        } else {
            for each in ob.try_iter()? {
                let (k, v) = each?.extract()?;
                ret.insert_mut(k, v);
            }
        }
        Ok(HashTrieMapPy { inner: ret })
    }
}

#[pymethods]
impl HashTrieMapPy {
    #[new]
    #[pyo3(signature = (value=None, ** kwds))]
    fn init(value: Option<HashTrieMapPy>, kwds: Option<&Bound<'_, PyDict>>) -> PyResult<Self> {
        let mut map: HashTrieMapPy;
        if let Some(value) = value {
            map = value;
        } else {
            map = HashTrieMapPy {
                inner: HashTrieMap::new_sync(),
            };
        }
        if let Some(kwds) = kwds {
            for (k, v) in kwds {
                map.inner.insert_mut(Key::extract_bound(&k)?, v.into());
            }
        }
        Ok(map)
    }

    fn __contains__(&self, key: Key) -> bool {
        self.inner.contains_key(&key)
    }

    fn __iter__(slf: PyRef<'_, Self>) -> KeysIterator {
        KeysIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __getitem__(&self, key: Key, py: Python) -> PyResult<PyObject> {
        match self.inner.get(&key) {
            Some(value) => Ok(value.clone_ref(py)),
            None => Err(PyKeyError::new_err(key)),
        }
    }

    fn __len__(&self) -> usize {
        self.inner.size()
    }

    fn __repr__(&self, py: Python) -> String {
        let contents = self.inner.into_iter().map(|(k, v)| {
            format!(
                "{}: {}",
                k.inner
                    .call_method0(py, "__repr__")
                    .and_then(|r| r.extract(py))
                    .unwrap_or("<repr error>".to_owned()),
                v.call_method0(py, "__repr__")
                    .and_then(|r| r.extract(py))
                    .unwrap_or("<repr error>".to_owned())
            )
        });
        format!(
            "HashTrieMap({{{}}})",
            contents.collect::<Vec<_>>().join(", ")
        )
    }

    fn __richcmp__<'py>(&self, other: &Self, op: CompareOp, py: Python<'py>) -> PyResult<PyObject> {
        match op {
            CompareOp::Eq => (self.inner.size() == other.inner.size()
                && self
                    .inner
                    .iter()
                    .map(|(k1, v1)| (v1, other.inner.get(k1)))
                    .map(|(v1, v2)| v1.bind(py).eq(v2))
                    .all(|r| r.unwrap_or(false)))
            .into_pyobject(py)
            .map_err(Into::into)
            .map(BoundObject::into_any)
            .map(BoundObject::unbind),
            CompareOp::Ne => (self.inner.size() != other.inner.size()
                || self
                    .inner
                    .iter()
                    .map(|(k1, v1)| (v1, other.inner.get(k1)))
                    .map(|(v1, v2)| v1.bind(py).ne(v2))
                    .any(|r| r.unwrap_or(true)))
            .into_pyobject(py)
            .map_err(Into::into)
            .map(BoundObject::into_any)
            .map(BoundObject::unbind),
            _ => Ok(py.NotImplemented()),
        }
    }

    fn __hash__(&self, py: Python) -> PyResult<isize> {
        // modified from https://github.com/python/cpython/blob/d69529d31ccd1510843cfac1ab53bb8cb027541f/Objects/setobject.c#L715

        let mut hash_val = self
            .inner
            .iter()
            .map(|(key, val)| {
                let mut hasher = DefaultHasher::new();
                let val_bound = val.bind(py);

                let key_hash = key.hash;
                let val_hash = val_bound.hash().map_err(|_| {
                    PyTypeError::new_err(format!(
                        "Unhashable type in HashTrieMap of key {}: {}",
                        key.inner
                            .bind(py)
                            .repr()
                            .and_then(|r| r.extract())
                            .unwrap_or("<repr> error".to_string()),
                        val_bound
                            .repr()
                            .and_then(|r| r.extract())
                            .unwrap_or("<repr> error".to_string())
                    ))
                })?;

                hasher.write_isize(key_hash);
                hasher.write_isize(val_hash);

                Ok(hasher.finish() as usize)
            })
            .try_fold(0, |acc: usize, x: PyResult<usize>| {
                PyResult::<usize>::Ok(acc ^ hash_shuffle_bits(x?))
            })?;

        // factor in the number of entries in the collection
        hash_val ^= self.inner.size().wrapping_add(1).wrapping_mul(1927868237);

        // dispense patterns in the hash value
        hash_val ^= (hash_val >> 11) ^ (hash_val >> 25);
        hash_val = hash_val.wrapping_mul(69069).wrapping_add(907133923);

        Ok(hash_val as isize)
    }

    fn __reduce__(slf: PyRef<Self>) -> (Bound<'_, PyType>, (Vec<(Key, PyObject)>,)) {
        (
            HashTrieMapPy::type_object(slf.py()),
            (slf.inner
                .iter()
                .map(|(k, v)| (k.clone_ref(slf.py()), v.clone_ref(slf.py())))
                .collect(),),
        )
    }

    #[classmethod]
    fn convert(
        _cls: &Bound<'_, PyType>,
        value: Bound<'_, PyAny>,
        py: Python,
    ) -> PyResult<PyObject> {
        if value.is_instance_of::<HashTrieMapPy>() {
            Ok(value.unbind())
        } else {
            HashTrieMapPy::extract_bound(&value)?
                .into_pyobject(py)
                .map(BoundObject::into_any)
                .map(BoundObject::unbind)
        }
    }

    #[classmethod]
    #[pyo3(signature = (keys, val=None))]
    fn fromkeys(
        _cls: &Bound<'_, PyType>,
        keys: &Bound<'_, PyAny>,
        val: Option<&Bound<'_, PyAny>>,
        py: Python,
    ) -> PyResult<HashTrieMapPy> {
        let mut inner = HashTrieMap::new_sync();
        let none = py.None().into_bound(py);
        let value = val.unwrap_or(&none);
        for each in keys.try_iter()? {
            let key = Key::extract_bound(&each?)?;
            inner.insert_mut(key, value.clone().unbind());
        }
        Ok(HashTrieMapPy { inner })
    }

    #[pyo3(signature = (key, default=None))]
    fn get(&self, key: Key, default: Option<PyObject>, py: Python) -> Option<PyObject> {
        if let Some(value) = self.inner.get(&key) {
            Some(value.clone_ref(py))
        } else {
            default
        }
    }

    fn keys(&self) -> KeysView {
        KeysView {
            inner: self.inner.clone(),
        }
    }

    fn values(&self) -> ValuesView {
        ValuesView {
            inner: self.inner.clone(),
        }
    }

    fn items(&self) -> ItemsView {
        ItemsView {
            inner: self.inner.clone(),
        }
    }

    fn discard(&self, key: Key) -> PyResult<HashTrieMapPy> {
        match self.inner.contains_key(&key) {
            true => Ok(HashTrieMapPy {
                inner: self.inner.remove(&key),
            }),
            false => Ok(HashTrieMapPy {
                inner: self.inner.clone(),
            }),
        }
    }

    fn insert(&self, key: Key, value: Bound<'_, PyAny>) -> HashTrieMapPy {
        HashTrieMapPy {
            inner: self.inner.insert(key, value.unbind()),
        }
    }

    fn remove(&self, key: Key) -> PyResult<HashTrieMapPy> {
        match self.inner.contains_key(&key) {
            true => Ok(HashTrieMapPy {
                inner: self.inner.remove(&key),
            }),
            false => Err(PyKeyError::new_err(key)),
        }
    }

    #[pyo3(signature = (*maps, **kwds))]
    fn update(
        &self,
        maps: &Bound<'_, PyTuple>,
        kwds: Option<&Bound<'_, PyDict>>,
    ) -> PyResult<HashTrieMapPy> {
        let mut inner = self.inner.clone();
        for value in maps {
            let map = HashTrieMapPy::extract_bound(&value)?;
            for (k, v) in &map.inner {
                inner.insert_mut(k.clone_ref(value.py()), v.clone_ref(value.py()));
            }
        }
        if let Some(kwds) = kwds {
            for (k, v) in kwds {
                inner.insert_mut(Key::extract_bound(&k)?, v.extract()?);
            }
        }
        Ok(HashTrieMapPy { inner })
    }
}

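// The iterator types below each hold their own snapshot of the persistent map
// and advance by yielding its first remaining entry, then removing that entry
// from the snapshot; clones are cheap since the structure is persistent.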
#[pyclass(module = "rpds")]
struct KeysIterator {
    inner: HashTrieMapSync<Key, PyObject>,
}

#[pymethods]
impl KeysIterator {
    fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
        slf
    }

    fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<Key> {
        let first = slf.inner.keys().next()?.clone_ref(slf.py());
        slf.inner = slf.inner.remove(&first);
        Some(first)
    }
}

#[pyclass(module = "rpds")]
struct ValuesIterator {
    inner: HashTrieMapSync<Key, PyObject>,
}

#[pymethods]
impl ValuesIterator {
    fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
        slf
    }

    fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<PyObject> {
        let kv = slf.inner.iter().next()?;
        let value = kv.1.clone_ref(slf.py());
        slf.inner = slf.inner.remove(kv.0);
        Some(value)
    }
}

#[pyclass(module = "rpds")]
struct ItemsIterator {
    inner: HashTrieMapSync<Key, PyObject>,
}

#[pymethods]
impl ItemsIterator {
    fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
        slf
    }

    fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<(Key, PyObject)> {
        let kv = slf.inner.iter().next()?;
        let key = kv.0.clone_ref(slf.py());
        let value = kv.1.clone_ref(slf.py());

        slf.inner = slf.inner.remove(kv.0);

        Some((key, value))
    }
}

#[pyclass(module = "rpds")]
struct KeysView {
    inner: HashTrieMapSync<Key, PyObject>,
}

#[pymethods]
impl KeysView {
    fn __contains__(&self, key: Key) -> bool {
        self.inner.contains_key(&key)
    }

    fn __eq__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            if !slf.inner.contains_key(&Key::extract_bound(&each?)?) {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __lt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() {
            return Ok(false);
        }

        for each in slf.inner.keys() {
            if !other.contains(each.inner.clone_ref(slf.py()))? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __le__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() {
            return Ok(false);
        }

        for each in slf.inner.keys() {
            if !other.contains(each.inner.clone_ref(slf.py()))? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __gt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            if !slf.inner.contains_key(&Key::extract_bound(&each?)?) {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __ge__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            if !slf.inner.contains_key(&Key::extract_bound(&each?)?) {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __iter__(slf: PyRef<'_, Self>) -> KeysIterator {
        KeysIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __len__(slf: PyRef<'_, Self>) -> usize {
        slf.inner.size()
    }

    fn __and__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>) -> PyResult<HashTrieSetPy> {
        KeysView::intersection(slf, other)
    }

    fn __or__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<KeysView> {
        KeysView::union(slf, other, py)
    }

    fn __repr__(&self, py: Python) -> PyResult<String> {
        let contents = self.inner.into_iter().map(|(k, _)| {
            Ok(k.clone_ref(py)
                .inner
                .into_pyobject(py)?
                .call_method0("__repr__")
                .and_then(|r| r.extract())
                .unwrap_or("<repr failed>".to_owned()))
        });
        let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
        Ok(format!("keys_view({{{}}})", contents.join(", ")))
    }

    fn intersection(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>) -> PyResult<HashTrieSetPy> {
        // TODO: iterate over the shorter one if it's got a length
        let mut inner = HashTrieSet::new_sync();
        for each in other.try_iter()? {
            let key = Key::extract_bound(&each?)?;
            if slf.inner.contains_key(&key) {
                inner.insert_mut(key);
            }
        }
        Ok(HashTrieSetPy { inner })
    }

    fn union(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<KeysView> {
        // There doesn't seem to be a low-effort way to get a HashTrieSet out of a map,
        // so we just keep our map and add values we'll ignore.
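        // The None placeholders are never observable from Python: a KeysView
        // only ever exposes keys, so reusing the map as a key set is safe.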
        let mut inner = slf.inner.clone();
        for each in other.try_iter()? {
            inner.insert_mut(Key::extract_bound(&each?)?, py.None());
        }
        Ok(KeysView { inner })
    }
}

#[pyclass(module = "rpds")]
struct ValuesView {
    inner: HashTrieMapSync<Key, PyObject>,
}

#[pymethods]
impl ValuesView {
    fn __iter__(slf: PyRef<'_, Self>) -> ValuesIterator {
        ValuesIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __len__(slf: PyRef<'_, Self>) -> usize {
        slf.inner.size()
    }

    fn __repr__(&self, py: Python) -> PyResult<String> {
        let contents = self.inner.into_iter().map(|(_, v)| {
            Ok(v.into_pyobject(py)?
                .call_method0("__repr__")
                .and_then(|r| r.extract())
                .unwrap_or("<repr failed>".to_owned()))
        });
        let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
        Ok(format!("values_view([{}])", contents.join(", ")))
    }
}

#[pyclass(module = "rpds")]
struct ItemsView {
    inner: HashTrieMapSync<Key, PyObject>,
}

#[derive(FromPyObject)]
struct ItemViewQuery(Key, PyObject);

#[pymethods]
impl ItemsView {
    fn __contains__(slf: PyRef<'_, Self>, item: ItemViewQuery) -> PyResult<bool> {
        if let Some(value) = slf.inner.get(&item.0) {
            return item.1.bind(slf.py()).eq(value);
        }

        Ok(false)
    }

    fn __iter__(slf: PyRef<'_, Self>) -> ItemsIterator {
        ItemsIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __len__(slf: PyRef<'_, Self>) -> usize {
        slf.inner.size()
    }

    fn __eq__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() {
            return Ok(false);
        }
        for (k, v) in slf.inner.iter() {
            if !other.contains((k.inner.clone_ref(slf.py()), v))? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __repr__(&self, py: Python) -> PyResult<String> {
        let contents = self.inner.into_iter().map(|(k, v)| {
            let tuple = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
            Ok(format!("{:?}", tuple))
        });
        let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
        Ok(format!("items_view([{}])", contents.join(", ")))
    }

    fn __lt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() {
            return Ok(false);
        }
        for (k, v) in slf.inner.iter() {
            let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
            // FIXME: needs to compare
            if !other.contains(pair)? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __le__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() {
            return Ok(false);
        }
        for (k, v) in slf.inner.iter() {
            let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
            // FIXME: needs to compare
            if !other.contains(pair)? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __gt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            let kv = each?;
            let k = kv.get_item(0)?;
            match slf.inner.get(&Key::extract_bound(&k)?) {
                Some(value) => {
                    let pair = PyTuple::new(py, [k, value.bind(py).clone()])?;
                    if !pair.eq(kv)? {
                        return Ok(false);
                    }
                }
                None => return Ok(false),
            }
        }
        Ok(true)
    }

    fn __ge__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            let kv = each?;
            let k = kv.get_item(0)?;
            match slf.inner.get(&Key::extract_bound(&k)?) {
                Some(value) => {
                    let pair = PyTuple::new(py, [k, value.bind(py).clone()])?;
                    if !pair.eq(kv)? {
                        return Ok(false);
                    }
                }
                None => return Ok(false),
            }
        }
        Ok(true)
    }

    fn __and__(
        slf: PyRef<'_, Self>,
        other: &Bound<'_, PyAny>,
        py: Python,
    ) -> PyResult<HashTrieSetPy> {
        ItemsView::intersection(slf, other, py)
    }

    fn __or__(
        slf: PyRef<'_, Self>,
        other: &Bound<'_, PyAny>,
        py: Python,
    ) -> PyResult<HashTrieSetPy> {
        ItemsView::union(slf, other, py)
    }

    fn intersection(
        slf: PyRef<'_, Self>,
        other: &Bound<'_, PyAny>,
        py: Python,
    ) -> PyResult<HashTrieSetPy> {
        // TODO: iterate over the shorter one if it's got a length
        let mut inner = HashTrieSet::new_sync();
        for each in other.try_iter()? {
            let kv = each?;
            let k = kv.get_item(0)?;
            if let Some(value) = slf.inner.get(&Key::extract_bound(&k)?) {
                let pair = PyTuple::new(py, [k, value.bind(py).clone()])?;
                if pair.eq(kv)? {
                    inner.insert_mut(Key::extract_bound(&pair)?);
                }
            }
        }
        Ok(HashTrieSetPy { inner })
    }

    fn union(
        slf: PyRef<'_, Self>,
        other: &Bound<'_, PyAny>,
        py: Python,
    ) -> PyResult<HashTrieSetPy> {
        // TODO: this is very inefficient, but again can't seem to get a HashTrieSet out of ourself
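        // Each of our (key, value) pairs is materialized as a Python tuple
        // and wrapped as a Key, so the result is a HashTrieSet of pairs plus
        // whatever elements the other iterable contributes.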
        let mut inner = HashTrieSet::new_sync();
        for (k, v) in slf.inner.iter() {
            let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
            inner.insert_mut(Key::extract_bound(&pair)?);
        }
        for each in other.try_iter()? {
            inner.insert_mut(Key::extract_bound(&each?)?);
        }
        Ok(HashTrieSetPy { inner })
    }
}

#[repr(transparent)]
#[pyclass(name = "HashTrieSet", module = "rpds", frozen)]
struct HashTrieSetPy {
    inner: HashTrieSetSync<Key>,
}

impl<'source> FromPyObject<'source> for HashTrieSetPy {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        let mut ret = HashTrieSet::new_sync();
        for each in ob.try_iter()? {
            let k: Key = each?.extract()?;
            ret.insert_mut(k);
        }
        Ok(HashTrieSetPy { inner: ret })
    }
}

#[pymethods]
impl HashTrieSetPy {
    #[new]
    #[pyo3(signature = (value=None))]
    fn init(value: Option<HashTrieSetPy>) -> Self {
        if let Some(value) = value {
            value
        } else {
            HashTrieSetPy {
                inner: HashTrieSet::new_sync(),
            }
        }
    }

    fn __contains__(&self, key: Key) -> bool {
        self.inner.contains(&key)
    }

    fn __and__(&self, other: &Self, py: Python) -> Self {
        self.intersection(other, py)
    }

    fn __or__(&self, other: &Self, py: Python) -> Self {
        self.union(other, py)
    }

    fn __sub__(&self, other: &Self) -> Self {
        self.difference(other)
    }

    fn __xor__(&self, other: &Self, py: Python) -> Self {
        self.symmetric_difference(other, py)
    }

    fn __iter__(slf: PyRef<'_, Self>) -> SetIterator {
        SetIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __len__(&self) -> usize {
        self.inner.size()
    }

    fn __repr__(&self, py: Python) -> PyResult<String> {
        let contents = self.inner.into_iter().map(|k| {
            Ok(k.clone_ref(py)
                .into_pyobject(py)?
                .call_method0("__repr__")
                .and_then(|r| r.extract())
                .unwrap_or("<repr failed>".to_owned()))
        });
        let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
        Ok(format!("HashTrieSet({{{}}})", contents.join(", ")))
    }

    fn __eq__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            if !slf.inner.contains(&Key::extract_bound(&each?)?) {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __hash__(&self) -> PyResult<isize> {
        // modified from https://github.com/python/cpython/blob/d69529d31ccd1510843cfac1ab53bb8cb027541f/Objects/setobject.c#L715

        let mut hash_val = self
            .inner
            .iter()
            .map(|k| k.hash as usize)
            .fold(0, |acc: usize, x: usize| acc ^ hash_shuffle_bits(x));
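        // XOR-folding the shuffled per-element hashes is order-independent,
        // so HashTrieSet([1, 2]) and HashTrieSet([2, 1]) hash identically.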

        // factor in the number of entries in the collection
        hash_val ^= self.inner.size().wrapping_add(1).wrapping_mul(1927868237);

        // disperse patterns in the hash value
        hash_val ^= (hash_val >> 11) ^ (hash_val >> 25);
        hash_val = hash_val.wrapping_mul(69069).wrapping_add(907133923);

        Ok(hash_val as isize)
    }

    fn __lt__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() {
            return Ok(false);
        }
        for each in slf.inner.iter() {
            if !other.contains(each.inner.clone_ref(py))? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __le__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() {
            return Ok(false);
        }
        for each in slf.inner.iter() {
            if !other.contains(each.inner.clone_ref(slf.py()))? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __gt__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            if !slf.inner.contains(&Key::extract_bound(&each?)?) {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __ge__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
        let abc = PyModule::import(py, "collections.abc")?;
        if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() {
            return Ok(false);
        }
        for each in other.try_iter()? {
            if !slf.inner.contains(&Key::extract_bound(&each?)?) {
                return Ok(false);
            }
        }
        Ok(true)
    }

    fn __reduce__(slf: PyRef<Self>) -> (Bound<'_, PyType>, (Vec<Key>,)) {
        (
            HashTrieSetPy::type_object(slf.py()),
            (slf.inner.iter().map(|e| e.clone_ref(slf.py())).collect(),),
        )
    }

    fn insert(&self, value: Key) -> HashTrieSetPy {
        HashTrieSetPy {
            inner: self.inner.insert(value),
        }
    }

    fn discard(&self, value: Key) -> PyResult<HashTrieSetPy> {
        match self.inner.contains(&value) {
            true => Ok(HashTrieSetPy {
                inner: self.inner.remove(&value),
            }),
            false => Ok(HashTrieSetPy {
                inner: self.inner.clone(),
            }),
        }
    }

    fn remove(&self, value: Key) -> PyResult<HashTrieSetPy> {
        match self.inner.contains(&value) {
            true => Ok(HashTrieSetPy {
                inner: self.inner.remove(&value),
            }),
            false => Err(PyKeyError::new_err(value)),
        }
    }

    fn difference(&self, other: &Self) -> HashTrieSetPy {
        let mut inner = self.inner.clone();
        for value in other.inner.iter() {
            inner.remove_mut(value);
        }
        HashTrieSetPy { inner }
    }

    fn intersection(&self, other: &Self, py: Python) -> HashTrieSetPy {
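        // Iterate the smaller set and probe the larger one, so the loop runs
        // over the smaller of the two sizes.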
        let mut inner: HashTrieSetSync<Key> = HashTrieSet::new_sync();
        let larger: &HashTrieSetSync<Key>;
        let iter;
        if self.inner.size() > other.inner.size() {
            larger = &self.inner;
            iter = other.inner.iter();
        } else {
            larger = &other.inner;
            iter = self.inner.iter();
        }
        for value in iter {
            if larger.contains(value) {
                inner.insert_mut(value.clone_ref(py));
            }
        }
        HashTrieSetPy { inner }
    }

    fn symmetric_difference(&self, other: &Self, py: Python) -> HashTrieSetPy {
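        // Start from a clone of the larger set and toggle membership for each
        // element of the smaller one: shared elements are removed, unshared
        // ones inserted, which is exactly the symmetric difference.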
        let mut inner: HashTrieSetSync<Key>;
        let iter;
        if self.inner.size() > other.inner.size() {
            inner = self.inner.clone();
            iter = other.inner.iter();
        } else {
            inner = other.inner.clone();
            iter = self.inner.iter();
        }
        for value in iter {
            if inner.contains(value) {
                inner.remove_mut(value);
            } else {
                inner.insert_mut(value.clone_ref(py));
            }
        }
        HashTrieSetPy { inner }
    }

    fn union(&self, other: &Self, py: Python) -> HashTrieSetPy {
        let mut inner: HashTrieSetSync<Key>;
        let iter;
        if self.inner.size() > other.inner.size() {
            inner = self.inner.clone();
            iter = other.inner.iter();
        } else {
            inner = other.inner.clone();
            iter = self.inner.iter();
        }
        for value in iter {
            inner.insert_mut(value.clone_ref(py));
        }
        HashTrieSetPy { inner }
    }

    #[pyo3(signature = (*iterables))]
    fn update(&self, iterables: Bound<'_, PyTuple>) -> PyResult<HashTrieSetPy> {
        let mut inner = self.inner.clone();
        for each in iterables {
            let iter = each.try_iter()?;
            for value in iter {
                inner.insert_mut(Key::extract_bound(&value?)?);
            }
        }
        Ok(HashTrieSetPy { inner })
    }
}

#[pyclass(module = "rpds")]
struct SetIterator {
    inner: HashTrieSetSync<Key>,
}

#[pymethods]
impl SetIterator {
    fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
        slf
    }

    fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<Key> {
        let first = slf.inner.iter().next()?.clone_ref(slf.py());
        slf.inner = slf.inner.remove(&first);
        Some(first)
    }
}

#[repr(transparent)]
#[pyclass(name = "List", module = "rpds", frozen, sequence)]
struct ListPy {
    inner: ListSync<PyObject>,
}

impl From<ListSync<PyObject>> for ListPy {
    fn from(elements: ListSync<PyObject>) -> Self {
        ListPy { inner: elements }
    }
}

impl<'source> FromPyObject<'source> for ListPy {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        let mut ret = List::new_sync();
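        // push_front_mut prepends, so iterate the source through builtins'
        // reversed() to end up with the original element order.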
        let reversed = PyModule::import(ob.py(), "builtins")?.getattr("reversed")?;
        let rob: Bound<'_, PyIterator> = reversed.call1((ob,))?.try_iter()?;
        for each in rob {
            ret.push_front_mut(each?.extract()?);
        }
        Ok(ListPy { inner: ret })
    }
}

#[pymethods]
impl ListPy {
    #[new]
    #[pyo3(signature = (*elements))]
    fn init(elements: &Bound<'_, PyTuple>) -> PyResult<Self> {
        let mut ret: ListPy;
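        // A single argument is treated as an iterable to copy
        // (List([1, 2, 3])); multiple arguments become the elements
        // themselves (List(1, 2, 3)), pushed onto the front in reverse
        // index order so iteration keeps their order.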
        if elements.len() == 1 {
            ret = elements.get_item(0)?.extract()?;
        } else {
            ret = ListPy {
                inner: List::new_sync(),
            };
            if elements.len() > 1 {
                for each in (0..elements.len()).rev() {
                    ret.inner
                        .push_front_mut(elements.get_item(each)?.extract()?);
                }
            }
        }
        Ok(ret)
    }

    fn __len__(&self) -> usize {
        self.inner.len()
    }

    fn __repr__(&self, py: Python) -> PyResult<String> {
        let contents = self.inner.into_iter().map(|k| {
            Ok(k.into_pyobject(py)?
                .call_method0("__repr__")
                .and_then(|r| r.extract())
                .unwrap_or("<repr failed>".to_owned()))
        });
        let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
        Ok(format!("List([{}])", contents.join(", ")))
    }

    fn __richcmp__(&self, other: &Self, op: CompareOp, py: Python<'_>) -> PyResult<PyObject> {
        match op {
            CompareOp::Eq => (self.inner.len() == other.inner.len()
                && self
                    .inner
                    .iter()
                    .zip(other.inner.iter())
                    .map(|(e1, e2)| e1.bind(py).eq(e2))
                    .all(|r| r.unwrap_or(false)))
            .into_pyobject(py)
            .map_err(Into::into)
            .map(BoundObject::into_any)
            .map(BoundObject::unbind),
            CompareOp::Ne => (self.inner.len() != other.inner.len()
                || self
                    .inner
                    .iter()
                    .zip(other.inner.iter())
                    .map(|(e1, e2)| e1.bind(py).ne(e2))
                    .any(|r| r.unwrap_or(true)))
            .into_pyobject(py)
            .map_err(Into::into)
            .map(BoundObject::into_any)
            .map(BoundObject::unbind),
            _ => Ok(py.NotImplemented()),
        }
    }

    fn __hash__(&self, py: Python) -> PyResult<u64> {
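        // Element hashes are folded in sequence order, so unlike the set
        // hash this distinguishes List([1, 2]) from List([2, 1]).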
        let mut hasher = DefaultHasher::new();

        self.inner
            .iter()
            .enumerate()
            .try_for_each(|(index, each)| {
                each.bind(py)
                    .hash()
                    .map_err(|_| {
                        PyTypeError::new_err(format!(
                            "Unhashable type at {} element in List: {}",
                            index,
                            each.bind(py)
                                .repr()
                                .and_then(|r| r.extract())
                                .unwrap_or("<repr> error".to_string())
                        ))
                    })
                    .map(|x| hasher.write_isize(x))
            })?;

        Ok(hasher.finish())
    }

    fn __iter__(slf: PyRef<'_, Self>) -> ListIterator {
        ListIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __reversed__(&self) -> ListPy {
        ListPy {
            inner: self.inner.reverse(),
        }
    }

    fn __reduce__(slf: PyRef<Self>) -> (Bound<'_, PyType>, (Vec<PyObject>,)) {
        (
            ListPy::type_object(slf.py()),
            (slf.inner.iter().map(|e| e.clone_ref(slf.py())).collect(),),
        )
    }

    #[getter]
    fn first(&self) -> PyResult<&PyObject> {
        self.inner
            .first()
            .ok_or_else(|| PyIndexError::new_err("empty list has no first element"))
    }

    #[getter]
    fn rest(&self) -> ListPy {
        let mut inner = self.inner.clone();
        inner.drop_first_mut();
        ListPy { inner }
    }

    fn push_front(&self, other: PyObject) -> ListPy {
        ListPy {
            inner: self.inner.push_front(other),
        }
    }

    fn drop_first(&self) -> PyResult<ListPy> {
        if let Some(inner) = self.inner.drop_first() {
            Ok(ListPy { inner })
        } else {
            Err(PyIndexError::new_err("empty list has no first element"))
        }
    }
}

#[pyclass(module = "rpds")]
struct ListIterator {
    inner: ListSync<PyObject>,
}

#[pymethods]
impl ListIterator {
    fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
        slf
    }

    fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<PyObject> {
        let first_op = slf.inner.first()?;
        let first = first_op.clone_ref(slf.py());

        slf.inner = slf.inner.drop_first()?;

        Some(first)
    }
}

#[pyclass(module = "rpds")]
struct QueueIterator {
    inner: QueueSync<PyObject>,
}

#[pymethods]
impl QueueIterator {
    fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
        slf
    }

    fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<PyObject> {
        let first_op = slf.inner.peek()?;
        let first = first_op.clone_ref(slf.py());
        slf.inner = slf.inner.dequeue()?;
        Some(first)
    }
}

#[repr(transparent)]
#[pyclass(name = "Queue", module = "rpds", frozen, sequence)]
struct QueuePy {
    inner: QueueSync<PyObject>,
}

impl From<QueueSync<PyObject>> for QueuePy {
    fn from(elements: QueueSync<PyObject>) -> Self {
        QueuePy { inner: elements }
    }
}

impl<'source> FromPyObject<'source> for QueuePy {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        let mut ret = Queue::new_sync();
        for each in ob.try_iter()? {
            ret.enqueue_mut(each?.extract()?);
        }
        Ok(QueuePy { inner: ret })
    }
}

#[pymethods]
impl QueuePy {
    #[new]
    #[pyo3(signature = (*elements))]
    fn init(elements: &Bound<'_, PyTuple>, py: Python<'_>) -> PyResult<Self> {
        let mut ret: QueuePy;
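        // A single argument is treated as an iterable to copy
        // (Queue([1, 2, 3])); multiple arguments become the elements
        // themselves (Queue(1, 2, 3)), enqueued in order.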
        if elements.len() == 1 {
            ret = elements.get_item(0)?.extract()?;
        } else {
            ret = QueuePy {
                inner: Queue::new_sync(),
            };
            if elements.len() > 1 {
                for each in elements {
                    ret.inner.enqueue_mut(each.into_pyobject(py)?.unbind());
                }
            }
        }
        Ok(ret)
    }

    fn __eq__(&self, other: &Self, py: Python<'_>) -> bool {
        (self.inner.len() == other.inner.len())
            && self
                .inner
                .iter()
                .zip(other.inner.iter())
                .map(|(e1, e2)| e1.bind(py).eq(e2))
                .all(|r| r.unwrap_or(false))
    }

    fn __hash__(&self, py: Python<'_>) -> PyResult<u64> {
        let mut hasher = DefaultHasher::new();

        self.inner
            .iter()
            .enumerate()
            .try_for_each(|(index, each)| {
                each.bind(py)
                    .hash()
                    .map_err(|_| {
                        PyTypeError::new_err(format!(
                            "Unhashable type at {} element in Queue: {}",
                            index,
                            each.bind(py)
                                .repr()
                                .and_then(|r| r.extract())
                                .unwrap_or("<repr> error".to_string())
                        ))
                    })
                    .map(|x| hasher.write_isize(x))
            })?;

        Ok(hasher.finish())
    }

    fn __ne__(&self, other: &Self, py: Python<'_>) -> bool {
        (self.inner.len() != other.inner.len())
            || self
                .inner
                .iter()
                .zip(other.inner.iter())
                .map(|(e1, e2)| e1.bind(py).ne(e2))
                .any(|r| r.unwrap_or(true))
    }

    fn __iter__(slf: PyRef<'_, Self>) -> QueueIterator {
        QueueIterator {
            inner: slf.inner.clone(),
        }
    }

    fn __len__(&self) -> usize {
        self.inner.len()
    }

    fn __repr__(&self, py: Python) -> PyResult<String> {
        let contents = self.inner.into_iter().map(|k| {
            Ok(k.into_pyobject(py)?
                .call_method0("__repr__")
                .and_then(|r| r.extract())
                .unwrap_or("<repr failed>".to_owned()))
        });
        let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
        Ok(format!("Queue([{}])", contents.join(", ")))
    }

    #[getter]
    fn peek(&self, py: Python) -> PyResult<PyObject> {
        if let Some(peeked) = self.inner.peek() {
            Ok(peeked.clone_ref(py))
        } else {
            Err(PyIndexError::new_err("peeked an empty queue"))
        }
    }

    #[getter]
    fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    fn enqueue(&self, value: Bound<'_, PyAny>) -> Self {
        QueuePy {
            inner: self.inner.enqueue(value.into()),
        }
    }

    fn dequeue(&self) -> PyResult<QueuePy> {
        if let Some(inner) = self.inner.dequeue() {
            Ok(QueuePy { inner })
        } else {
            Err(PyIndexError::new_err("dequeued an empty queue"))
        }
    }
}

#[pymodule(gil_used = false)]
#[pyo3(name = "rpds")]
fn rpds_py(py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_class::<HashTrieMapPy>()?;
    m.add_class::<HashTrieSetPy>()?;
    m.add_class::<ListPy>()?;
    m.add_class::<QueuePy>()?;

    PyMapping::register::<HashTrieMapPy>(py)?;

    let abc = PyModule::import(py, "collections.abc")?;

    abc.getattr("Set")?
        .call_method1("register", (HashTrieSetPy::type_object(py),))?;

    abc.getattr("MappingView")?
        .call_method1("register", (KeysView::type_object(py),))?;
    abc.getattr("MappingView")?
        .call_method1("register", (ValuesView::type_object(py),))?;
    abc.getattr("MappingView")?
        .call_method1("register", (ItemsView::type_object(py),))?;

    abc.getattr("KeysView")?
        .call_method1("register", (KeysView::type_object(py),))?;
    abc.getattr("ValuesView")?
        .call_method1("register", (ValuesView::type_object(py),))?;
    abc.getattr("ItemsView")?
        .call_method1("register", (ItemsView::type_object(py),))?;

    Ok(())
}
0707010000001B000041ED0000000000000000000000026750735100000000000000000000000000000000000000000000001500000000rpds.py-0.22.3/tests0707010000001C000081A40000000000000000000000016750735100000000000000000000000000000000000000000000002100000000rpds.py-0.22.3/tests/__init__.py0707010000001D000081A4000000000000000000000001675073510000002E000000000000000000000000000000000000002500000000rpds.py-0.22.3/tests/requirements.infile:.#egg=rpds-py
pytest
pytest-run-parallel
0707010000001E000081A400000000000000000000000167507351000001B9000000000000000000000000000000000000002600000000rpds.py-0.22.3/tests/requirements.txt# This file was autogenerated by uv via the following command:
#    uv pip compile --output-file tests/requirements.txt tests/requirements.in
iniconfig==2.0.0
    # via pytest
packaging==24.1
    # via pytest
pluggy==1.5.0
    # via pytest
pytest==8.3.3
    # via
    #   -r tests/requirements.in
    #   pytest-run-parallel
pytest-run-parallel==0.2.0
    # via -r tests/requirements.in
file:.#egg=rpds-py
    # via -r tests/requirements.in
0707010000001F000081A40000000000000000000000016750735100003BFE000000000000000000000000000000000000002B00000000rpds.py-0.22.3/tests/test_hash_trie_map.py"""
Modified from the pyrsistent test suite.

Pre-modification, these were MIT licensed, and are copyright:

    Copyright (c) 2022 Tobias Gustafsson

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation
    files (the "Software"), to deal in the Software without
    restriction, including without limitation the rights to use,
    copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following
    conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    OTHER DEALINGS IN THE SOFTWARE.
"""

from collections import abc
from operator import methodcaller
import pickle
import sysconfig

import pytest

from rpds import HashTrieMap

# see https://github.com/python/cpython/issues/127065,
# remove this when the CPython bug is fixed in a released version
if bool(sysconfig.get_config_var("Py_GIL_DISABLED")):

    def methodcaller(name, /, *args, **kwargs):
        def caller(obj):
            return getattr(obj, name)(*args, **kwargs)

        return caller


def test_instance_of_hashable():
    assert isinstance(HashTrieMap(), abc.Hashable)


def test_instance_of_map():
    assert isinstance(HashTrieMap(), abc.Mapping)


def test_literalish_works():
    assert HashTrieMap() == HashTrieMap()
    assert HashTrieMap(a=1, b=2) == HashTrieMap({"a": 1, "b": 2})


def test_empty_initialization():
    a_map = HashTrieMap()
    assert len(a_map) == 0


def test_initialization_with_one_element():
    the_map = HashTrieMap({"a": 2})
    assert len(the_map) == 1
    assert the_map["a"] == 2
    assert "a" in the_map

    empty_map = the_map.remove("a")
    assert len(empty_map) == 0
    assert "a" not in empty_map


def test_index_non_existing_raises_key_error():
    m1 = HashTrieMap()
    with pytest.raises(KeyError) as error:
        m1["foo"]

    assert str(error.value) == "'foo'"


def test_remove_non_existing_element_raises_key_error():
    m1 = HashTrieMap(a=1)

    with pytest.raises(KeyError) as error:
        m1.remove("b")

    assert str(error.value) == "'b'"


def test_various_iterations():
    assert {"a", "b"} == set(HashTrieMap(a=1, b=2))
    assert ["a", "b"] == sorted(HashTrieMap(a=1, b=2).keys())
    assert [1, 2] == sorted(HashTrieMap(a=1, b=2).values())
    assert {("a", 1), ("b", 2)} == set(HashTrieMap(a=1, b=2).items())

    pm = HashTrieMap({k: k for k in range(100)})
    assert len(pm) == len(pm.keys())
    assert len(pm) == len(pm.values())
    assert len(pm) == len(pm.items())
    ks = pm.keys()
    assert all(k in pm for k in ks)
    assert all(k in ks for k in ks)
    us = pm.items()
    assert all(pm[k] == v for (k, v) in us)
    vs = pm.values()
    assert all(v in vs for v in vs)


def test_initialization_with_two_elements():
    map1 = HashTrieMap({"a": 2, "b": 3})
    assert len(map1) == 2
    assert map1["a"] == 2
    assert map1["b"] == 3

    map2 = map1.remove("a")
    assert "a" not in map2
    assert map2["b"] == 3


def test_initialization_with_many_elements():
    init_dict = {str(x): x for x in range(1700)}
    the_map = HashTrieMap(init_dict)

    assert len(the_map) == 1700
    assert the_map["16"] == 16
    assert the_map["1699"] == 1699
    assert the_map.insert("256", 256) == the_map

    new_map = the_map.remove("1600")
    assert len(new_map) == 1699
    assert "1600" not in new_map
    assert new_map["1601"] == 1601

    # Some NOP properties
    assert new_map.discard("18888") == new_map
    assert "19999" not in new_map
    assert new_map["1500"] == 1500
    assert new_map.insert("1500", new_map["1500"]) == new_map


def test_access_non_existing_element():
    map1 = HashTrieMap()
    assert len(map1) == 0

    map2 = map1.insert("1", 1)
    assert "1" not in map1
    assert map2["1"] == 1
    assert "2" not in map2


def test_overwrite_existing_element():
    map1 = HashTrieMap({"a": 2})
    map2 = map1.insert("a", 3)

    assert len(map2) == 1
    assert map2["a"] == 3


def test_hashing():
    o = object()

    assert hash(HashTrieMap([(o, o), (1, o)])) == hash(
        HashTrieMap([(o, o), (1, o)]),
    )
    assert hash(HashTrieMap([(o, o), (1, o)])) == hash(
        HashTrieMap([(1, o), (o, o)]),
    )
    assert hash(HashTrieMap([(o, "foo")])) == hash(HashTrieMap([(o, "foo")]))
    assert hash(HashTrieMap()) == hash(HashTrieMap([]))

    assert hash(HashTrieMap({1: 2})) != hash(HashTrieMap({1: 3}))
    assert hash(HashTrieMap({o: 1})) != hash(HashTrieMap({o: o}))
    assert hash(HashTrieMap([])) != hash(HashTrieMap([(o, 1)]))
    assert hash(HashTrieMap({1: 2, 3: 4})) != hash(HashTrieMap({1: 3, 2: 4}))


def test_same_hash_when_content_the_same_but_underlying_vector_size_differs():
    x = HashTrieMap({x: x for x in range(1000)})
    y = HashTrieMap({10: 10, 200: 200, 700: 700})

    for z in x:
        if z not in y:
            x = x.remove(z)

    assert x == y
    # assert hash(x) == hash(y)  # noqa: ERA001


class HashabilityControlled:
    hashable = True

    def __hash__(self):
        if self.hashable:
            return 4  # Proven random
        raise ValueError("I am not currently hashable.")


def test_map_does_not_hash_values_on_second_hash_invocation():
    hashable = HashabilityControlled()
    x = HashTrieMap(dict(el=hashable))
    hash(x)

    hashable.hashable = False
    with pytest.raises(
        TypeError,
        match=r"Unhashable type in HashTrieMap of key 'el'",
    ):
        hash(x)


def test_equal():
    x = HashTrieMap(a=1, b=2, c=3)
    y = HashTrieMap(a=1, b=2, c=3)

    assert x == y
    assert not (x != y)

    assert y == x
    assert not (y != x)


def test_equal_with_different_insertion_order():
    x = HashTrieMap([(i, i) for i in range(50)])
    y = HashTrieMap([(i, i) for i in range(49, -1, -1)])

    assert x == y
    assert not (x != y)

    assert y == x
    assert not (y != x)


def test_not_equal():
    x = HashTrieMap(a=1, b=2, c=3)
    y = HashTrieMap(a=1, b=2)

    assert x != y
    assert not (x == y)

    assert y != x
    assert not (y == x)


def test_not_equal_to_dict():
    x = HashTrieMap(a=1, b=2, c=3)
    y = dict(a=1, b=2, d=4)

    assert x != y
    assert not (x == y)

    assert y != x
    assert not (y == x)


def test_update_with_multiple_arguments():
    # If the same key is present in multiple sources, the rightmost value is used.
    x = HashTrieMap(a=1, b=2, c=3)
    y = x.update(HashTrieMap(b=4, c=5), {"c": 6})

    assert y == HashTrieMap(a=1, b=4, c=6)


def test_update_one_argument():
    x = HashTrieMap(a=1)

    assert x.update({"b": 2}) == HashTrieMap(a=1, b=2)


def test_update_no_arguments():
    x = HashTrieMap(a=1)

    assert x.update() == x


class HashDummy:
    def __hash__(self):
        return 6528039219058920  # Hash of '33'

    def __eq__(self, other):
        return self is other


def test_iteration_with_many_elements():
    values = list(range(2000))
    keys = [str(x) for x in values]
    init_dict = dict(zip(keys, values))

    hash_dummy1 = HashDummy()
    hash_dummy2 = HashDummy()

    # Throw in a couple of hash collision nodes to test
    # those properly as well
    init_dict[hash_dummy1] = 12345
    init_dict[hash_dummy2] = 54321
    a_map = HashTrieMap(init_dict)

    actual_values = set()
    actual_keys = set()

    for k, v in a_map.items():
        actual_values.add(v)
        actual_keys.add(k)

    assert actual_keys == {*keys, hash_dummy1, hash_dummy2}
    assert actual_values == {*values, 12345, 54321}


def test_repr():
    rep = repr(HashTrieMap({"foo": "12", "": 37}))
    assert rep in {
        "HashTrieMap({'foo': '12', '': 37})",
        "HashTrieMap({'': 37, 'foo': '12'})",
    }


def test_str():
    s = str(HashTrieMap({1: 2, 3: 4}))
    assert s == "HashTrieMap({1: 2, 3: 4})" or s == "HashTrieMap({3: 4, 1: 2})"


def test_empty_truthiness():
    assert HashTrieMap(a=1)
    assert not HashTrieMap()


def test_iterable():
    m = HashTrieMap((i, i * 2) for i in range(3))
    assert m == HashTrieMap({0: 0, 1: 2, 2: 4})


def test_convert_hashtriemap():
    m = HashTrieMap({i: i * 2 for i in range(3)})
    assert HashTrieMap.convert({i: i * 2 for i in range(3)}) == m


def test_fast_convert_hashtriemap():
    m = HashTrieMap({i: i * 2 for i in range(3)})
    assert HashTrieMap.convert(m) is m


# Non-pyrsistent-test-suite tests


def test_more_eq():
    o = object()

    assert HashTrieMap([(o, o), (1, o)]) == HashTrieMap([(o, o), (1, o)])
    assert HashTrieMap([(o, "foo")]) == HashTrieMap([(o, "foo")])
    assert HashTrieMap() == HashTrieMap([])

    assert HashTrieMap({1: 2}) != HashTrieMap({1: 3})
    assert HashTrieMap({o: 1}) != HashTrieMap({o: o})
    assert HashTrieMap([]) != HashTrieMap([(o, 1)])


def test_pickle():
    assert pickle.loads(
        pickle.dumps(HashTrieMap([(1, 2), (3, 4)])),
    ) == HashTrieMap([(1, 2), (3, 4)])


def test_get():
    m1 = HashTrieMap({"foo": "bar"})
    assert m1.get("foo") == "bar"
    assert m1.get("baz") is None
    assert m1.get("spam", "eggs") == "eggs"


@pytest.mark.parametrize(
    "view",
    [pytest.param(methodcaller(p), id=p) for p in ["keys", "values", "items"]],
)
@pytest.mark.parametrize(
    "cls",
    [
        abc.Set,
        abc.MappingView,
        abc.KeysView,
        abc.ValuesView,
        abc.ItemsView,
    ],
)
def test_views_abc(view, cls):
    m, d = HashTrieMap(), {}
    assert isinstance(view(m), cls) == isinstance(view(d), cls)


def test_keys():
    d = HashTrieMap({1: 2, 3: 4})
    k = d.keys()

    assert 1 in k
    assert 2 not in k
    assert object() not in k

    assert len(k) == 2

    assert k == d.keys()
    assert k == HashTrieMap({1: 2, 3: 4}).keys()
    assert k == {1, 3}

    assert k != iter({1, 3})
    assert k != {1, 2, 3}
    assert k != {1, 4}
    assert not k == {1, 4}

    assert k != object()


def test_keys_setlike():
    assert {1: 2, 3: 4}.keys() & HashTrieMap({1: 2}).keys() == {1}
    assert {1: 2, 3: 4}.keys() & HashTrieMap({1: 2}).keys() != {1, 2}
    assert HashTrieMap({1: 2}).keys() & {1: 2, 3: 4}.keys() == {1}
    assert HashTrieMap({1: 2}).keys() & {1: 2, 3: 4}.keys() != {2}
    assert not HashTrieMap({1: 2}).keys() & {}.keys()
    assert HashTrieMap({1: 2}).keys() & {1} == {1}
    assert HashTrieMap({1: 2}).keys() & [1] == {1}

    assert HashTrieMap({1: 2}).keys() | {3} == {1, 3}
    assert HashTrieMap({1: 2}).keys() | [3] == {1, 3}

    # these don't really exist on the KeysView protocol but they're nice to have
    s = (1, "foo")
    assert HashTrieMap({1: 2, "foo": 7}).keys().intersection(s) == set(s)
    assert not HashTrieMap({1: 2}).keys().intersection({})
    assert HashTrieMap({1: 2}).keys().union({3}) == {1, 3}

    assert HashTrieMap({1: 2, 3: 4}).keys() < {1, 2, 3}
    assert HashTrieMap({1: 2, 3: 4}).keys() <= {1, 2, 3}
    assert not HashTrieMap({1: 2}).keys() < {1}
    assert HashTrieMap({1: 2}).keys() > set()
    assert HashTrieMap({1: 2}).keys() >= set()


def test_keys_repr():
    m = HashTrieMap({"foo": 3, 37: "bar"})
    assert repr(m.keys()) in {
        "keys_view({'foo', 37})",
        "keys_view({37, 'foo'})",
    }


def test_values():
    d = HashTrieMap({1: 2, 3: 4})
    v = d.values()

    assert 2 in v
    assert 3 not in v
    assert object() not in v

    assert len(v) == 2

    assert v == v
    # https://bugs.python.org/issue12445 which was WONTFIXed
    assert v != HashTrieMap({1: 2, 3: 4}).values()
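    # builtin dict behaves the same way: two distinct values() views never
    # compare equal, since the view type defines no __eq__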
    assert v != [2, 4]

    assert set(v) == {2, 4}


def test_values_repr():
    m = HashTrieMap({"foo": 3, 37: "bar", "baz": 3})
    assert repr(m.values()) in {
        "values_view(['bar', 3, 3])",
        "values_view([3, 'bar', 3])",
        "values_view([3, 3, 'bar'])",
    }


def test_items():
    d = HashTrieMap({1: 2, 3: 4})
    i = d.items()

    assert (1, 2) in i
    assert (1, 4) not in i

    assert len(i) == 2

    assert i == d.items()
    assert i == HashTrieMap({1: 2, 3: 4}).items()
    assert i == {(1, 2), (3, 4)}

    assert i != iter({(1, 2), (3, 4)})
    assert i != {(1, 2, 3), (3, 4, 5)}
    assert i == {1: 2, 3: 4}.items()
    assert i != {(1, 2), (3, 4), (5, 6)}
    assert i != {(1, 2)}
    assert not i == {1, 4}

    assert i != object()


def test_items_setlike():
    assert {1: 2, 3: 4}.items() & HashTrieMap({1: 2}).items() == {(1, 2)}
    assert {1: 2, 3: 4}.items() & HashTrieMap({1: 2}).items() != {(1, 2), 3}

    assert HashTrieMap({1: 2}).items() & {1: 2, 3: 4}.items() == {(1, 2)}
    assert HashTrieMap({1: 2}).items() & {1: 2, 3: 4}.items() != {(3, 4)}
    assert not HashTrieMap({1: 2}).items() & {}.items()

    assert HashTrieMap({1: 2}).items() & [(1, 2)] == {(1, 2)}
    assert HashTrieMap({1: 2}).items() & [[1, 2]] == set()

    assert HashTrieMap({1: 2}).items() | {(3, 4)} == {(1, 2), (3, 4)}
    assert HashTrieMap({1: 2}).items() | [7] == {(1, 2), 7}

    s = ((1, 2), ("foo", 37))
    assert HashTrieMap({1: 2, "foo": 7}).items().intersection(s) == {(1, 2)}
    assert not HashTrieMap({1: 2}).items().intersection({})

    assert HashTrieMap({1: 2}).items().union({3}) == {(1, 2), 3}

    assert HashTrieMap({1: 2, 3: 4}).items() < {(1, 2), (3, 4), ("foo", "bar")}
    assert HashTrieMap({1: 2, 3: 4}).items() <= {(1, 2), (3, 4)}
    assert not HashTrieMap({1: 2}).keys() < {1}
    assert HashTrieMap({1: 2}).items() > set()
    assert HashTrieMap({1: 2}).items() >= set()


def test_items_repr():
    m = HashTrieMap({"foo": 3, 37: "bar", "baz": 3})
    assert repr(m.items()) in {
        "items_view([('foo', 3), (37, 'bar'), ('baz', 3)])",
        "items_view([('foo', 3), ('baz', 3), (37, 'bar')])",
        "items_view([(37, 'bar'), ('foo', 3), ('baz', 3)])",
        "items_view([(37, 'bar'), ('baz', 3), ('foo', 3)])",
        "items_view([('baz', 3), (37, 'bar'), ('foo', 3)])",
        "items_view([('baz', 3), ('foo', 3), (37, 'bar')])",
    }


def test_fromkeys():
    keys = list(range(10))
    got = HashTrieMap.fromkeys(keys)
    expected = HashTrieMap((i, None) for i in keys)
    assert got == HashTrieMap(dict.fromkeys(keys)) == expected


def test_fromkeys_explicit_value():
    keys = list(range(10))
    expected = HashTrieMap((i, "foo") for i in keys)
    got = HashTrieMap.fromkeys(keys, "foo")
    assert got == HashTrieMap(dict.fromkeys(keys, "foo")) == expected


def test_fromkeys_explicit_value_not_copied():
    keys = list(range(5))

    got = HashTrieMap.fromkeys(keys, [])
    got[3].append(1)

    assert got == HashTrieMap((i, [1]) for i in keys)


def test_update_with_iterable_of_kvs():
    assert HashTrieMap({1: 2}).update(iter([(3, 4), ("5", 6)])) == HashTrieMap(
        {
            1: 2,
            3: 4,
            "5": 6,
        },
    )
07070100000020000081A400000000000000000000000167507351000017CE000000000000000000000000000000000000002B00000000rpds.py-0.22.3/tests/test_hash_trie_set.py"""
Modified from the pyrsistent test suite.

Pre-modification, these were MIT licensed, and are copyright:

    Copyright (c) 2022 Tobias Gustafsson

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation
    files (the "Software"), to deal in the Software without
    restriction, including without limitation the rights to use,
    copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following
    conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    OTHER DEALINGS IN THE SOFTWARE.
"""

from collections import abc
import pickle

import pytest

from rpds import HashTrieSet


def test_key_is_tuple():
    with pytest.raises(KeyError):
        HashTrieSet().remove((1, 1))


def test_key_is_not_tuple():
    with pytest.raises(KeyError):
        HashTrieSet().remove("asdf")


def test_hashing():
    o = object()

    assert hash(HashTrieSet([o])) == hash(HashTrieSet([o]))
    assert hash(HashTrieSet([o, o])) == hash(HashTrieSet([o, o]))
    assert hash(HashTrieSet([])) == hash(HashTrieSet([]))
    assert hash(HashTrieSet([1, 2])) == hash(HashTrieSet([1, 2]))
    assert hash(HashTrieSet([1, 2])) == hash(HashTrieSet([2, 1]))
    assert not (HashTrieSet([1, 2]) == HashTrieSet([1, 3]))
    assert not (HashTrieSet([]) == HashTrieSet([o]))

    assert hash(HashTrieSet([1, 2])) != hash(HashTrieSet([1, 3]))
    assert hash(HashTrieSet([1, o])) != hash(HashTrieSet([1, 2]))
    assert hash(HashTrieSet([1, 2])) != hash(HashTrieSet([2, 1, 3]))
    assert not (HashTrieSet([o]) != HashTrieSet([o, o]))
    assert not (HashTrieSet([o, o]) != HashTrieSet([o, o]))
    assert not (HashTrieSet() != HashTrieSet([]))


def test_empty_truthiness():
    assert HashTrieSet([1])
    assert not HashTrieSet()


def test_contains_elements_that_it_was_initialized_with():
    initial = [1, 2, 3]
    s = HashTrieSet(initial)

    assert set(s) == set(initial)
    assert len(s) == len(set(initial))


def test_is_immutable():
    s1 = HashTrieSet([1])
    s2 = s1.insert(2)

    assert s1 == HashTrieSet([1])
    assert s2 == HashTrieSet([1, 2])

    s3 = s2.remove(1)
    assert s2 == HashTrieSet([1, 2])
    assert s3 == HashTrieSet([2])


def test_remove_when_not_present():
    s1 = HashTrieSet([1, 2, 3])
    with pytest.raises(KeyError):
        s1.remove(4)


def test_discard():
    s1 = HashTrieSet((1, 2, 3))
    assert s1.discard(3) == HashTrieSet((1, 2))
    assert s1.discard(4) == s1


def test_is_iterable():
    assert sum(HashTrieSet([1, 2, 3])) == 6


def test_contains():
    s = HashTrieSet([1, 2, 3])

    assert 2 in s
    assert 4 not in s


def test_supports_set_operations():
    s1 = HashTrieSet([1, 2, 3])
    s2 = HashTrieSet([3, 4, 5])

    assert s1 | s2 == HashTrieSet([1, 2, 3, 4, 5])
    assert s1.union(s2) == s1 | s2

    assert s1 & s2 == HashTrieSet([3])
    assert s1.intersection(s2) == s1 & s2

    assert s1 - s2 == HashTrieSet([1, 2])
    assert s1.difference(s2) == s1 - s2

    assert s1 ^ s2 == HashTrieSet([1, 2, 4, 5])
    assert s1.symmetric_difference(s2) == s1 ^ s2


def test_supports_set_comparisons():
    s1 = HashTrieSet([1, 2, 3])
    s3 = HashTrieSet([1, 2])
    s4 = HashTrieSet([1, 2, 3])

    assert HashTrieSet([1, 2, 3, 3, 5]) == HashTrieSet([1, 2, 3, 5])
    assert s1 != s3

    assert s3 < s1
    assert s3 <= s1
    assert s3 <= s4

    assert s1 > s3
    assert s1 >= s3
    assert s4 >= s3


def test_repr():
    rep = repr(HashTrieSet([1, 2]))
    assert rep == "HashTrieSet({1, 2})" or rep == "HashTrieSet({2, 1})"

    rep = repr(HashTrieSet(["1", "2"]))
    assert rep == "HashTrieSet({'1', '2'})" or rep == "HashTrieSet({'2', '1'})"


def test_update():
    assert HashTrieSet([1, 2, 3]).update([3, 4, 4, 5]) == HashTrieSet(
        [1, 2, 3, 4, 5],
    )


def test_update_no_elements():
    s1 = HashTrieSet([1, 2])
    assert s1.update([]) == s1


def test_iterable():
    assert HashTrieSet(iter("a")) == HashTrieSet(iter("a"))


def test_more_eq():
    # Non-pyrsistent-test-suite test
    o = object()

    assert HashTrieSet([o]) == HashTrieSet([o])
    assert HashTrieSet([o, o]) == HashTrieSet([o, o])
    assert HashTrieSet([o]) == HashTrieSet([o, o])
    assert HashTrieSet() == HashTrieSet([])
    assert not (HashTrieSet([1, 2]) == HashTrieSet([1, 3]))
    assert not (HashTrieSet([o, 1]) == HashTrieSet([o, o]))
    assert not (HashTrieSet([]) == HashTrieSet([o]))

    assert HashTrieSet([1, 2]) != HashTrieSet([1, 3])
    assert HashTrieSet([]) != HashTrieSet([o])
    assert not (HashTrieSet([o]) != HashTrieSet([o]))
    assert not (HashTrieSet([o, o]) != HashTrieSet([o, o]))
    assert not (HashTrieSet([o]) != HashTrieSet([o, o]))
    assert not (HashTrieSet() != HashTrieSet([]))

    assert HashTrieSet([1, 2]) == {1, 2}
    assert HashTrieSet([1, 2]) != {1, 2, 3}
    assert HashTrieSet([1, 2]) != [1, 2]


def test_more_set_comparisons():
    s = HashTrieSet([1, 2, 3])

    assert s == s
    assert not (s < s)
    assert s <= s
    assert not (s > s)
    assert s >= s


def test_pickle():
    assert pickle.loads(
        pickle.dumps(HashTrieSet([1, 2, 3, 4])),
    ) == HashTrieSet([1, 2, 3, 4])


def test_instance_of_set():
    assert isinstance(HashTrieSet(), abc.Set)


def test_lt_le_gt_ge():
    assert HashTrieSet({}) < {1}
    assert HashTrieSet({}) <= {1}
    assert HashTrieSet({1}) > set()
    assert HashTrieSet({1}) >= set()
07070100000021000081A400000000000000000000000167507351000010E3000000000000000000000000000000000000002200000000rpds.py-0.22.3/tests/test_list.py"""
Modified from the pyrsistent test suite.

Pre-modification, these were MIT licensed, and are copyright:

    Copyright (c) 2022 Tobias Gustafsson

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation
    files (the "Software"), to deal in the Software without
    restriction, including without limitation the rights to use,
    copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following
    conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    OTHER DEALINGS IN THE SOFTWARE.
"""

import pickle

import pytest

from rpds import List


def test_literalish_works():
    assert List(1, 2, 3) == List([1, 2, 3])


def test_first_and_rest():
    pl = List([1, 2])
    assert pl.first == 1
    assert pl.rest.first == 2
    assert pl.rest.rest == List()


def test_instantiate_large_list():
    assert List(range(1000)).first == 0


def test_iteration():
    assert list(List()) == []
    assert list(List([1, 2, 3])) == [1, 2, 3]


def test_push_front():
    assert List([1, 2, 3]).push_front(0) == List([0, 1, 2, 3])


def test_push_front_empty_list():
    assert List().push_front(0) == List([0])


def test_truthiness():
    assert List([1])
    assert not List()


def test_len():
    assert len(List([1, 2, 3])) == 3
    assert len(List()) == 0


def test_first_illegal_on_empty_list():
    with pytest.raises(IndexError):
        List().first


def test_rest_return_self_on_empty_list():
    assert List().rest == List()


def test_reverse():
    assert reversed(List([1, 2, 3])) == List([3, 2, 1])

    assert reversed(List()) == List()


def test_inequality():
    assert List([1, 2]) != List([1, 3])
    assert List([1, 2]) != List([1, 2, 3])
    assert List() != List([1, 2, 3])


def test_repr():
    assert str(List()) == "List([])"
    assert str(List([1, 2, 3])) in "List([1, 2, 3])"


def test_hashing():
    o = object()

    assert hash(List([o, o])) == hash(List([o, o]))
    assert hash(List([o])) == hash(List([o]))
    assert hash(List()) == hash(List([]))
    assert not (hash(List([1, 2])) == hash(List([1, 3])))
    assert not (hash(List([1, 2])) == hash(List([2, 1])))
    assert not (hash(List([o])) == hash(List([o, o])))
    assert not (hash(List([])) == hash(List([o])))

    assert hash(List([1, 2])) != hash(List([1, 3]))
    assert hash(List([1, 2])) != hash(List([2, 1]))
    assert hash(List([o])) != hash(List([o, o]))
    assert hash(List([])) != hash(List([o]))
    assert not (hash(List([o, o])) != hash(List([o, o])))
    assert not (hash(List([o])) != hash(List([o])))
    assert not (hash(List([])) != hash(List([])))


def test_sequence():
    m = List("asdf")
    assert m == List(["a", "s", "d", "f"])


# Non-pyrsistent-test-suite tests


def test_drop_first():
    assert List([1, 2, 3]).drop_first() == List([2, 3])


def test_drop_first_empty():
    """
    rpds itself returns an Option<List> here but we use IndexError instead.
    """
    with pytest.raises(IndexError):
        List([]).drop_first()


def test_more_eq():
    o = object()

    assert List([o, o]) == List([o, o])
    assert List([o]) == List([o])
    assert List() == List([])
    assert not (List([1, 2]) == List([1, 3]))
    assert not (List([o]) == List([o, o]))
    assert not (List([]) == List([o]))

    assert List([1, 2]) != List([1, 3])
    assert List([o]) != List([o, o])
    assert List([]) != List([o])
    assert not (List([o, o]) != List([o, o]))
    assert not (List([o]) != List([o]))
    assert not (List() != List([]))


def test_pickle():
    assert pickle.loads(pickle.dumps(List([1, 2, 3, 4]))) == List([1, 2, 3, 4])
07070100000022000081A40000000000000000000000016750735100000E4A000000000000000000000000000000000000002300000000rpds.py-0.22.3/tests/test_queue.py"""
Modified from the pyrsistent test suite.

Pre-modification, these were MIT licensed, and are copyright:

    Copyright (c) 2022 Tobias Gustafsson

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation
    files (the "Software"), to deal in the Software without
    restriction, including without limitation the rights to use,
    copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following
    conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    OTHER DEALINGS IN THE SOFTWARE.
"""

import pytest

from rpds import Queue


def test_literalish_works():
    assert Queue(1, 2, 3) == Queue([1, 2, 3])


def test_peek_dequeue():
    pl = Queue([1, 2])
    assert pl.peek == 1
    assert pl.dequeue().peek == 2
    assert pl.dequeue().dequeue().is_empty
    with pytest.raises(IndexError):
        pl.dequeue().dequeue().dequeue()


def test_instantiate_large_list():
    assert Queue(range(1000)).peek == 0


def test_iteration():
    assert list(Queue()) == []
    assert list(Queue([1, 2, 3])) == [1, 2, 3]


def test_enqueue():
    assert Queue([1, 2, 3]).enqueue(4) == Queue([1, 2, 3, 4])


def test_enqueue_empty_list():
    assert Queue().enqueue(0) == Queue([0])


def test_truthiness():
    assert Queue([1])
    assert not Queue()


def test_len():
    assert len(Queue([1, 2, 3])) == 3
    assert len(Queue()) == 0


def test_peek_illegal_on_empty_list():
    with pytest.raises(IndexError):
        Queue().peek


def test_inequality():
    assert Queue([1, 2]) != Queue([1, 3])
    assert Queue([1, 2]) != Queue([1, 2, 3])
    assert Queue() != Queue([1, 2, 3])


def test_repr():
    assert str(Queue()) == "Queue([])"
    assert str(Queue([1, 2, 3])) in "Queue([1, 2, 3])"


def test_sequence():
    m = Queue("asdf")
    assert m == Queue(["a", "s", "d", "f"])


# Non-pyrsistent-test-suite tests


def test_dequeue():
    assert Queue([1, 2, 3]).dequeue() == Queue([2, 3])


def test_dequeue_empty():
    """
    rpds itself returns an Option<Queue> here but we use IndexError instead.
    """
    with pytest.raises(IndexError):
        Queue([]).dequeue()


def test_more_eq():
    o = object()

    assert Queue([o, o]) == Queue([o, o])
    assert Queue([o]) == Queue([o])
    assert Queue() == Queue([])
    assert not (Queue([1, 2]) == Queue([1, 3]))
    assert not (Queue([o]) == Queue([o, o]))
    assert not (Queue([]) == Queue([o]))

    assert Queue([1, 2]) != Queue([1, 3])
    assert Queue([o]) != Queue([o, o])
    assert Queue([]) != Queue([o])
    assert not (Queue([o, o]) != Queue([o, o]))
    assert not (Queue([o]) != Queue([o]))
    assert not (Queue() != Queue([]))


def test_hashing():
    assert hash(Queue([1, 2])) == hash(Queue([1, 2]))
    assert hash(Queue([1, 2])) != hash(Queue([2, 1]))
    assert len({Queue([1, 2]), Queue([1, 2])}) == 1


def test_unhashable_contents():
    q = Queue([1, {1}])
    with pytest.raises(TypeError):
        hash(q)
07070100000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000B00000000TRAILER!!!230 blocks