File sqlite3-to-mysql-2.1.7.obscpio of Package python-sqlite3-to-mysql
==> sqlite3-to-mysql-2.1.7/.bandit.yml <==
### This config may optionally select a subset of tests to run or skip by
### filling out the 'tests' and 'skips' lists given below. If no tests are
### specified for inclusion then it is assumed all tests are desired. The skips
### set will remove specific tests from the include set. This can be controlled
### using the -t/-s CLI options. Note that the same test ID should not appear
### in both 'tests' and 'skips', as this would be nonsensical and is detected by
### Bandit at runtime.
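### For example, to run only a couple of checks you could either set
### 'tests: [B101, B406]' below or pass the IDs on the command line with
### 'bandit -c .bandit.yml -t B101,B406 -r .' (IDs chosen purely for illustration).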
# Available tests:
# B101 : assert_used
# B102 : exec_used
# B103 : set_bad_file_permissions
# B104 : hardcoded_bind_all_interfaces
# B105 : hardcoded_password_string
# B106 : hardcoded_password_funcarg
# B107 : hardcoded_password_default
# B108 : hardcoded_tmp_directory
# B110 : try_except_pass
# B112 : try_except_continue
# B201 : flask_debug_true
# B301 : pickle
# B302 : marshal
# B303 : md5
# B304 : ciphers
# B305 : cipher_modes
# B306 : mktemp_q
# B307 : eval
# B308 : mark_safe
# B309 : httpsconnection
# B310 : urllib_urlopen
# B311 : random
# B312 : telnetlib
# B313 : xml_bad_cElementTree
# B314 : xml_bad_ElementTree
# B315 : xml_bad_expatreader
# B316 : xml_bad_expatbuilder
# B317 : xml_bad_sax
# B318 : xml_bad_minidom
# B319 : xml_bad_pulldom
# B320 : xml_bad_etree
# B321 : ftplib
# B322 : input
# B323 : unverified_context
# B324 : hashlib_new_insecure_functions
# B325 : tempnam
# B401 : import_telnetlib
# B402 : import_ftplib
# B403 : import_pickle
# B404 : import_subprocess
# B405 : import_xml_etree
# B406 : import_xml_sax
# B407 : import_xml_expat
# B408 : import_xml_minidom
# B409 : import_xml_pulldom
# B410 : import_lxml
# B411 : import_xmlrpclib
# B412 : import_httpoxy
# B413 : import_pycrypto
# B501 : request_with_no_cert_validation
# B502 : ssl_with_bad_version
# B503 : ssl_with_bad_defaults
# B504 : ssl_with_no_version
# B505 : weak_cryptographic_key
# B506 : yaml_load
# B507 : ssh_no_host_key_verification
# B601 : paramiko_calls
# B602 : subprocess_popen_with_shell_equals_true
# B603 : subprocess_without_shell_equals_true
# B604 : any_other_function_with_shell_equals_true
# B605 : start_process_with_a_shell
# B606 : start_process_with_no_shell
# B607 : start_process_with_partial_path
# B608 : hardcoded_sql_expressions
# B609 : linux_commands_wildcard_injection
# B610 : django_extra_used
# B611 : django_rawsql_used
# B701 : jinja2_autoescape_false
# B702 : use_of_mako_templates
# B703 : django_mark_safe
# (optional) list included test IDs here, eg '[B101, B406]':
tests:
# (optional) list skipped test IDs here, eg '[B101, B406]':
skips:
- B404
- B603
- B607
- B608
### (optional) plugin settings - some test plugins require configuration data
### that may be given here, per-plugin. All bandit test plugins have a built in
### set of sensible defaults and these will be used if no configuration is
### provided. It is not necessary to provide settings for every (or any) plugin
### if the defaults are acceptable.
any_other_function_with_shell_equals_true:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
hardcoded_tmp_directory:
tmp_dirs:
- /tmp
- /var/tmp
- /dev/shm
linux_commands_wildcard_injection:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
ssl_with_bad_defaults:
bad_protocol_versions:
- PROTOCOL_SSLv2
- SSLv2_METHOD
- SSLv23_METHOD
- PROTOCOL_SSLv3
- PROTOCOL_TLSv1
- SSLv3_METHOD
- TLSv1_METHOD
ssl_with_bad_version:
bad_protocol_versions:
- PROTOCOL_SSLv2
- SSLv2_METHOD
- SSLv23_METHOD
- PROTOCOL_SSLv3
- PROTOCOL_TLSv1
- SSLv3_METHOD
- TLSv1_METHOD
start_process_with_a_shell:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
start_process_with_no_shell:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
start_process_with_partial_path:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
subprocess_popen_with_shell_equals_true:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
subprocess_without_shell_equals_true:
no_shell:
- os.execl
- os.execle
- os.execlp
- os.execlpe
- os.execv
- os.execve
- os.execvp
- os.execvpe
- os.spawnl
- os.spawnle
- os.spawnlp
- os.spawnlpe
- os.spawnv
- os.spawnve
- os.spawnvp
- os.spawnvpe
- os.startfile
shell:
- os.system
- os.popen
- os.popen2
- os.popen3
- os.popen4
- popen2.popen2
- popen2.popen3
- popen2.popen4
- popen2.Popen3
- popen2.Popen4
- commands.getoutput
- commands.getstatusoutput
subprocess:
- subprocess.Popen
- subprocess.call
- subprocess.check_call
- subprocess.check_output
- subprocess.run
try_except_continue:
check_typed_exception: false
try_except_pass:
check_typed_exception: false
weak_cryptographic_key:
weak_key_size_dsa_high: 1024
weak_key_size_dsa_medium: 2048
weak_key_size_ec_high: 160
weak_key_size_ec_medium: 224
weak_key_size_rsa_high: 1024
weak_key_size_rsa_medium: 2048
==> sqlite3-to-mysql-2.1.7/.flake8 <==
[flake8]
ignore = I100,I201,I202,D203,D401,W503,E203,F401,F403,C901,E501
exclude = tests
max-line-length = 88
==> sqlite3-to-mysql-2.1.7/.github/FUNDING.yml <==
github: techouse
custom: [ "https://paypal.me/ktusar" ]
==> sqlite3-to-mysql-2.1.7/.github/ISSUE_TEMPLATE/bug_report.md <==
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: techouse
---
**Describe the bug**
A clear and concise description of what the bug is.
**Expected behaviour**
What you expected.
**Actual result**
What happened instead.
**System Information**
```bash
$ sqlite3mysql --version
```
```
<paste here>
```
This command is only available on v1.3.6 and greater. Otherwise, please provide some basic information about your system (Python version, operating system, etc.).
**Additional context**
Add any other context about the problem here.
In case of errors please run the same command with `--debug`. This option is only available on v1.4.12 or greater.
==> sqlite3-to-mysql-2.1.7/.github/ISSUE_TEMPLATE/feature_request.md <==
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: techouse
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
==> sqlite3-to-mysql-2.1.7/.github/dependabot.yml <==
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
==> sqlite3-to-mysql-2.1.7/.github/workflows/codeql-analysis.yml <==
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '0 12 * * 6'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
==> sqlite3-to-mysql-2.1.7/.github/workflows/docker.yml <==
name: Publish Docker image
on:
workflow_call:
defaults:
run:
shell: bash
jobs:
push_to_registry:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
environment:
name: docker
url: https://hub.docker.com/r/${{ vars.DOCKERHUB_REPOSITORY }}
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ vars.DOCKERHUB_REPOSITORY }}
ghcr.io/${{ github.repository }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}.{{patch}}
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to the Container registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
==> sqlite3-to-mysql-2.1.7/.github/workflows/publish.yml <==
name: Publish package
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
defaults:
run:
shell: bash
permissions: read-all
jobs:
build-and-publish:
runs-on: ubuntu-latest
environment:
name: pypi
url: https://pypi.org/p/sqlite3-to-mysql
permissions:
id-token: write
contents: write
steps:
- uses: actions/checkout@v4
- name: Compare package version with ref/tag
id: compare
run: |
set -e
VERSION=$(awk -F'"' '/__version__/ {print $2}' sqlite3_to_mysql/__init__.py)
TAG=${GITHUB_REF_NAME#v}
if [[ "$VERSION" != "$TAG" ]]; then
echo "Version in sqlite3_to_mysql/__version__.py ($VERSION) does not match tag ($TAG)"
exit 1
fi
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Set up Python
id: setup_python
uses: actions/setup-python@v4
with:
python-version: "3.x"
- name: Install build dependencies
id: install_build_dependencies
run: |
set -e
python3 -m pip install --upgrade pip
pip install build setuptools wheel
- name: Build a binary wheel and a source tarball
id: build
run: |
set -e
python3 -m build --sdist --wheel --outdir dist/ .
- name: Publish distribution package to Test PyPI
id: publish_test
uses: pypa/gh-action-pypi-publish@release/v1
with:
repository-url: https://test.pypi.org/legacy/
- name: Publish distribution package to PyPI
id: publish
if: startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@release/v1
- name: Install pyproject-parser
id: install_pyproject_parser
run: |
set -e
pip install pyproject-parser[cli]
- name: Read project name from pyproject.toml
id: read_project_name
run: |
set -e
NAME=$(pyproject-parser info project.name -r | tr -d '"')
echo "NAME=$NAME" >> $GITHUB_ENV
- name: Create tag-specific CHANGELOG
id: create_changelog
run: |
set -e
CHANGELOG_PATH=$RUNNER_TEMP/CHANGELOG.md
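# Keep only the most recent release section of CHANGELOG.md (from the first "# " heading up to, but not including, the next one) and trim trailing blank lines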
awk '/^#[[:space:]].*/ { if (count == 1) exit; count++; print } count == 1 && !/^#[[:space:]].*/ { print }' CHANGELOG.md | sed -e :a -e '/^\n*$/{$d;N;ba' -e '}' > $CHANGELOG_PATH
echo -en "\n[https://pypi.org/project/$NAME/$VERSION/](https://pypi.org/project/$NAME/$VERSION/)" >> $CHANGELOG_PATH
echo "CHANGELOG_PATH=$CHANGELOG_PATH" >> $GITHUB_ENV
- name: Github Release
id: github_release
uses: softprops/action-gh-release@v1
with:
name: ${{ env.VERSION }}
tag_name: ${{ github.ref }}
body_path: ${{ env.CHANGELOG_PATH }}
files: |
dist/*.whl
dist/*.tar.gz
- name: Cleanup
if: ${{ always() }}
run: |
rm -rf dist
rm -rf $CHANGELOG_PATH
docker:
needs: build-and-publish
permissions:
packages: write
contents: read
uses: ./.github/workflows/docker.yml
secrets: inherit
==> sqlite3-to-mysql-2.1.7/.github/workflows/test.yml <==
name: Test
on:
push:
branches:
- master
pull_request:
branches:
- master
defaults:
run:
shell: bash
permissions: read-all
jobs:
analyze:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.x"
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
pip install -r requirements_dev.txt
- name: Run static analysis
run: tox -e linters
test:
needs: analyze
runs-on: ubuntu-latest
strategy:
matrix:
include:
- toxenv: "python3.8"
db: "mariadb:5.5"
legacy_db: 1
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:5.5"
legacy_db: 1
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:5.5"
legacy_db: 1
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:5.5"
legacy_db: 1
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:5.5"
legacy_db: 1
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.0"
legacy_db: 1
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.0"
legacy_db: 1
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.0"
legacy_db: 1
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.0"
legacy_db: 1
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.0"
legacy_db: 1
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.1"
legacy_db: 1
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.1"
legacy_db: 1
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.1"
legacy_db: 1
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.1"
legacy_db: 1
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.1"
legacy_db: 1
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.2"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.2"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.2"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.2"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.2"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.3"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.3"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.3"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.3"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.3"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.4"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.4"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.4"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.4"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.4"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.5"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.5"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.5"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.5"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.5"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.6"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.6"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.6"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.6"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.6"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mariadb:10.11"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mariadb:10.11"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mariadb:10.11"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mariadb:10.11"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mariadb:10.11"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mysql:5.5"
legacy_db: 1
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mysql:5.5"
legacy_db: 1
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mysql:5.5"
legacy_db: 1
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mysql:5.5"
legacy_db: 1
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mysql:5.5"
legacy_db: 1
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mysql:5.6"
legacy_db: 1
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mysql:5.6"
legacy_db: 1
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mysql:5.6"
legacy_db: 1
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mysql:5.6"
legacy_db: 1
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mysql:5.6"
legacy_db: 1
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mysql:5.7"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mysql:5.7"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mysql:5.7"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mysql:5.7"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mysql:5.7"
legacy_db: 0
experimental: false
py: "3.12"
- toxenv: "python3.8"
db: "mysql:8.0"
legacy_db: 0
experimental: false
py: "3.8"
- toxenv: "python3.9"
db: "mysql:8.0"
legacy_db: 0
experimental: false
py: "3.9"
- toxenv: "python3.10"
db: "mysql:8.0"
legacy_db: 0
experimental: false
py: "3.10"
- toxenv: "python3.11"
db: "mysql:8.0"
legacy_db: 0
experimental: false
py: "3.11"
- toxenv: "python3.12"
db: "mysql:8.0"
legacy_db: 0
experimental: false
py: "3.12"
continue-on-error: ${{ matrix.experimental }}
services:
mysql:
image: "${{ matrix.db }}"
ports:
- 3306:3306
env:
MYSQL_ALLOW_EMPTY_PASSWORD: yes
options: "--name=mysqld"
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.py }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.py }}
- uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-1
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install -U codecov tox-gh-actions
pip install -r requirements_dev.txt
- name: Set up MySQL
env:
DB: ${{ matrix.db }}
MYSQL_USER: ${{ secrets.MYSQL_USER }}
MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}
MYSQL_DATABASE: ${{ vars.MYSQL_DATABASE }}
MYSQL_HOST: ${{ vars.MYSQL_HOST }}
MYSQL_PORT: ${{ vars.MYSQL_PORT }}
run: |
set -e
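# Wait until the MySQL service container accepts connections before provisioning databases and users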
while :
do
sleep 1
mysql -h127.0.0.1 -uroot -e 'select version()' && break
done
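# MySQL 8.0 defaults to caching_sha2_password: enable local_infile, copy the server's TLS/RSA key material out of the container, create users for the sha256_password/caching_sha2_password auth tests, and fall back to mysql_native_password for the main test user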
if [ "$DB" == 'mysql:8.0' ]; then
WITH_PLUGIN='with mysql_native_password'
mysql -h127.0.0.1 -uroot -e "SET GLOBAL local_infile=on"
docker cp mysqld:/var/lib/mysql/public_key.pem "${HOME}"
docker cp mysqld:/var/lib/mysql/ca.pem "${HOME}"
docker cp mysqld:/var/lib/mysql/server-cert.pem "${HOME}"
docker cp mysqld:/var/lib/mysql/client-key.pem "${HOME}"
docker cp mysqld:/var/lib/mysql/client-cert.pem "${HOME}"
mysql -uroot -h127.0.0.1 -e '
CREATE USER
user_sha256 IDENTIFIED WITH "sha256_password" BY "pass_sha256",
nopass_sha256 IDENTIFIED WITH "sha256_password",
user_caching_sha2 IDENTIFIED WITH "caching_sha2_password" BY "pass_caching_sha2",
nopass_caching_sha2 IDENTIFIED WITH "caching_sha2_password"
PASSWORD EXPIRE NEVER;'
mysql -uroot -h127.0.0.1 -e 'GRANT RELOAD ON *.* TO user_caching_sha2;'
else
WITH_PLUGIN=''
fi
mysql -h127.0.0.1 -uroot -e "create database $MYSQL_DATABASE DEFAULT CHARACTER SET utf8mb4"
mysql -h127.0.0.1 -uroot -e "create user $MYSQL_USER identified $WITH_PLUGIN by '${MYSQL_PASSWORD}'; grant all on ${MYSQL_DATABASE}.* to ${MYSQL_USER};"
mysql -h127.0.0.1 -uroot -e "create user ${MYSQL_USER}@localhost identified $WITH_PLUGIN by '${MYSQL_PASSWORD}'; grant all on ${MYSQL_DATABASE}.* to ${MYSQL_USER}@localhost;"
- name: Create db_credentials.json
env:
MYSQL_USER: ${{ secrets.MYSQL_USER }}
MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}
MYSQL_DATABASE: ${{ vars.MYSQL_DATABASE }}
MYSQL_HOST: ${{ vars.MYSQL_HOST }}
MYSQL_PORT: ${{ vars.MYSQL_PORT }}
run: |
set -e
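# Assemble tests/db_credentials.json from the MySQL connection env vars; jq's $ARGS.named collects all --arg values into a single JSON object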
jq -n \
--arg mysql_user "$MYSQL_USER" \
--arg mysql_password "$MYSQL_PASSWORD" \
--arg mysql_database "$MYSQL_DATABASE" \
--arg mysql_host "$MYSQL_HOST" \
--arg mysql_port "$MYSQL_PORT" \
'$ARGS.named' > tests/db_credentials.json
- name: Test with tox
env:
LEGACY_DB: ${{ matrix.legacy_db }}
run: tox
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
files: ./coverage.xml
env_vars: OS,PYTHON
verbose: true
- name: Cleanup
if: ${{ always() }}
run: |
rm -rf tests/db_credentials.json
==> sqlite3-to-mysql-2.1.7/.gitignore <==
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
# PyCharm specific files
.idea
# macOS specific
.DS_Store
# Potential leftovers
tests/db_credentials.json
test.db
==> sqlite3-to-mysql-2.1.7/CHANGELOG.md <==
# 2.1.6
* [FEAT] build both linux/amd64 and linux/arm64 Docker images
# 2.1.5
* [FEAT] add support for UNSIGNED numeric data type conversion
* [FIX] fix invalid column_type error message
# 2.1.4
* [CHORE] maintenance release to publish first containerized release
# 2.1.3
* [FIX] add packaging as a dependency
# 2.1.2
* [FIX] throw more comprehensive error messages when translating column types
# 2.1.1
* [CHORE] add support for Python 3.12
* [CHORE] bump minimum version of MySQL Connector/Python to 8.2.0
# 2.1.0
* [CHORE] drop support for Python 3.7
# 2.0.3
* [FIX] prevent AUTO_INCREMENT-ing fields from having a DEFAULT value
# 2.0.2
* [FIX] properly import CMySQLConnection
# 2.0.1
* [FIX] fix types
# 2.0.0
* [CHORE] drop support for Python 2.7, 3.5 and 3.6
* [CHORE] migrate pytest.ini configuration into pyproject.toml
* [CHORE] migrate from setuptools to hatch / hatchling
* [CHORE] add types
* [CHORE] add types to tests
* [CHORE] update dependencies
* [CHORE] use f-strings where appropriate
# 1.4.20
* [CHORE] update dependencies
* [CHORE] use [black](https://github.com/psf/black) and [isort](https://github.com/PyCQA/isort) in tox linters
# 1.4.19
* [FEAT] handle generated columns
# 1.4.18
* [CHORE] migrate from setup.py to pyproject.toml
# 1.4.17
* [CHORE] add publishing workflow
* [CHORE] add Python 3.11 support
* [CHORE] Remove CI tests for Python 3.5, 3.6, add CI tests for Python 3.11
* [CHORE] add MariaDB 10.11 CI tests
# 1.4.16
* [FIX] pin mysql-connector-python to <8.0.30
* [CHORE] update CI actions/checkout to v3
* [CHORE] update CI actions/setup-python to v4
* [CHORE] update CI actions/cache to v3
* [CHORE] update CI github/codeql-action/init to v2
* [CHORE] update CI github/codeql-action/analyze to v2
* [CHORE] update CI codecov/codecov-action to v2
# 1.4.15
* [FEAT] add option to truncate existing tables before inserting data
# 1.4.14
* [FIX] fix safe_identifier_length
# 1.4.13
* [FEAT] add option to update duplicate records
* [FEAT] add option to skip duplicate index creation if key name already exists
* [CHORE] mark test_quiet with xfail
* [CHORE] fix CLI test
* [CHORE] remove Fix MySQL GA Github Action step
# 1.4.12
* [FEAT] add --debug switch
* [FIX] import backports-datetime-fromisoformat only for Python 3.4, 3.5 and 3.6
* [FIX] handle SQLite date conversion
# 1.4.11
* [FIX] fix regression introduced in v1.4.9
# 1.4.10
* [FEAT] add ability to change default text type using --mysql-text-type
* [FIX] fix BOOLEAN conversion to TINYINT(1)
# 1.4.9
* [FEAT] add support for DEFAULT statements
# 1.4.8
* [CHORE] fix tests
# 1.4.7
* [CHORE] add support for Python 3.10
* [CHORE] add Python 3.10 tests
# 1.4.6
* [FEAT] add CLI options for custom charset and collation
* [FEAT] add unicase custom collation
* [FIX] limit MySQL identifier to 64 characters
* [FIX] handle multiple column FULLTEXT index transfer error
* [FIX] fix multiple column index length #28
* [CHORE] move some MySQL helper methods out of the main transporter
* [CHORE] refactor package
* [CHORE] add experimental tests for Python 3.10-dev
# 1.4.5
* [FIX] revert change introduced in v1.4.4
* [FIX] fix Click 8.0 OptionEatAll wrong type
* [CHORE] add tests for MariaDB 10.6
# 1.4.4
* [FIX] pin Click to <8.0
# 1.4.3
* [FIX] pin python-tabulate to <0.8.6 for Python 3.4 or less
* [FIX] pin Click to <8.0 only for Python 3.5 or less
# 1.4.2
* [FIX] fix auto_increment
* [CHORE] add DECIMAL test
* [FIX] pin Click to <8.0
# 1.4.1
* [FIX] pin mysql-connector-python to <8.0.24 for Python 3.5 or lower
# 1.4.0
* [FEAT] add password prompt. This changes the default behavior of -p
* [FEAT] add option to disable MySQL connection encryption
* [FEAT] add progress bar
* [FEAT] implement feature to transport custom data types as strings
* [FIX] require sqlalchemy <1.4.0 to make compatible with sqlalchemy-utils
* [CHORE] fix CI tests
# 1.3.12
* [FIX] handle duplicate indices
* [CHORE] transition from Travis CI to GitHub Actions
# 1.3.11
* [CHORE] add Python 3.9 tests
# 1.3.10
* [FEAT] add --use-fulltext option
* [FIX] use FULLTEXT index only if all columns are TEXT
# 1.3.9
* [FEAT] add --quiet option
# 1.3.8
* [FIX] test for mysql client more gracefully
# 1.3.7
* [FEAT] transfer composite primary keys
# 1.3.6
* [FEAT] simpler access to the debug version info using the --version switch
* [FEAT] add debug_info module to be used in bug reports
* [CHORE] use pytest fixture from Faker 4.1.0 in Python 3 tests
* [CHORE] omit debug_info.py in coverage reports
# 1.3.5
* [FEAT] set default collation of newly created databases and tables to utf8mb4_general_ci
* [FEAT] optional transfer of implicit column rowid using --with-rowid
* [CHORE] test non-numeric primary keys
* [CHORE] add rowid transfer tests
* [CHORE] fix tests
# 1.3.4
* [FIX] fix information_schema issue introduced with MySQL 8.0.21
* [FIX] sqlalchemy-utils dropped Python 2.7 support in v0.36.7
* [CHORE] add MySQL version output to CI tests
* [CHORE] add Python 3.9 to the CI tests
* [CHORE] add MariaDB 10.5 to the CI tests
* [CHORE] remove Python 2.7 from allowed CI test failures
* [CHORE] use Ubuntu Bionic instead of Ubuntu Xenial in CI tests
# 1.3.3
* [FEAT] add support for SQLite STRING and translate it as MySQL TEXT
# 1.3.2
* [FIX] force not null on primary-key columns
# 1.3.1
* [CHORE] test legacy databases in CI tests
* [CHORE] fix MySQL 8 CI tests
# 1.3.0
* [FEAT] add option to transfer only specific tables using -t
* [CHORE] add tests for transferring only certain tables
# 1.2.17
* [FIX] properly escape foreign keys names
# 1.2.16
* [FIX] differentiate better between MySQL and SQLite errors
* [CHORE] add Python 3.8 and 3.8-dev test build
# 1.2.15
* [CHORE] update Readme on PyPI
# 1.2.14
* [FIX] add INT64 as an alias for NUMERIC
* [CHORE] add support for Python 3.8
* [CHORE] add INT64 tests
# 1.2.13
* [CHORE] add [bandit](https://github.com/PyCQA/bandit) tests
* [CHORE] add more tests to increase test coverage
* [CHORE] fix tests
# 1.2.12
* [FEAT] transfer indices
* [CHORE] add additional index transfer tests
# 1.2.11
* [FIX] remove redundant SQL cleanup
* [CHORE] clean up a test
# 1.2.10
* [CHORE] update development requirements
# 1.2.9
* [FIX] change the way foreign keys are added.
* [FIX] change default MySQL character set to utf8mb4
* [CHORE] add more verbosity
# 1.2.8
* [FIX] disable FOREIGN_KEY_CHECKS before inserting the foreign keys and enable FOREIGN_KEY_CHECKS back once finished
# 1.2.7
* [FEAT] transfer foreign keys
* [FIX] in Python 2 the MySQL binary protocol cannot handle 'buffer' objects, so we have to convert them to strings
* [CHORE] test transfer of foreign keys
* [CHORE] only test databases that support JSON
* [CHORE] fix tests
# 1.2.6
* [CHORE] refactor package
# 1.2.5
* [CHORE] update Readme
# 1.2.4
* [CHORE] fix CI tests
* [CHORE] add linter rules
# 1.2.3
* [CHORE] refactor package
* [CHORE] test the CLI interface
* [CHORE] fix tests
# 1.2.2
* [FEAT] add Python 2.7 support
* [CHORE] refactor package
* [CHORE] fix tests
* [CHORE] add option to test against a real SQLite file
# 1.2.1
* [FIX] catch exceptions
* [FIX] default mysql_string_type from VARCHAR(300) to VARCHAR(255)
* [CHORE] fix CI tests
* [CHORE] option to run tests against a physical MySQL server instance as well as a Docker one
* [CHORE] run tests against any Docker image with a MySQL/MariaDB database
* [CHORE] clean up hanged Docker images with the name "pytest_sqlite3_to_mysql"
* [CHORE] 100% code coverage
# 1.2.0
* [CHORE] add more tests
# 1.1.2
* [FIX] fix creation of tables with non-numeric primary keys
# 1.1.1
* [FIX] fix error of transferring empty tables
# 1.1.0
* [CHORE] update to work with MySQL Connector/Python v8.0.11+
# 1.0.3
* [FIX] don't autoincrement if primary key is TEXT/VARCHAR
# 1.0.2
* [CHORE] refactor package
# 1.0.1
* [CHORE] change license from GPL to MIT
# 1.0.0
Initial commit
==> sqlite3-to-mysql-2.1.7/CODE-OF-CONDUCT.md <==
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[techouse@gmail.com](mailto:techouse@gmail.com).
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available
at [https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
==> sqlite3-to-mysql-2.1.7/CONTRIBUTING.md <==
# Contributing
Thank you for taking the time to read this; the project relies on volunteer developers to help develop and maintain it.
Before making any changes to this repository, please first discuss the proposed modifications with the repository owners
through an issue, email, or any other appropriate communication channel.
Please be aware that a [code of conduct](CODE-OF-CONDUCT.md) is in place, and should be adhered to during all
interactions related to the project.
## Python version support
Maintaining backward compatibility is essential.
Currently, the tool supports Python versions 3.8, 3.9, 3.10, 3.11, and 3.12.
## MySQL version support
This tool is intended to fully support MySQL versions 5.5, 5.6, 5.7, and 8.0, including major forks like MariaDB.
Maintaining compatibility with these versions is a priority for a smooth user experience.
## Testing
As this project/tool involves the critical process of transferring data between different database types, it is of
utmost importance to ensure thorough testing. Please remember to write tests for any new code you create, utilizing the
[pytest](https://docs.pytest.org/en/latest/) framework for all test cases.
### Running the test suite
To run the test suite, run the following commands; the tests use a Docker MySQL image.
**Requires a running Docker instance!**
```bash
git clone https://github.com/techouse/sqlite3-to-mysql
cd sqlite3-to-mysql
python3 -m venv env
source env/bin/activate
pip install -e .
pip install -r requirements_dev.txt
tox
```
## Submitting changes
To contribute to this project, please submit a
new [pull request](https://github.com/techouse/sqlite3-to-mysql/pull/new/master) and provide a clear list of your
modifications. For guidance on creating pull requests, you can refer
to [this resource](http://help.github.com/pull-requests/).
When sending a pull request, please include [pytest](https://docs.pytest.org/en/latest/) tests, as we strive to
increase our test coverage. Follow our coding conventions, and keep each commit focused on a single feature.
For commits, please write clear log messages. While concise one-line messages are suitable for small changes, more
substantial modifications should follow a format similar to the example below:
```bash
git commit -m "A brief summary of the commit
>
> A paragraph describing what changed and its impact."
```
## Coding standards
It is essential to prioritize code readability and conciseness. To achieve this, we recommend
using [Black](https://github.com/psf/black) for code formatting.
Once your work is deemed complete, it is advisable to run the following command:
```bash
tox -e flake8,linters
```
This command executes various linters and checkers to identify any potential issues or inconsistencies in your code. By
following these guidelines, you can ensure a high-quality codebase.
Thanks,
Klemen Tusar
==> sqlite3-to-mysql-2.1.7/Dockerfile <==
FROM python:3.12-alpine
LABEL maintainer="https://github.com/techouse"
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir sqlite3-to-mysql
ENTRYPOINT ["sqlite3mysql"]07070100000012000081A400000000000000000000000165A2FB8A0000042D000000000000000000000000000000000000001F00000000sqlite3-to-mysql-2.1.7/LICENSEMIT License
Copyright (c) 2024 Klemen Tusar
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==> sqlite3-to-mysql-2.1.7/README.md <==
[](https://pypi.org/project/sqlite3-to-mysql/)
[](https://pypistats.org/packages/sqlite3-to-mysql)
[](https://pypi.org/project/sqlite3-to-mysql/)
[](https://img.shields.io/static/v1?label=MySQL&message=5.6+|+5.7+|+8.0&color=2b5d80)
[](https://img.shields.io/static/v1?label=MariaDB&message=10.0+|+10.1+|+10.2+|+10.3+|+10.4+|+10.5&color=C0765A)
[](https://github.com/techouse/sqlite3-to-mysql/blob/master/LICENSE)
[](CODE-OF-CONDUCT.md)
[](https://pypi.org/project/sqlite3-to-mysql/)
[](https://github.com/ambv/black)
[](https://www.codacy.com/manual/techouse/sqlite3-to-mysql?utm_source=github.com&utm_medium=referral&utm_content=techouse/sqlite3-to-mysql&utm_campaign=Badge_Grade)
[](https://github.com/techouse/sqlite3-to-mysql/actions/workflows/test.yml)
[](https://github.com/techouse/sqlite3-to-mysql/actions/workflows/codeql-analysis.yml)
[](https://github.com/techouse/sqlite3-to-mysql/actions/workflows/publish.yml)
[](https://github.com/techouse/sqlite3-to-mysql/actions/workflows/docker.yml)
[](https://codecov.io/gh/techouse/sqlite3-to-mysql)
[](https://github.com/sponsors/techouse)
[](https://github.com/techouse/sqlite3-to-mysql/stargazers)
# SQLite3 to MySQL
#### A simple Python tool to transfer data from SQLite 3 to MySQL.
### How to run
```bash
pip install sqlite3-to-mysql
sqlite3mysql --help
```
### Usage
```
Usage: sqlite3mysql [OPTIONS]
Transfer SQLite to MySQL using the provided CLI options.
Options:
-f, --sqlite-file PATH SQLite3 database file [required]
-t, --sqlite-tables TUPLE Transfer only these specific tables (space
separated table names). Implies --without-
foreign-keys which inhibits the transfer of
foreign keys.
-X, --without-foreign-keys Do not transfer foreign keys.
-W, --ignore-duplicate-keys Ignore duplicate keys. The default behavior
is to create new ones with a numerical
suffix, e.g. 'existing_key' ->
'existing_key_1'
-d, --mysql-database TEXT MySQL database name [required]
-u, --mysql-user TEXT MySQL user [required]
-p, --prompt-mysql-password Prompt for MySQL password
--mysql-password TEXT MySQL password
-h, --mysql-host TEXT MySQL host. Defaults to localhost.
-P, --mysql-port INTEGER MySQL port. Defaults to 3306.
-S, --skip-ssl Disable MySQL connection encryption.
-i, --mysql-insert-method [UPDATE|IGNORE|DEFAULT]
MySQL insert method. DEFAULT will throw
errors when encountering duplicate records;
UPDATE will update existing rows; IGNORE
will ignore insert errors. Defaults to
IGNORE.
-E, --mysql-truncate-tables Truncates existing tables before inserting
data.
--mysql-integer-type TEXT MySQL default integer field type. Defaults
to INT(11).
--mysql-string-type TEXT MySQL default string field type. Defaults to
VARCHAR(255).
--mysql-text-type [MEDIUMTEXT|TEXT|TINYTEXT|LONGTEXT]
MySQL default text field type. Defaults to
TEXT.
--mysql-charset TEXT MySQL database and table character set
[default: utf8mb4]
--mysql-collation TEXT MySQL database and table collation
-T, --use-fulltext Use FULLTEXT indexes on TEXT columns. Will
throw an error if your MySQL version does
not support InnoDB FULLTEXT indexes!
--with-rowid Transfer rowid columns.
-c, --chunk INTEGER Chunk reading/writing SQL records
-l, --log-file PATH Log file
-q, --quiet Quiet. Display only errors.
--debug Debug mode. Will throw exceptions.
--version Show the version and exit.
--help Show this message and exit.
```
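For example, a minimal transfer that relies on the defaults above could look like this (the file, user, and database names are placeholders):
```bash
sqlite3mysql --sqlite-file data.db \
             --mysql-user foo \
             --mysql-database bar \
             --prompt-mysql-password
```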
#### Docker
If you don't want to install the tool on your system, you can use the Docker image instead.
```bash
docker run -it \
--workdir $(pwd) \
--volume $(pwd):$(pwd) \
--rm ghcr.io/techouse/sqlite3-to-mysql:latest \
--sqlite-file baz.db \
--mysql-user foo \
--mysql-password bar \
--mysql-database baz \
--mysql-host host.docker.internal
```
This mounts your host's current working directory (`pwd`) inside the Docker container as the container's working
directory. Any files the tool writes to that directory are therefore written to the host directory where you ran
`docker run`. Note that you also have to use the
[special hostname](https://docs.docker.com/desktop/networking/#use-cases-and-workarounds-for-all-platforms) `host.docker.internal`
to access your host machine from inside the Docker container.
#### Homebrew
If you're on macOS, you can install the tool using [Homebrew](https://brew.sh/).
```bash
brew tap techouse/sqlite3-to-mysql
brew install sqlite3-to-mysql
sqlite3mysql --help
```
==> sqlite3-to-mysql-2.1.7/pyproject.toml <==
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "sqlite3-to-mysql"
description = "A simple Python tool to transfer data from SQLite 3 to MySQL"
readme = "README.md"
license = { text = "MIT" }
requires-python = ">=3.8"
authors = [
{ name = "Klemen Tusar", email = "techouse@gmail.com" },
]
keywords = [
"sqlite3",
"mysql",
"transfer",
"data",
"migrate",
"migration",
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Database",
]
dependencies = [
"Click>=8.1.3",
"mysql-connector-python>=8.2.0",
"pytimeparse2",
"simplejson>=3.19.1",
"tqdm>=4.65.0",
"tabulate",
"Unidecode>=1.3.6",
"typing_extensions",
"packaging",
]
dynamic = ["version"]
[project.urls]
Source = "https://github.com/techouse/sqlite3-to-mysql"
Changelog = "https://github.com/techouse/sqlite3-to-mysql/blob/master/CHANGELOG.md"
Sponsor = "https://github.com/sponsors/techouse"
PayPal = "https://paypal.me/ktusar"
[tool.hatch.version]
path = "sqlite3_to_mysql/__init__.py"
[tool.hatch.build.targets.sdist]
include = [
"sqlite3_to_mysql",
"tests",
"README.md",
"CHANGELOG.md",
"CODE-OF-CONDUCT.md",
"LICENSE",
"requirements_dev.txt",
]
[project.scripts]
sqlite3mysql = "sqlite3_to_mysql.cli:cli"
[tool.black]
line-length = 120
target-version = ["py38", "py39", "py310", "py311", "py312"]
include = '\.pyi?$'
exclude = '''
(
/(
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| buck-out
| build
| dist
)/
| foo.py
)
'''
[tool.isort]
line_length = 120
profile = "black"
lines_after_imports = 2
known_first_party = "sqlite3_to_mysql"
skip_gitignore = true
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".*", "venv", "env", "*.egg", "dist", "build"]
minversion = "7.3.1"
addopts = "-rsxX -l --tb=short --strict-markers"
timeout = 300
markers = [
"init: Run the initialisation test functions",
"transfer: Run the main transfer test functions",
"cli: Run the cli test functions",
]
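# Markers can be selected on the command line, e.g. "pytest -m cli" runs only the tests decorated with @pytest.mark.cli.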
[tool.mypy]
python_version = "3.8"
exclude = [
"tests",
"build",
"dist",
"venv",
"env",
]
warn_return_any = true
warn_unused_configs = true
[[tool.mypy.overrides]]
module = "pytimeparse2.*,factory.*,docker.*"
ignore_missing_imports = true
==> sqlite3-to-mysql-2.1.7/requirements_dev.txt <==
Click>=8.1.3
docker>=6.1.3
factory-boy
Faker>=18.10.0
mysql-connector-python>=8.2.0
PyMySQL>=1.0.3
pytest>=7.3.1
pytest-cov
pytest-mock
pytest-timeout
pytimeparse2
simplejson>=3.19.1
types-simplejson
sqlalchemy>=2.0.0
sqlalchemy-utils
types-sqlalchemy-utils
tox
tqdm>=4.65.0
types-tqdm
packaging
tabulate
types-tabulate
Unidecode>=1.3.6
typing_extensions
types-Pygments
types-colorama
types-mock
types-setuptools
==> sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/__init__.py <==
"""Utility to transfer data from SQLite 3 to MySQL."""
__version__ = "2.1.7"
from .transporter import SQLite3toMySQL
==> sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/cli.py <==
"""The command line interface of SQLite3toMySQL."""
import os
import sys
import typing as t
from datetime import datetime
import click
from mysql.connector import CharacterSet
from tabulate import tabulate
from . import SQLite3toMySQL
from . import __version__ as package_version
from .click_utils import OptionEatAll, prompt_password
from .debug_info import info
from .mysql_utils import MYSQL_INSERT_METHOD, MYSQL_TEXT_COLUMN_TYPES, mysql_supported_character_sets
_copyright_header: str = f"sqlite3mysql version {package_version} Copyright (c) 2018-{datetime.now().year} Klemen Tusar"
@click.command(
name="sqlite3mysql",
help=_copyright_header,
no_args_is_help=True,
epilog="For more information, visit https://github.com/techouse/sqlite3-to-mysql",
)
@click.option(
"-f",
"--sqlite-file",
type=click.Path(exists=True),
default=None,
help="SQLite3 database file",
required=True,
)
@click.option(
"-t",
"--sqlite-tables",
type=tuple,
cls=OptionEatAll,
help="Transfer only these specific tables (space separated table names). "
"Implies --without-foreign-keys which inhibits the transfer of foreign keys.",
)
@click.option("-X", "--without-foreign-keys", is_flag=True, help="Do not transfer foreign keys.")
@click.option(
"-W",
"--ignore-duplicate-keys",
is_flag=True,
help="Ignore duplicate keys. The default behavior is to create new ones with a numerical suffix, e.g. "
"'exising_key' -> 'existing_key_1'",
)
@click.option("-d", "--mysql-database", default=None, help="MySQL database name", required=True)
@click.option("-u", "--mysql-user", default=None, help="MySQL user", required=True)
@click.option(
"-p",
"--prompt-mysql-password",
is_flag=True,
default=False,
callback=prompt_password,
help="Prompt for MySQL password",
)
@click.option("--mysql-password", default=None, help="MySQL password")
@click.option("-h", "--mysql-host", default="localhost", help="MySQL host. Defaults to localhost.")
@click.option("-P", "--mysql-port", type=int, default=3306, help="MySQL port. Defaults to 3306.")
@click.option("-S", "--skip-ssl", is_flag=True, help="Disable MySQL connection encryption.")
@click.option(
"-i",
"--mysql-insert-method",
type=click.Choice(MYSQL_INSERT_METHOD, case_sensitive=False),
default="IGNORE",
help="MySQL insert method. DEFAULT will throw errors when encountering duplicate records; "
"UPDATE will update existing rows; IGNORE will ignore insert errors. Defaults to IGNORE.",
)
@click.option(
"-E",
"--mysql-truncate-tables",
is_flag=True,
help="Truncates existing tables before inserting data.",
)
@click.option(
"--mysql-integer-type",
default="INT(11)",
help="MySQL default integer field type. Defaults to INT(11).",
)
@click.option(
"--mysql-string-type",
default="VARCHAR(255)",
help="MySQL default string field type. Defaults to VARCHAR(255).",
)
@click.option(
"--mysql-text-type",
type=click.Choice(MYSQL_TEXT_COLUMN_TYPES, case_sensitive=False),
default="TEXT",
help="MySQL default text field type. Defaults to TEXT.",
)
@click.option(
"--mysql-charset",
metavar="TEXT",
type=click.Choice(list(CharacterSet().get_supported()), case_sensitive=False),
default="utf8mb4",
show_default=True,
help="MySQL database and table character set",
)
@click.option(
"--mysql-collation",
metavar="TEXT",
type=click.Choice(
[charset.collation for charset in mysql_supported_character_sets()],
case_sensitive=False,
),
default=None,
help="MySQL database and table collation",
)
@click.option(
"-T",
"--use-fulltext",
is_flag=True,
help="Use FULLTEXT indexes on TEXT columns. "
"Will throw an error if your MySQL version does not support InnoDB FULLTEXT indexes!",
)
@click.option("--with-rowid", is_flag=True, help="Transfer rowid columns.")
@click.option("-c", "--chunk", type=int, default=None, help="Chunk reading/writing SQL records")
@click.option("-l", "--log-file", type=click.Path(), help="Log file")
@click.option("-q", "--quiet", is_flag=True, help="Quiet. Display only errors.")
@click.option("--debug", is_flag=True, help="Debug mode. Will throw exceptions.")
@click.version_option(message=tabulate(info(), headers=["software", "version"], tablefmt="github"))
def cli(
sqlite_file: t.Union[str, "os.PathLike[t.Any]"],
sqlite_tables: t.Optional[t.Sequence[str]],
without_foreign_keys: bool,
ignore_duplicate_keys: bool,
mysql_user: str,
prompt_mysql_password: bool,
mysql_password: str,
mysql_database: str,
mysql_host: str,
mysql_port: int,
skip_ssl: bool,
mysql_insert_method: str,
mysql_truncate_tables: bool,
mysql_integer_type: str,
mysql_string_type: str,
mysql_text_type: str,
mysql_charset: str,
mysql_collation: str,
use_fulltext: bool,
with_rowid: bool,
chunk: int,
log_file: t.Union[str, "os.PathLike[t.Any]"],
quiet: bool,
debug: bool,
) -> None:
"""Transfer SQLite to MySQL using the provided CLI options."""
click.echo(_copyright_header)
try:
if mysql_collation:
charset_collations: t.Tuple[str, ...] = tuple(
cs.collation for cs in mysql_supported_character_sets(mysql_charset.lower())
)
if mysql_collation not in set(charset_collations):
raise click.ClickException(
f"""Error: Invalid value for '--collation' of charset '{mysql_charset}': '{mysql_collation}' is not one of {"'" + "', '".join(charset_collations) + "'"}."""
)
SQLite3toMySQL(
sqlite_file=sqlite_file,
sqlite_tables=sqlite_tables or tuple(),
without_foreign_keys=without_foreign_keys or (sqlite_tables is not None and len(sqlite_tables) > 0),
mysql_user=mysql_user,
mysql_password=mysql_password or prompt_mysql_password,
mysql_database=mysql_database,
mysql_host=mysql_host,
mysql_port=mysql_port,
mysql_ssl_disabled=skip_ssl,
mysql_insert_method=mysql_insert_method,
mysql_truncate_tables=mysql_truncate_tables,
mysql_integer_type=mysql_integer_type,
mysql_string_type=mysql_string_type,
mysql_text_type=mysql_text_type,
mysql_charset=mysql_charset.lower() if mysql_charset else "utf8mb4",
mysql_collation=mysql_collation.lower() if mysql_collation else None,
ignore_duplicate_keys=ignore_duplicate_keys,
use_fulltext=use_fulltext,
with_rowid=with_rowid,
chunk=chunk,
log_file=log_file,
quiet=quiet,
).transfer()
except KeyboardInterrupt:
if debug:
raise
click.echo("\nProcess interrupted. Exiting...")
sys.exit(1)
except Exception as err: # pylint: disable=W0703
if debug:
raise
click.echo(err)
sys.exit(1)
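# Example invocation (illustrative; the file name, database and credentials below are placeholders):
#
#   sqlite3mysql --sqlite-file data.sqlite3 \
#       --mysql-database target_db \
#       --mysql-user user -p \
#       --mysql-host 127.0.0.1 --mysql-port 3306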
07070100000019000081A400000000000000000000000165A2FB8A0000096E000000000000000000000000000000000000003700000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/click_utils.py"""Click utilities."""
import typing as t
import click
class OptionEatAll(click.Option):
"""Taken from https://stackoverflow.com/questions/48391777/nargs-equivalent-for-options-in-click#answer-48394004.""" # noqa: ignore=E501 pylint: disable=C0301
def __init__(self, *args, **kwargs):
"""Override."""
self.save_other_options = kwargs.pop("save_other_options", True)
nargs = kwargs.pop("nargs", -1)
if nargs != -1:
raise ValueError(f"nargs, if set, must be -1 not {nargs}")
super(OptionEatAll, self).__init__(*args, **kwargs)
self._previous_parser_process = None
self._eat_all_parser = None
def add_to_parser(self, parser, ctx) -> None:
"""Override."""
def parser_process(value, state) -> None:
# method to hook to the parser.process
done: bool = False
value = [value]
if self.save_other_options:
# grab everything up to the next option
while state.rargs and not done:
for prefix in self._eat_all_parser.prefixes:
if state.rargs[0].startswith(prefix):
done = True
if not done:
value.append(state.rargs.pop(0))
else:
# grab everything remaining
value += state.rargs
state.rargs[:] = []
value = tuple(value)
# call the actual process
self._previous_parser_process(value, state)
retval = super(OptionEatAll, self).add_to_parser(parser, ctx) # pylint: disable=E1111
for name in self.opts:
# pylint: disable=W0212
our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
if our_parser:
self._eat_all_parser = our_parser
self._previous_parser_process = our_parser.process
our_parser.process = parser_process
break
return retval
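# Illustrative usage sketch (added for documentation): cli.py attaches this class to the
# -t/--sqlite-tables option so that a single option can consume several space-separated values:
#
#   @click.option("-t", "--sqlite-tables", type=tuple, cls=OptionEatAll)
#
#   sqlite3mysql -f db.sqlite3 -d target_db -u user -t table_a table_b table_c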
def prompt_password(ctx: click.core.Context, param: t.Any, use_password: bool): # pylint: disable=W0613
"""Prompt for password."""
if use_password:
mysql_password = ctx.params.get("mysql_password")
if not mysql_password:
mysql_password = click.prompt("MySQL password", hide_input=True)
return mysql_password
0707010000001A000081A400000000000000000000000165A2FB8A00000CBD000000000000000000000000000000000000003600000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/debug_info.py"""Module containing bug report helper(s).
Adapted from https://github.com/psf/requests/blob/master/requests/help.py
"""
import platform
import sqlite3
import sys
import typing as t
from shutil import which
from subprocess import check_output
import click
import mysql.connector
import pytimeparse2
import simplejson
import tabulate
import tqdm
from . import __version__ as package_version
def _implementation() -> str:
"""Return a dict with the Python implementation and version.
Provide both the name and the version of the Python implementation
currently running. For example, on CPython 2.7.5 it will return
{'name': 'CPython', 'version': '2.7.5'}.
This function works best on CPython and PyPy: in particular, it probably
doesn't work for Jython or IronPython. Future investigation should be done
to work out the correct shape of the code for those platforms.
"""
implementation: str = platform.python_implementation()
if implementation == "CPython":
implementation_version = platform.python_version()
elif implementation == "PyPy":
implementation_version = "%s.%s.%s" % (
sys.pypy_version_info.major, # type: ignore # noqa: ignore=E1101 pylint: disable=E1101
sys.pypy_version_info.minor, # type: ignore # noqa: ignore=E1101 pylint: disable=E1101
sys.pypy_version_info.micro, # type: ignore # noqa: ignore=E1101 pylint: disable=E1101
)
rel = sys.pypy_version_info.releaselevel # type: ignore # noqa: ignore=E1101 pylint: disable=E1101
if rel != "final":
implementation_version = "".join([implementation_version, rel])
elif implementation == "Jython":
implementation_version = platform.python_version() # Complete Guess
elif implementation == "IronPython":
implementation_version = platform.python_version() # Complete Guess
else:
implementation_version = "Unknown"
return f"{implementation} {implementation_version}"
def _mysql_version() -> str:
if which("mysql") is not None:
try:
mysql_version: t.Union[str, bytes] = check_output(["mysql", "-V"])
try:
return mysql_version.decode().strip() # type: ignore
except (UnicodeDecodeError, AttributeError):
return str(mysql_version)
except Exception: # nosec pylint: disable=W0703
pass
return "MySQL client not found on the system"
def info() -> t.List[t.List[str]]:
"""Generate information for a bug report."""
try:
platform_info = f"{platform.system()} {platform.release()}"
except IOError:
platform_info = "Unknown"
return [
["sqlite3-to-mysql", package_version],
["", ""],
["Operating System", platform_info],
["Python", _implementation()],
["MySQL", _mysql_version()],
["SQLite", sqlite3.sqlite_version],
["", ""],
["click", click.__version__],
["mysql-connector-python", mysql.connector.__version__],
["pytimeparse2", pytimeparse2.__version__],
["simplejson", simplejson.__version__], # type: ignore
["tabulate", tabulate.__version__],
["tqdm", tqdm.__version__],
]
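# Illustrative usage sketch (added for documentation): the CLI renders this report for its
# --version output, roughly equivalent to
#
#   print(tabulate.tabulate(info(), headers=["software", "version"], tablefmt="github"))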
0707010000001B000081A400000000000000000000000165A2FB8A00000E6C000000000000000000000000000000000000003700000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/mysql_utils.py"""MySQL helpers."""
import re
import typing as t
from mysql.connector import CharacterSet
from mysql.connector.charsets import MYSQL_CHARACTER_SETS
from packaging import version
from packaging.version import Version
# Shamelessly copied from SQLAlchemy's dialects/mysql/__init__.py
MYSQL_COLUMN_TYPES: t.Tuple[str, ...] = (
"BIGINT",
"BINARY",
"BIT",
"BLOB",
"BOOLEAN",
"CHAR",
"DATE",
"DATETIME",
"DECIMAL",
"DOUBLE",
"ENUM",
"FLOAT",
"INTEGER",
"JSON",
"LONGBLOB",
"LONGTEXT",
"MEDIUMBLOB",
"MEDIUMINT",
"MEDIUMTEXT",
"NCHAR",
"NVARCHAR",
"NUMERIC",
"SET",
"SMALLINT",
"REAL",
"TEXT",
"TIME",
"TIMESTAMP",
"TINYBLOB",
"TINYINT",
"TINYTEXT",
"VARBINARY",
"VARCHAR",
"YEAR",
)
MYSQL_TEXT_COLUMN_TYPES: t.Tuple[str, ...] = (
"LONGTEXT",
"MEDIUMTEXT",
"TEXT",
"TINYTEXT",
)
MYSQL_TEXT_COLUMN_TYPES_WITH_JSON: t.Tuple[str, ...] = ("JSON",) + MYSQL_TEXT_COLUMN_TYPES
MYSQL_BLOB_COLUMN_TYPES: t.Tuple[str, ...] = (
"LONGBLOB",
"MEDIUMBLOB",
"BLOB",
"TINYBLOB",
)
MYSQL_COLUMN_TYPES_WITHOUT_DEFAULT: t.Tuple[str, ...] = (
("GEOMETRY",) + MYSQL_TEXT_COLUMN_TYPES_WITH_JSON + MYSQL_BLOB_COLUMN_TYPES
)
MYSQL_INSERT_METHOD: t.Tuple[str, ...] = (
"DEFAULT",
"IGNORE",
"UPDATE",
)
class CharSet(t.NamedTuple):
"""MySQL character set as a named tuple."""
id: int
charset: str
collation: str
def mysql_supported_character_sets(charset: t.Optional[str] = None) -> t.Iterator[CharSet]:
"""Get supported MySQL character sets."""
index: int
info: t.Optional[t.Tuple[str, str, bool]]
if charset is not None:
for index, info in enumerate(MYSQL_CHARACTER_SETS):
if info is not None:
try:
if info[0] == charset:
yield CharSet(index, charset, info[1])
except KeyError:
continue
else:
for charset in CharacterSet().get_supported():
for index, info in enumerate(MYSQL_CHARACTER_SETS):
if info is not None:
try:
yield CharSet(index, charset, info[1])
except KeyError:
continue
def get_mysql_version(version_string: str) -> version.Version:
"""Get MySQL version."""
return version.parse(re.sub("-.*$", "", version_string))
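# Illustrative sketch (added for documentation, not used by the package):
# get_mysql_version() strips vendor/build suffixes such as "-MariaDB" or
# "-0ubuntu0.22.04.1" before parsing, so results compare cleanly.
def _example_get_mysql_version() -> None:
    assert get_mysql_version("8.0.35-0ubuntu0.22.04.1") == version.parse("8.0.35")
    assert get_mysql_version("10.11.6-MariaDB") == version.parse("10.11.6")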
def check_mysql_json_support(version_string: str) -> bool:
"""Check for MySQL JSON support."""
mysql_version: Version = get_mysql_version(version_string)
    if version_string.lower().endswith("-mariadb"):
        # MariaDB accepts JSON columns (stored as LONGTEXT) since 10.2.7
        return mysql_version >= version.parse("10.2.7")
    # MySQL ships a native JSON type since 5.7.8
    return mysql_version >= version.parse("5.7.8")
def check_mysql_fulltext_support(version_string: str) -> bool:
"""Check for FULLTEXT indexing support."""
mysql_version: Version = get_mysql_version(version_string)
    if version_string.lower().endswith("-mariadb"):
        # MariaDB supports InnoDB FULLTEXT indexes since 10.0.5
        return mysql_version >= version.parse("10.0.5")
    # MySQL supports InnoDB FULLTEXT indexes since 5.6
    return mysql_version >= version.parse("5.6")
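# Illustrative sketch (added for documentation, not used by the package):
# JSON columns require MySQL 5.7.8+ / MariaDB 10.2.7+, while InnoDB FULLTEXT
# indexes require MySQL 5.6+ / MariaDB 10.0.5+.
def _example_check_mysql_feature_support() -> None:
    assert check_mysql_json_support("8.0.35") is True
    assert check_mysql_json_support("5.6.51") is False
    assert check_mysql_fulltext_support("5.6.10") is True
    assert check_mysql_fulltext_support("5.5.62") is False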
def safe_identifier_length(identifier_name: str, max_length: int = 64) -> str:
"""https://dev.mysql.com/doc/refman/8.0/en/identifier-length.html."""
return str(identifier_name)[:max_length]
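# Illustrative sketch (added for documentation, not used by the package):
# MySQL identifiers are limited to 64 characters, so longer SQLite names are
# truncated before they are used in CREATE TABLE, index and constraint statements.
def _example_safe_identifier_length() -> None:
    very_long_name = "a" * 80  # hypothetical identifier longer than 64 characters
    assert len(safe_identifier_length(very_long_name)) == 64
    assert safe_identifier_length("articles") == "articles"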
0707010000001C000081A400000000000000000000000165A2FB8A00000000000000000000000000000000000000000000003100000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/py.typed0707010000001D000081A400000000000000000000000165A2FB8A000006EA000000000000000000000000000000000000003800000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/sqlite_utils.py"""SQLite adapters and converters for unsupported data types."""
from datetime import date, timedelta
from decimal import Decimal
from packaging import version
from packaging.version import Version
from pytimeparse2 import parse
from unidecode import unidecode
def adapt_decimal(value) -> str:
"""Convert decimal.Decimal to string."""
return str(value)
def convert_decimal(value) -> Decimal:
"""Convert string to decimalDecimal."""
return Decimal(str(value.decode()))
def adapt_timedelta(value) -> str:
"""Convert datetime.timedelta to %H:%M:%S string."""
hours, remainder = divmod(value.total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)
return "{:02}:{:02}:{:02}".format(int(hours), int(minutes), int(seconds))
def convert_timedelta(value) -> timedelta:
"""Convert %H:%M:%S string to datetime.timedelta."""
return timedelta(seconds=parse(value.decode()))
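# Illustrative sketch (added for documentation, not used by the package): the
# adapter/converter pairs registered in transporter.py round-trip values through
# their SQLite text representation (converters receive raw bytes).
def _example_adapter_round_trip() -> None:
    assert adapt_decimal(Decimal("1.23")) == "1.23"
    assert convert_decimal(b"1.23") == Decimal("1.23")
    assert adapt_timedelta(timedelta(hours=26, minutes=5, seconds=3)) == "26:05:03"
    assert convert_timedelta(b"26:05:03") == timedelta(hours=26, minutes=5, seconds=3)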
def unicase_compare(string_1: str, string_2: str) -> int:
"""Taken from https://github.com/patarapolw/ankisync2/issues/3#issuecomment-768687431."""
_string_1: str = unidecode(string_1).lower()
_string_2: str = unidecode(string_2).lower()
return 1 if _string_1 > _string_2 else -1 if _string_1 < _string_2 else 0
def convert_date(value) -> date:
"""Handle SQLite date conversion."""
try:
return date.fromisoformat(value.decode())
except ValueError as err:
raise ValueError(f"DATE field contains {err}") # pylint: disable=W0707
def check_sqlite_table_xinfo_support(version_string: str) -> bool:
"""Check for SQLite table_xinfo support."""
sqlite_version: Version = version.parse(version_string)
return sqlite_version.major > 3 or (sqlite_version.major == 3 and sqlite_version.minor >= 26)
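# Illustrative sketch (added for documentation, not used by the package):
# PRAGMA table_xinfo, which also exposes hidden/generated columns, was added in
# SQLite 3.26.0; older releases fall back to PRAGMA table_info.
def _example_check_sqlite_table_xinfo_support() -> None:
    assert check_sqlite_table_xinfo_support("3.26.0") is True
    assert check_sqlite_table_xinfo_support("3.25.3") is False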
0707010000001E000081A400000000000000000000000165A2FB8A00007D0C000000000000000000000000000000000000003700000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/transporter.py"""Use to transfer an SQLite 3 database to MySQL."""
import logging
import os
import re
import sqlite3
import typing as t
from datetime import timedelta
from decimal import Decimal
from itertools import chain
from math import ceil
from os.path import isfile, realpath
from sys import stdout
import mysql.connector
import typing_extensions as tx
from mysql.connector import CharacterSet
from mysql.connector import __version__ as mysql_connector_version_string
from mysql.connector import errorcode
from packaging import version
from tqdm import tqdm, trange
from sqlite3_to_mysql.sqlite_utils import (
adapt_decimal,
adapt_timedelta,
check_sqlite_table_xinfo_support,
convert_date,
convert_decimal,
convert_timedelta,
unicase_compare,
)
from .mysql_utils import (
MYSQL_BLOB_COLUMN_TYPES,
MYSQL_COLUMN_TYPES,
MYSQL_COLUMN_TYPES_WITHOUT_DEFAULT,
MYSQL_INSERT_METHOD,
MYSQL_TEXT_COLUMN_TYPES,
MYSQL_TEXT_COLUMN_TYPES_WITH_JSON,
check_mysql_fulltext_support,
check_mysql_json_support,
safe_identifier_length,
)
from .types import SQLite3toMySQLAttributes, SQLite3toMySQLParams
class SQLite3toMySQL(SQLite3toMySQLAttributes):
"""Use this class to transfer an SQLite 3 database to MySQL."""
COLUMN_PATTERN: t.Pattern[str] = re.compile(r"^[^(]+")
COLUMN_LENGTH_PATTERN: t.Pattern[str] = re.compile(r"\(\d+\)")
COLUMN_UNSIGNED_PATTERN: t.Pattern[str] = re.compile(r"\bUNSIGNED\b", re.IGNORECASE)
MYSQL_CONNECTOR_VERSION: version.Version = version.parse(mysql_connector_version_string)
def __init__(self, **kwargs: tx.Unpack[SQLite3toMySQLParams]):
"""Constructor."""
if kwargs.get("sqlite_file") is None:
raise ValueError("Please provide an SQLite file")
elif not isfile(str(kwargs.get("sqlite_file"))):
raise FileNotFoundError("SQLite file does not exist")
else:
self._sqlite_file = realpath(str(kwargs.get("sqlite_file")))
if kwargs.get("mysql_user") is not None:
self._mysql_user = str(kwargs.get("mysql_user"))
else:
raise ValueError("Please provide a MySQL user")
        self._mysql_password = str(kwargs.get("mysql_password")) if kwargs.get("mysql_password") else None
self._mysql_host = kwargs.get("mysql_host") or "localhost"
self._mysql_port = kwargs.get("mysql_port") or 3306
self._sqlite_tables = kwargs.get("sqlite_tables") or tuple()
self._without_foreign_keys = len(self._sqlite_tables) > 0 or kwargs.get("without_foreign_keys") or False
self._mysql_ssl_disabled = kwargs.get("mysql_ssl_disabled") or False
self._chunk_size = kwargs.get("chunk") or None
self._quiet = kwargs.get("quiet") or False
self._logger = self._setup_logger(log_file=kwargs.get("log_file") or None, quiet=self._quiet)
self._mysql_database = kwargs.get("mysql_database") or "transfer"
        self._mysql_insert_method = str(kwargs.get("mysql_insert_method") or "IGNORE").upper()
if self._mysql_insert_method not in MYSQL_INSERT_METHOD:
self._mysql_insert_method = "IGNORE"
self._mysql_truncate_tables = kwargs.get("mysql_truncate_tables") or False
self._mysql_integer_type = str(kwargs.get("mysql_integer_type") or "INT(11)").upper()
self._mysql_string_type = str(kwargs.get("mysql_string_type") or "VARCHAR(255)").upper()
self._mysql_text_type = str(kwargs.get("mysql_text_type") or "TEXT").upper()
if self._mysql_text_type not in MYSQL_TEXT_COLUMN_TYPES:
self._mysql_text_type = "TEXT"
self._mysql_charset = kwargs.get("mysql_charset") or "utf8mb4"
self._mysql_collation = (
kwargs.get("mysql_collation") or CharacterSet().get_default_collation(self._mysql_charset.lower())[0]
)
if not kwargs.get("mysql_collation") and self._mysql_collation == "utf8mb4_0900_ai_ci":
self._mysql_collation = "utf8mb4_general_ci"
self._ignore_duplicate_keys = kwargs.get("ignore_duplicate_keys") or False
self._use_fulltext = kwargs.get("use_fulltext") or False
self._with_rowid = kwargs.get("with_rowid") or False
sqlite3.register_adapter(Decimal, adapt_decimal)
sqlite3.register_converter("DECIMAL", convert_decimal)
sqlite3.register_adapter(timedelta, adapt_timedelta)
sqlite3.register_converter("DATE", convert_date)
sqlite3.register_converter("TIME", convert_timedelta)
self._sqlite = sqlite3.connect(realpath(self._sqlite_file), detect_types=sqlite3.PARSE_DECLTYPES)
self._sqlite.row_factory = sqlite3.Row
self._sqlite.create_collation("unicase", unicase_compare)
self._sqlite_cur = self._sqlite.cursor()
self._sqlite_version = self._get_sqlite_version()
self._sqlite_table_xinfo_support = check_sqlite_table_xinfo_support(self._sqlite_version)
try:
_mysql_connection = mysql.connector.connect(
user=self._mysql_user,
password=self._mysql_password,
host=self._mysql_host,
port=self._mysql_port,
ssl_disabled=self._mysql_ssl_disabled,
use_pure=True,
)
if isinstance(_mysql_connection, mysql.connector.MySQLConnection):
self._mysql = _mysql_connection
else:
raise ConnectionError("Unable to connect to MySQL")
if not self._mysql.is_connected():
raise ConnectionError("Unable to connect to MySQL")
self._mysql_cur = self._mysql.cursor(prepared=True)
try:
self._mysql.database = self._mysql_database
except mysql.connector.Error as err:
if err.errno == errorcode.ER_BAD_DB_ERROR:
self._create_database()
else:
self._logger.error(err)
raise
self._mysql_version = self._get_mysql_version()
self._mysql_json_support = check_mysql_json_support(self._mysql_version)
self._mysql_fulltext_support = check_mysql_fulltext_support(self._mysql_version)
if self._use_fulltext and not self._mysql_fulltext_support:
raise ValueError("Your MySQL version does not support InnoDB FULLTEXT indexes!")
except mysql.connector.Error as err:
self._logger.error(err)
raise
@classmethod
def _setup_logger(
cls, log_file: t.Optional[t.Union[str, "os.PathLike[t.Any]"]] = None, quiet: bool = False
) -> logging.Logger:
formatter = logging.Formatter(fmt="%(asctime)s %(levelname)-8s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
logger = logging.getLogger(cls.__name__)
logger.setLevel(logging.DEBUG)
if not quiet:
screen_handler = logging.StreamHandler(stream=stdout)
screen_handler.setFormatter(formatter)
logger.addHandler(screen_handler)
if log_file:
file_handler = logging.FileHandler(realpath(log_file), mode="w")
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
return logger
def _get_mysql_version(self) -> str:
try:
self._mysql_cur.execute("SHOW VARIABLES LIKE 'version'")
row = self._mysql_cur.fetchone()
if row:
return str(row[1])
else:
self._logger.error("MySQL failed checking for InnoDB version")
raise mysql.connector.Error("MySQL failed checking for InnoDB version")
except (IndexError, mysql.connector.Error) as err:
self._logger.error(
"MySQL failed checking for InnoDB version: %s",
err,
)
raise
def _get_sqlite_version(self) -> str:
try:
self._sqlite_cur.execute("SELECT sqlite_version()")
return str(self._sqlite_cur.fetchone()[0])
except (IndexError, sqlite3.Error) as err:
self._logger.error(
"SQLite failed checking for InnoDB version: %s",
err,
)
raise
def _sqlite_table_has_rowid(self, table: str) -> bool:
try:
self._sqlite_cur.execute(f'SELECT rowid FROM "{table}" LIMIT 1')
self._sqlite_cur.fetchall()
return True
except sqlite3.OperationalError:
return False
def _create_database(self) -> None:
try:
self._mysql_cur.execute(
f"""
CREATE DATABASE IF NOT EXISTS `{self._mysql_database}`
DEFAULT CHARACTER SET {self._mysql_charset}
DEFAULT COLLATE {self._mysql_collation}
"""
)
self._mysql_cur.close()
self._mysql.commit()
self._mysql.database = self._mysql_database
self._mysql_cur = self._mysql.cursor(prepared=True) # pylint: disable=W0201
except mysql.connector.Error as err:
self._logger.error(
"MySQL failed creating databse %s: %s",
self._mysql_database,
err,
)
raise
@classmethod
def _valid_column_type(cls, column_type: str) -> t.Optional[t.Match[str]]:
return cls.COLUMN_PATTERN.match(column_type.strip())
def _translate_type_from_sqlite_to_mysql(self, column_type: str) -> str:
"""This could be optimized even further, however is seems adequate."""
full_column_type: str = column_type.upper()
unsigned: bool = self.COLUMN_UNSIGNED_PATTERN.search(full_column_type) is not None
match: t.Optional[t.Match[str]] = self._valid_column_type(column_type)
if not match:
raise ValueError(f'"{column_type}" is not a valid column_type!')
data_type: str = match.group(0).upper()
if data_type in {"TEXT", "CLOB", "STRING"}:
return self._mysql_text_type
if data_type in {"CHARACTER", "NCHAR", "NATIVE CHARACTER"}:
return "CHAR" + self._column_type_length(column_type)
if data_type in {"VARYING CHARACTER", "NVARCHAR", "VARCHAR"}:
if self._mysql_string_type in MYSQL_TEXT_COLUMN_TYPES:
return self._mysql_string_type
length = self._column_type_length(column_type)
if not length:
return self._mysql_string_type
match = self._valid_column_type(self._mysql_string_type)
if match:
return match.group(0).upper() + length
if data_type == "UNSIGNED BIG INT":
return f"BIGINT{self._column_type_length(column_type)} UNSIGNED"
if data_type.startswith(("TINYINT", "INT1")):
print("length", self._column_type_length(column_type))
return f"TINYINT{self._column_type_length(column_type)}{' UNSIGNED' if unsigned else ''}"
if data_type.startswith(("SMALLINT", "INT2")):
return f"SMALLINT{self._column_type_length(column_type)}{' UNSIGNED' if unsigned else ''}"
if data_type.startswith(("MEDIUMINT", "INT3")):
return f"MEDIUMINT{self._column_type_length(column_type)}{' UNSIGNED' if unsigned else ''}"
if data_type.startswith("INT4"):
return f"INT{self._column_type_length(column_type)}{' UNSIGNED' if unsigned else ''}"
if data_type.startswith(("BIGINT", "INT8")):
return f"BIGINT{self._column_type_length(column_type)}{' UNSIGNED' if unsigned else ''}"
if data_type.startswith(("INT64", "NUMERIC")):
return f"BIGINT{self._column_type_length(column_type, 19)}{' UNSIGNED' if unsigned else ''}"
if data_type.startswith(("INTEGER", "INT")):
length = self._column_type_length(column_type)
if not length:
if "UNSIGNED" in self._mysql_integer_type:
return self._mysql_integer_type
return f"{self._mysql_integer_type}{' UNSIGNED' if unsigned else ''}"
match = self._valid_column_type(self._mysql_integer_type)
if match:
if "UNSIGNED" in self._mysql_integer_type:
return f"{match.group(0).upper()}{length} UNSIGNED"
return f"{match.group(0).upper()}{length}{' UNSIGNED' if unsigned else ''}"
if data_type == "BOOLEAN":
return "TINYINT(1)"
if data_type.startswith(("REAL", "DOUBLE", "FLOAT", "DECIMAL", "DEC", "FIXED")):
return full_column_type
if data_type not in MYSQL_COLUMN_TYPES:
return self._mysql_string_type
return full_column_type
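    # Illustrative mappings (added for documentation), assuming the default
    # --mysql-integer-type INT(11), --mysql-string-type VARCHAR(255) and
    # --mysql-text-type TEXT:
    #   "INTEGER"          -> "INT(11)"
    #   "VARCHAR(32)"      -> "VARCHAR(32)"
    #   "CLOB"             -> "TEXT"
    #   "BOOLEAN"          -> "TINYINT(1)"
    #   "UNSIGNED BIG INT" -> "BIGINT UNSIGNED"
    #   "DATETIME"         -> "DATETIME"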
@classmethod
def _column_type_length(cls, column_type: str, default: t.Optional[t.Union[str, int, float]] = None) -> str:
suffix: t.Optional[t.Match[str]] = cls.COLUMN_LENGTH_PATTERN.search(column_type)
if suffix:
return suffix.group(0)
if default:
return f"({default})"
return ""
def _create_table(self, table_name: str, transfer_rowid: bool = False) -> None:
primary_keys: t.List[t.Dict[str, str]] = []
sql: str = f"CREATE TABLE IF NOT EXISTS `{safe_identifier_length(table_name)}` ( "
if transfer_rowid:
sql += " `rowid` BIGINT NOT NULL, "
if self._sqlite_table_xinfo_support:
self._sqlite_cur.execute(f'PRAGMA table_xinfo("{table_name}")')
else:
self._sqlite_cur.execute(f'PRAGMA table_info("{table_name}")')
rows: t.List[t.Any] = self._sqlite_cur.fetchall()
compound_primary_key: bool = len(tuple(True for row in rows if dict(row)["pk"] > 0)) > 1
for row in rows:
column: t.Dict[str, t.Any] = dict(row)
mysql_safe_name: str = safe_identifier_length(column["name"])
column_type: str = self._translate_type_from_sqlite_to_mysql(column["type"])
# The "hidden" value is 0 for visible columns, 1 for "hidden" columns,
# 2 for computed virtual columns and 3 for computed stored columns.
# Read more on hidden columns here https://www.sqlite.org/pragma.html#pragma_table_xinfo
if "hidden" in column and column["hidden"] == 1:
continue
auto_increment: bool = (
column["pk"] > 0 and column_type.startswith(("INT", "BIGINT")) and not compound_primary_key
)
sql += " `{name}` {type} {notnull} {default} {auto_increment}, ".format(
name=mysql_safe_name,
type=column_type,
notnull="NOT NULL" if column["notnull"] or column["pk"] else "NULL",
auto_increment="AUTO_INCREMENT" if auto_increment else "",
default="DEFAULT " + column["dflt_value"]
if column["dflt_value"] and column_type not in MYSQL_COLUMN_TYPES_WITHOUT_DEFAULT and not auto_increment
else "",
)
if column["pk"] > 0:
primary_key: t.Dict[str, str] = {
"column": mysql_safe_name,
"length": "",
}
# In case we have a non-numeric primary key
if column_type in (
MYSQL_TEXT_COLUMN_TYPES_WITH_JSON + MYSQL_BLOB_COLUMN_TYPES
) or column_type.startswith(("CHAR", "VARCHAR")):
primary_key["length"] = self._column_type_length(column_type, 255)
primary_keys.append(primary_key)
sql = sql.rstrip(", ")
if len(primary_keys) > 0:
sql += ", PRIMARY KEY ({columns})".format(
columns=", ".join("`{column}`{length}".format(**primary_key) for primary_key in primary_keys)
)
if transfer_rowid:
sql += f", CONSTRAINT `{safe_identifier_length(table_name)}_rowid` UNIQUE (`rowid`)"
sql += f" ) ENGINE=InnoDB DEFAULT CHARSET={self._mysql_charset} COLLATE={self._mysql_collation}"
try:
self._mysql_cur.execute(sql)
self._mysql.commit()
except mysql.connector.Error as err:
self._logger.error(
"MySQL failed creating table %s: %s",
safe_identifier_length(table_name),
err,
)
raise
def _truncate_table(self, table_name: str) -> None:
self._mysql_cur.execute(
"""
SELECT `TABLE_NAME`
FROM `INFORMATION_SCHEMA`.`TABLES`
WHERE `TABLE_SCHEMA` = %s
AND `TABLE_NAME` = %s
LIMIT 1
""",
(self._mysql_database, safe_identifier_length(table_name)),
)
if len(self._mysql_cur.fetchall()) > 0:
self._logger.info("Truncating table %s", safe_identifier_length(table_name))
self._mysql_cur.execute(f"TRUNCATE TABLE `{safe_identifier_length(table_name)}`")
def _add_indices(self, table_name: str) -> None:
self._sqlite_cur.execute(f'PRAGMA table_info("{table_name}")')
table_columns: t.Dict[str, str] = {}
for row in self._sqlite_cur.fetchall():
column: t.Dict[str, t.Any] = dict(row)
table_columns[column["name"]] = column["type"]
self._sqlite_cur.execute(f'PRAGMA index_list("{table_name}")')
indices: t.Tuple[t.Dict[str, t.Any], ...] = tuple(dict(row) for row in self._sqlite_cur.fetchall())
for index in indices:
if index["origin"] == "pk":
continue
self._sqlite_cur.execute(f'PRAGMA index_info("{index["name"]}")')
index_infos: t.Tuple[t.Dict[str, t.Any], ...] = tuple(dict(row) for row in self._sqlite_cur.fetchall())
index_type: str = "UNIQUE" if int(index["unique"]) == 1 else "INDEX"
if any(
table_columns[index_info["name"]].upper() in MYSQL_TEXT_COLUMN_TYPES_WITH_JSON
for index_info in index_infos
):
if self._use_fulltext and self._mysql_fulltext_support:
# Use fulltext if requested and available
index_type = "FULLTEXT"
index_columns: str = ",".join(
f'`{safe_identifier_length(index_info["name"])}`' for index_info in index_infos
)
else:
# Limit the max TEXT field index length to 255
index_columns = ", ".join(
"`{column}`{length}".format(
column=safe_identifier_length(index_info["name"]),
length="(255)"
if table_columns[index_info["name"]].upper() in MYSQL_TEXT_COLUMN_TYPES_WITH_JSON
else "",
)
for index_info in index_infos
)
else:
column_list: t.List[str] = []
for index_info in index_infos:
index_length: str = ""
# Limit the max BLOB field index length to 255
if table_columns[index_info["name"]].upper() in MYSQL_BLOB_COLUMN_TYPES:
index_length = "(255)"
else:
suffix: t.Optional[t.Match[str]] = self.COLUMN_LENGTH_PATTERN.search(
table_columns[index_info["name"]]
)
if suffix:
index_length = suffix.group(0)
column_list.append(f'`{safe_identifier_length(index_info["name"])}`{index_length}')
index_columns = ", ".join(column_list)
try:
self._add_index(
table_name=table_name,
index_type=index_type,
index=index,
index_columns=index_columns,
index_infos=index_infos,
)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_BAD_FT_COLUMN and index_type == "FULLTEXT":
# handle bad FULLTEXT index
self._add_index(
table_name=table_name,
index_type="UNIQUE" if int(index["unique"]) == 1 else "INDEX",
index=index,
index_columns=", ".join(
"`{column}`{length}".format(
column=safe_identifier_length(index_info["name"]),
length="(255)"
if table_columns[index_info["name"]].upper() in MYSQL_TEXT_COLUMN_TYPES_WITH_JSON
else "",
)
for index_info in index_infos
),
index_infos=index_infos,
)
else:
raise
def _add_index(
self,
table_name: str,
index_type: str,
index: t.Dict[str, t.Any],
index_columns: str,
index_infos: t.Tuple[t.Dict[str, t.Any], ...],
index_iteration: int = 0,
) -> None:
sql: str = """
ALTER TABLE `{table}`
ADD {index_type} `{name}`({columns})
""".format(
table=safe_identifier_length(table_name),
index_type=index_type,
name=safe_identifier_length(index["name"])
if index_iteration == 0
else f'{safe_identifier_length(index["name"], max_length=60)}_{index_iteration}',
columns=index_columns,
)
try:
self._logger.info(
"""Adding %s to column "%s" in table %s""",
"unique index" if int(index["unique"]) == 1 else "index",
", ".join(safe_identifier_length(index_info["name"]) for index_info in index_infos),
safe_identifier_length(table_name),
)
self._mysql_cur.execute(sql)
self._mysql.commit()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_DUP_KEYNAME:
if not self._ignore_duplicate_keys:
# handle a duplicate key name
self._add_index(
table_name=table_name,
index_type=index_type,
index=index,
index_columns=index_columns,
index_infos=index_infos,
index_iteration=index_iteration + 1,
)
self._logger.warning(
"""Duplicate key "%s" in table %s detected! Trying to create new key "%s_%s" ...""",
safe_identifier_length(index["name"]),
safe_identifier_length(table_name),
safe_identifier_length(index["name"]),
index_iteration + 1,
)
else:
self._logger.warning(
"""Ignoring duplicate key "%s" in table %s!""",
safe_identifier_length(index["name"]),
safe_identifier_length(table_name),
)
elif err.errno == errorcode.ER_BAD_FT_COLUMN:
# handle bad FULLTEXT index
self._logger.warning(
"""Failed adding FULLTEXT index to column "%s" in table %s. Retrying without FULLTEXT ...""",
", ".join(safe_identifier_length(index_info["name"]) for index_info in index_infos),
safe_identifier_length(table_name),
)
raise
else:
self._logger.error(
"""MySQL failed adding index to column "%s" in table %s: %s""",
", ".join(safe_identifier_length(index_info["name"]) for index_info in index_infos),
safe_identifier_length(table_name),
err,
)
raise
def _add_foreign_keys(self, table_name: str) -> None:
self._sqlite_cur.execute(f'PRAGMA foreign_key_list("{table_name}")')
for row in self._sqlite_cur.fetchall():
foreign_key: t.Dict[str, t.Any] = dict(row)
sql = """
ALTER TABLE `{table}`
ADD CONSTRAINT `{table}_FK_{id}_{seq}`
FOREIGN KEY (`{column}`)
REFERENCES `{ref_table}`(`{ref_column}`)
ON DELETE {on_delete}
ON UPDATE {on_update}
""".format(
id=foreign_key["id"],
seq=foreign_key["seq"],
table=safe_identifier_length(table_name),
column=safe_identifier_length(foreign_key["from"]),
ref_table=safe_identifier_length(foreign_key["table"]),
ref_column=safe_identifier_length(foreign_key["to"]),
on_delete=foreign_key["on_delete"].upper()
if foreign_key["on_delete"].upper() != "SET DEFAULT"
else "NO ACTION",
on_update=foreign_key["on_update"].upper()
if foreign_key["on_update"].upper() != "SET DEFAULT"
else "NO ACTION",
)
try:
self._logger.info(
"Adding foreign key to %s.%s referencing %s.%s",
safe_identifier_length(table_name),
safe_identifier_length(foreign_key["from"]),
safe_identifier_length(foreign_key["table"]),
safe_identifier_length(foreign_key["to"]),
)
self._mysql_cur.execute(sql)
self._mysql.commit()
except mysql.connector.Error as err:
self._logger.error(
"MySQL failed adding foreign key to %s.%s referencing %s.%s: %s",
safe_identifier_length(table_name),
safe_identifier_length(foreign_key["from"]),
safe_identifier_length(foreign_key["table"]),
safe_identifier_length(foreign_key["to"]),
err,
)
raise
def _transfer_table_data(self, sql: str, total_records: int = 0) -> None:
if self._chunk_size is not None and self._chunk_size > 0:
for _ in trange(0, int(ceil(total_records / self._chunk_size)), disable=self._quiet):
self._mysql_cur.executemany(
sql,
(tuple(row) for row in self._sqlite_cur.fetchmany(self._chunk_size)), # type: ignore
)
else:
self._mysql_cur.executemany(
sql,
( # type: ignore
tuple(row)
for row in tqdm(
self._sqlite_cur.fetchall(),
total=total_records,
disable=self._quiet,
)
),
)
self._mysql.commit()
def transfer(self) -> None:
"""The primary and only method with which we transfer all the data."""
if len(self._sqlite_tables) > 0:
# transfer only specific tables
self._sqlite_cur.execute(
f"""
SELECT name FROM sqlite_master
WHERE type='table'
AND name NOT LIKE 'sqlite_%'
AND name IN({("?, " * len(self._sqlite_tables)).rstrip(" ,")})
""",
self._sqlite_tables,
)
else:
# transfer all tables
self._sqlite_cur.execute(
"""
SELECT name FROM sqlite_master
WHERE type='table'
AND name NOT LIKE 'sqlite_%'
"""
)
try:
self._mysql_cur.execute("SET FOREIGN_KEY_CHECKS=0")
for row in self._sqlite_cur.fetchall():
table: t.Dict[str, t.Any] = dict(row)
# check if we're transferring rowid
transfer_rowid: bool = self._with_rowid and self._sqlite_table_has_rowid(table["name"])
# create the table
self._create_table(table["name"], transfer_rowid=transfer_rowid)
# truncate the table on request
if self._mysql_truncate_tables:
self._truncate_table(table["name"])
# get the size of the data
self._sqlite_cur.execute(f'SELECT COUNT(*) AS total_records FROM "{table["name"]}"')
total_records = int(dict(self._sqlite_cur.fetchone())["total_records"])
# only continue if there is anything to transfer
if total_records > 0:
# populate it
self._logger.info("Transferring table %s", table["name"])
self._sqlite_cur.execute(
'''SELECT {rowid} * FROM "{table_name}"'''.format(
rowid='rowid as "rowid",' if transfer_rowid else "",
table_name=table["name"],
)
)
columns: t.List[str] = [
safe_identifier_length(column[0]) for column in self._sqlite_cur.description
]
if self._mysql_insert_method.upper() == "UPDATE":
sql: str = """
INSERT
INTO `{table}` ({fields})
VALUES ({placeholders}) AS `__new__`
ON DUPLICATE KEY UPDATE {field_updates}
""".format(
table=safe_identifier_length(table["name"]),
fields=("`{}`, " * len(columns)).rstrip(" ,").format(*columns),
placeholders=("%s, " * len(columns)).rstrip(" ,"),
field_updates=("`{}`=`__new__`.`{}`, " * len(columns))
.rstrip(" ,")
.format(*list(chain.from_iterable((column, column) for column in columns))),
)
else:
sql = """
INSERT {ignore}
INTO `{table}` ({fields})
VALUES ({placeholders})
""".format(
ignore="IGNORE" if self._mysql_insert_method.upper() == "IGNORE" else "",
table=safe_identifier_length(table["name"]),
fields=("`{}`, " * len(columns)).rstrip(" ,").format(*columns),
placeholders=("%s, " * len(columns)).rstrip(" ,"),
)
try:
self._transfer_table_data(sql=sql, total_records=total_records)
except mysql.connector.Error as err:
self._logger.error(
"MySQL transfer failed inserting data into table %s: %s",
safe_identifier_length(table["name"]),
err,
)
raise
# add indices
self._add_indices(table["name"])
# add foreign keys
if not self._without_foreign_keys:
self._add_foreign_keys(table["name"])
except Exception: # pylint: disable=W0706
raise
finally:
self._mysql_cur.execute("SET FOREIGN_KEY_CHECKS=1")
self._logger.info("Done!")
0707010000001F000081A400000000000000000000000165A2FB8A00000897000000000000000000000000000000000000003100000000sqlite3-to-mysql-2.1.7/sqlite3_to_mysql/types.py"""Types for sqlite3-to-mysql."""
import os
import typing as t
from logging import Logger
from sqlite3 import Connection, Cursor
import typing_extensions as tx
from mysql.connector import MySQLConnection
from mysql.connector.cursor import MySQLCursor
class SQLite3toMySQLParams(tx.TypedDict):
"""SQLite3toMySQL parameters."""
sqlite_file: t.Union[str, "os.PathLike[t.Any]"]
sqlite_tables: t.Optional[t.Sequence[str]]
without_foreign_keys: t.Optional[bool]
mysql_user: t.Optional[str]
mysql_password: t.Optional[t.Union[str, bool]]
mysql_host: t.Optional[str]
mysql_port: t.Optional[int]
mysql_ssl_disabled: t.Optional[bool]
chunk: t.Optional[int]
quiet: t.Optional[bool]
log_file: t.Optional[t.Union[str, "os.PathLike[t.Any]"]]
mysql_database: t.Optional[str]
mysql_integer_type: t.Optional[str]
mysql_truncate_tables: t.Optional[bool]
mysql_charset: t.Optional[str]
mysql_collation: t.Optional[str]
ignore_duplicate_keys: t.Optional[bool]
use_fulltext: t.Optional[bool]
with_rowid: t.Optional[bool]
mysql_insert_method: t.Optional[str]
mysql_string_type: t.Optional[str]
mysql_text_type: t.Optional[str]
class SQLite3toMySQLAttributes:
"""SQLite3toMySQL attributes."""
_sqlite_file: t.Union[str, "os.PathLike[t.Any]"]
_sqlite_tables: t.Sequence[str]
_without_foreign_keys: bool
_mysql_user: str
_mysql_password: t.Optional[str]
_mysql_host: str
_mysql_port: int
_mysql_ssl_disabled: bool
_chunk_size: t.Optional[int]
_quiet: bool
_logger: Logger
_log_file: t.Union[str, "os.PathLike[t.Any]"]
_mysql_database: str
_mysql_insert_method: str
_mysql_truncate_tables: bool
_mysql_integer_type: str
_mysql_string_type: str
_mysql_text_type: str
_mysql_charset: str
_mysql_collation: str
_ignore_duplicate_keys: bool
_use_fulltext: bool
_with_rowid: bool
_sqlite: Connection
_sqlite_cur: Cursor
_sqlite_version: str
_sqlite_table_xinfo_support: bool
_mysql: MySQLConnection
_mysql_cur: MySQLCursor
_mysql_version: str
_mysql_json_support: bool
_mysql_fulltext_support: bool
07070100000020000041ED00000000000000000000000265A2FB8A00000000000000000000000000000000000000000000001D00000000sqlite3-to-mysql-2.1.7/tests07070100000021000081A400000000000000000000000165A2FB8A00000000000000000000000000000000000000000000002900000000sqlite3-to-mysql-2.1.7/tests/__init__.py07070100000022000081A400000000000000000000000165A2FB8A00002BE3000000000000000000000000000000000000002900000000sqlite3-to-mysql-2.1.7/tests/conftest.pyimport json
import socket
import typing as t
from codecs import open
from contextlib import contextmanager
from os.path import abspath, dirname, isfile, join, realpath
from pathlib import Path
from time import sleep
import docker
import mysql.connector
import pytest
from _pytest._py.path import LocalPath
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from _pytest.legacypath import TempdirFactory
from click.testing import CliRunner
from docker import DockerClient
from docker.errors import NotFound
from docker.models.containers import Container
from faker import Faker
from mysql.connector import MySQLConnection, errorcode
from mysql.connector.connection_cext import CMySQLConnection
from mysql.connector.pooling import PooledMySQLConnection
from requests import HTTPError
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session
from sqlalchemy_utils import database_exists, drop_database
from . import database, factories
def pytest_addoption(parser: "Parser") -> None:
parser.addoption(
"--sqlite-file",
dest="sqlite_file",
default=None,
help="SQLite database file. Defaults to none and generates one internally.",
)
parser.addoption(
"--mysql-user",
dest="mysql_user",
default="tester",
help="MySQL user. Defaults to 'tester'.",
)
parser.addoption(
"--mysql-password",
dest="mysql_password",
default="testpass",
help="MySQL password. Defaults to 'testpass'.",
)
parser.addoption(
"--mysql-database",
dest="mysql_database",
default="test_db",
help="MySQL database name. Defaults to 'test_db'.",
)
parser.addoption(
"--mysql-host",
dest="mysql_host",
default="0.0.0.0",
help="Test against a MySQL server running on this host. Defaults to '0.0.0.0'.",
)
parser.addoption(
"--mysql-port",
dest="mysql_port",
type=int,
default=None,
help="The TCP port of the MySQL server.",
)
parser.addoption(
"--no-docker",
dest="use_docker",
default=True,
action="store_false",
help="Do not use a Docker MySQL image to run the tests. "
"If you decide to use this switch you will have to use a physical MySQL server.",
)
parser.addoption(
"--docker-mysql-image",
dest="docker_mysql_image",
default="mysql:latest",
help="Run the tests against a specific MySQL Docker image. Defaults to mysql:latest. "
"Check all supported versions here https://hub.docker.com/_/mysql",
)
@pytest.fixture(scope="session", autouse=True)
def cleanup_hanged_docker_containers() -> None:
try:
client: DockerClient = docker.from_env()
for container in client.containers.list():
if container.name == "pytest_sqlite3_to_mysql":
container.kill()
break
except Exception:
pass
def pytest_keyboard_interrupt() -> None:
try:
client: DockerClient = docker.from_env()
for container in client.containers.list():
if container.name == "pytest_sqlite3_to_mysql":
container.kill()
break
except Exception:
pass
class Helpers:
@staticmethod
@contextmanager
def not_raises(exception: t.Type[Exception]) -> t.Generator:
try:
yield
except exception:
            pytest.fail(f"DID RAISE {exception}")
@staticmethod
@contextmanager
def session_scope(db: database.Database) -> t.Generator:
"""Provide a transactional scope around a series of operations."""
session: Session = db.Session()
try:
yield session
session.commit()
except Exception:
session.rollback()
raise
finally:
session.close()
@pytest.fixture
def helpers() -> t.Type[Helpers]:
return Helpers
@pytest.fixture(scope="session")
def sqlite_database(pytestconfig: Config, _session_faker: Faker, tmpdir_factory: TempdirFactory) -> str:
db_file: LocalPath = pytestconfig.getoption("sqlite_file")
if db_file:
if not isfile(realpath(db_file)):
pytest.fail(f"{db_file} does not exist")
return str(realpath(db_file))
temp_data_dir: LocalPath = tmpdir_factory.mktemp("data")
temp_image_dir: LocalPath = tmpdir_factory.mktemp("images")
db_file = temp_data_dir.join(Path("db.sqlite3"))
db: database.Database = database.Database(f"sqlite:///{db_file}")
with Helpers.session_scope(db) as session:
for _ in range(_session_faker.pyint(min_value=12, max_value=24)):
article = factories.ArticleFactory()
article.authors.append(factories.AuthorFactory())
article.tags.append(factories.TagFactory())
article.misc.append(factories.MiscFactory())
article.media.append(factories.MediaFactory())
for _ in range(_session_faker.pyint(min_value=1, max_value=4)):
article.images.append(
factories.ImageFactory(
path=join(
str(temp_image_dir),
_session_faker.year(),
_session_faker.month(),
_session_faker.day_of_month(),
_session_faker.file_name(extension="jpg"),
)
)
)
session.add(article)
try:
session.commit()
except IntegrityError:
session.rollback()
return str(db_file)
def is_port_in_use(port: int, host: str = "0.0.0.0") -> bool:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex((host, port)) == 0
class MySQLCredentials(t.NamedTuple):
"""MySQL credentials."""
user: str
password: str
host: str
port: int
database: str
@pytest.fixture(scope="session")
def mysql_credentials(pytestconfig: Config) -> MySQLCredentials:
db_credentials_file: str = abspath(join(dirname(__file__), "db_credentials.json"))
if isfile(db_credentials_file):
with open(db_credentials_file, "r", "utf-8") as fh:
db_credentials: t.Dict[str, t.Any] = json.load(fh)
return MySQLCredentials(
user=db_credentials["mysql_user"],
password=db_credentials["mysql_password"],
database=db_credentials["mysql_database"],
host=db_credentials["mysql_host"],
port=db_credentials["mysql_port"],
)
port: int = pytestconfig.getoption("mysql_port") or 3306
if pytestconfig.getoption("use_docker"):
while is_port_in_use(port, pytestconfig.getoption("mysql_host")):
if port >= 2**16 - 1:
pytest.fail(f'No ports appear to be available on the host {pytestconfig.getoption("mysql_host")}')
port += 1
return MySQLCredentials(
user=pytestconfig.getoption("mysql_user") or "tester",
password=pytestconfig.getoption("mysql_password") or "testpass",
database=pytestconfig.getoption("mysql_database") or "test_db",
host=pytestconfig.getoption("mysql_host") or "0.0.0.0",
port=port,
)
@pytest.fixture(scope="session")
def mysql_instance(mysql_credentials: MySQLCredentials, pytestconfig: Config) -> t.Iterator[MySQLConnection]:
container: t.Optional[Container] = None
mysql_connection: t.Optional[t.Union[PooledMySQLConnection, MySQLConnection, CMySQLConnection]] = None
mysql_available: bool = False
mysql_connection_retries: int = 15 # failsafe
db_credentials_file = abspath(join(dirname(__file__), "db_credentials.json"))
if isfile(db_credentials_file):
use_docker = False
else:
use_docker = pytestconfig.getoption("use_docker")
if use_docker:
"""Connecting to a MySQL server within a Docker container is quite tricky :P
Read more on the issue here https://hub.docker.com/_/mysql#no-connections-until-mysql-init-completes
"""
try:
client = docker.from_env()
except Exception as err:
pytest.fail(str(err))
docker_mysql_image = pytestconfig.getoption("docker_mysql_image") or "mysql:latest"
if not any(docker_mysql_image in image.tags for image in client.images.list()):
print(f"Attempting to download Docker image {docker_mysql_image}'")
try:
client.images.pull(docker_mysql_image)
except (HTTPError, NotFound) as err:
pytest.fail(str(err))
container = client.containers.run(
image=docker_mysql_image,
name="pytest_sqlite3_to_mysql",
ports={
"3306/tcp": (
mysql_credentials.host,
f"{mysql_credentials.port}/tcp",
)
},
environment={
"MYSQL_RANDOM_ROOT_PASSWORD": "yes",
"MYSQL_USER": mysql_credentials.user,
"MYSQL_PASSWORD": mysql_credentials.password,
"MYSQL_DATABASE": mysql_credentials.database,
},
command=[
"--character-set-server=utf8mb4",
"--collation-server=utf8mb4_unicode_ci",
],
detach=True,
auto_remove=True,
)
while not mysql_available and mysql_connection_retries > 0:
try:
mysql_connection = mysql.connector.connect(
user=mysql_credentials.user,
password=mysql_credentials.password,
host=mysql_credentials.host,
port=mysql_credentials.port,
)
except mysql.connector.Error as err:
if err.errno == errorcode.CR_SERVER_LOST:
# sleep for two seconds and retry the connection
sleep(2)
else:
raise
finally:
mysql_connection_retries -= 1
if mysql_connection and mysql_connection.is_connected():
mysql_available = True
mysql_connection.close()
else:
if not mysql_available and mysql_connection_retries <= 0:
raise ConnectionAbortedError("Maximum MySQL connection retries exhausted! Are you sure MySQL is running?")
yield # type: ignore[misc]
if use_docker and container is not None:
container.kill()
@pytest.fixture()
def mysql_database(mysql_instance: t.Generator, mysql_credentials: MySQLCredentials) -> t.Iterator[Engine]:
yield # type: ignore[misc]
engine: Engine = create_engine(
f"mysql+pymysql://{mysql_credentials.user}:{mysql_credentials.password}@{mysql_credentials.host}:{mysql_credentials.port}/{mysql_credentials.database}"
)
if database_exists(engine.url):
drop_database(engine.url)
@pytest.fixture()
def cli_runner() -> t.Iterator[CliRunner]:
yield CliRunner()
07070100000023000081A400000000000000000000000165A2FB8A0000055A000000000000000000000000000000000000002900000000sqlite3-to-mysql-2.1.7/tests/database.pyimport typing as t
from datetime import datetime, timedelta
from decimal import Decimal
import simplejson as json
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import database_exists
from .models import Base
class Database:
engine: Engine
Session: sessionmaker
def __init__(self, database_uri):
self.Session = sessionmaker()
self.engine = create_engine(database_uri, json_serializer=self.dumps, json_deserializer=json.loads)
if not database_exists(database_uri):
self._create_db_tables()
self.Session.configure(bind=self.engine)
def _create_db_tables(self) -> None:
Base.metadata.create_all(self.engine)
@classmethod
def dumps(cls, data: t.Any) -> str:
return json.dumps(data, default=cls.json_serializer)
@staticmethod
def json_serializer(data: t.Any) -> t.Optional[str]:
if isinstance(data, datetime):
return data.isoformat()
if isinstance(data, Decimal):
return str(data)
if isinstance(data, timedelta):
hours, remainder = divmod(data.total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)
return "{:02}:{:02}:{:02}".format(int(hours), int(minutes), int(seconds))
return None
07070100000024000081A400000000000000000000000165A2FB8A00001280000000000000000000000000000000000000002A00000000sqlite3-to-mysql-2.1.7/tests/factories.pyimport typing as t
from os import environ
import factory
from . import faker_providers, models
factory.Faker.add_provider(faker_providers.DateTimeProviders)
class AuthorFactory(factory.Factory):
class Meta:
model: t.Type[models.Author] = models.Author
name: factory.Faker = factory.Faker("name")
class ImageFactory(factory.Factory):
class Meta:
model: t.Type[models.Image] = models.Image
path: factory.Faker = factory.Faker("file_path", depth=3, extension="jpg")
description: factory.Faker = factory.Faker("sentence", nb_words=12, variable_nb_words=True)
class TagFactory(factory.Factory):
class Meta:
model: t.Type[models.Tag] = models.Tag
name: factory.Faker = factory.Faker("sentence", nb_words=3, variable_nb_words=True)
class MiscFactory(factory.Factory):
class Meta:
model: t.Type[models.Misc] = models.Misc
big_integer_field: factory.Faker = factory.Faker("pyint", max_value=10**9)
blob_field: factory.Faker = factory.Faker("binary", length=1024 * 10)
boolean_field: factory.Faker = factory.Faker("boolean")
char_field: factory.Faker = factory.Faker("text", max_nb_chars=255)
date_field: factory.Faker = factory.Faker("date_this_decade")
date_time_field: factory.Faker = factory.Faker("date_time_this_century_without_microseconds")
decimal_field: factory.Faker = factory.Faker("pydecimal", left_digits=8, right_digits=2)
float_field: factory.Faker = factory.Faker("pyfloat", left_digits=8, right_digits=4)
integer_field: factory.Faker = factory.Faker("pyint", min_value=-(2**31), max_value=2**31 - 1)
if environ.get("LEGACY_DB", "0") == "0":
json_field = factory.Faker(
"pydict",
nb_elements=10,
variable_nb_elements=True,
value_types=["str", "int", "float", "boolean", "date_time"],
)
numeric_field: factory.Faker = factory.Faker("pyfloat", left_digits=8, right_digits=4)
real_field: factory.Faker = factory.Faker("pyfloat", left_digits=8, right_digits=4)
small_integer_field: factory.Faker = factory.Faker("pyint", min_value=-(2**15), max_value=2**15 - 1)
string_field: factory.Faker = factory.Faker("text", max_nb_chars=255)
text_field: factory.Faker = factory.Faker("text", max_nb_chars=1024)
time_field: factory.Faker = factory.Faker("time_object_without_microseconds")
varchar_field: factory.Faker = factory.Faker("text", max_nb_chars=255)
timestamp_field: factory.Faker = factory.Faker("date_time_this_century_without_microseconds")
my_type_field: factory.Faker = factory.Faker("text", max_nb_chars=255)
class ArticleFactory(factory.Factory):
class Meta:
model: t.Type[models.Article] = models.Article
hash: factory.Faker = factory.Faker("md5")
title: factory.Faker = factory.Faker("sentence", nb_words=6)
slug: factory.Faker = factory.Faker("slug")
content: factory.Faker = factory.Faker("text", max_nb_chars=1024)
status: factory.Faker = factory.Faker("pystr", max_chars=1)
published: factory.Faker = factory.Faker("date_between", start_date="-1y", end_date="-1d")
@factory.post_generation
def authors(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
            # A list of authors was passed in, use them
for author in extracted:
self.authors.add(author)
@factory.post_generation
def tags(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
            # A list of tags was passed in, use them
for tag in extracted:
self.tags.add(tag)
@factory.post_generation
def images(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
            # A list of images was passed in, use them
for image in extracted:
self.images.add(image)
@factory.post_generation
def misc(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
            # A list of misc objects was passed in, use them
for misc in extracted:
self.misc.add(misc)
class MediaFactory(factory.Factory):
class Meta:
model: t.Type[models.Media] = models.Media
id: factory.Faker = factory.Faker("sha256", raw_output=False)
title: factory.Faker = factory.Faker("sentence", nb_words=6)
description: factory.Faker = factory.Faker("sentence", nb_words=12, variable_nb_words=True)
07070100000025000081A400000000000000000000000165A2FB8A00000308000000000000000000000000000000000000003000000000sqlite3-to-mysql-2.1.7/tests/faker_providers.pyimport datetime
from typing import Optional
from faker.providers import BaseProvider, date_time
from faker.typing import DateParseType
class DateTimeProviders(BaseProvider):
def time_object_without_microseconds(self, end_datetime: Optional[DateParseType] = None) -> datetime.time:
return date_time.Provider(self.generator).time_object(end_datetime).replace(microsecond=0)
def date_time_this_century_without_microseconds(
self,
before_now: bool = True,
after_now: bool = False,
tzinfo: Optional[datetime.tzinfo] = None,
) -> datetime.datetime:
return (
date_time.Provider(self.generator)
.date_time_this_century(before_now, after_now, tzinfo)
.replace(microsecond=0)
)
07070100000026000041ED00000000000000000000000265A2FB8A00000000000000000000000000000000000000000000002200000000sqlite3-to-mysql-2.1.7/tests/func07070100000027000081A400000000000000000000000165A2FB8A00000000000000000000000000000000000000000000002E00000000sqlite3-to-mysql-2.1.7/tests/func/__init__.py07070100000028000081A400000000000000000000000165A2FB8A00005E6F000000000000000000000000000000000000003B00000000sqlite3-to-mysql-2.1.7/tests/func/sqlite3_to_mysql_test.pyimport logging
import re
import typing as t
from collections import namedtuple
from itertools import chain
from pathlib import Path
from random import choice, sample
import mysql.connector
import pytest
import simplejson as json
from _pytest._py.path import LocalPath
from _pytest.capture import CaptureFixture
from _pytest.logging import LogCaptureFixture
from faker import Faker
from mysql.connector import MySQLConnection, errorcode
from mysql.connector.connection_cext import CMySQLConnection
from mysql.connector.pooling import PooledMySQLConnection
from pytest_mock import MockFixture
from sqlalchemy import MetaData, Table, create_engine, inspect, select, text
from sqlalchemy.engine import Connection, CursorResult, Engine, Inspector, Row
from sqlalchemy.engine.interfaces import ReflectedIndex
from sqlalchemy.sql import Select
from sqlalchemy.sql.elements import TextClause
from sqlite3_to_mysql import SQLite3toMySQL
from tests.conftest import Helpers, MySQLCredentials
@pytest.mark.usefixtures("sqlite_database", "mysql_instance")
class TestSQLite3toMySQL:
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_no_sqlite_file_raises_exception(self, quiet: bool) -> None:
with pytest.raises(ValueError) as excinfo:
SQLite3toMySQL(quiet=quiet) # type: ignore
assert "Please provide an SQLite file" in str(excinfo.value)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_invalid_sqlite_file_raises_exception(self, faker: Faker, quiet: bool) -> None:
with pytest.raises((FileNotFoundError, IOError)) as excinfo:
SQLite3toMySQL(sqlite_file=faker.file_path(depth=1, extension=".sqlite3"), quiet=quiet) # type: ignore[call-arg]
assert "SQLite file does not exist" in str(excinfo.value)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_missing_mysql_user_raises_exception(self, sqlite_database: str, quiet: bool) -> None:
with pytest.raises(ValueError) as excinfo:
SQLite3toMySQL(sqlite_file=sqlite_database, quiet=quiet) # type: ignore[call-arg]
assert "Please provide a MySQL user" in str(excinfo.value)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_valid_sqlite_file_and_valid_mysql_credentials(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
helpers: Helpers,
quiet: bool,
):
with helpers.not_raises(FileNotFoundError):
SQLite3toMySQL( # type: ignore
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
chunk=10,
quiet=quiet,
)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_valid_sqlite_file_and_invalid_mysql_credentials_raises_access_denied_exception(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
faker: Faker,
quiet: bool,
) -> None:
with pytest.raises(mysql.connector.Error) as excinfo:
SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=faker.first_name().lower(),
mysql_password=faker.password(length=16),
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
assert "Access denied for user" in str(excinfo.value)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_unspecified_mysql_error(
self,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mocker: MockFixture,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
mocker.patch.object(
mysql.connector,
"connect",
side_effect=mysql.connector.Error(
msg="Error Code: 2000. Unknown MySQL error",
errno=errorcode.CR_UNKNOWN_ERROR,
),
)
caplog.set_level(logging.DEBUG)
with pytest.raises(mysql.connector.Error) as excinfo:
SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
chunk=10,
quiet=quiet,
)
assert str(errorcode.CR_UNKNOWN_ERROR) in str(excinfo.value)
assert any(str(errorcode.CR_UNKNOWN_ERROR) in message for message in caplog.messages)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_bad_database_error(
self,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mocker: MockFixture,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
class FakeMySQLConnection(MySQLConnection):
@property
def database(self):
return self._database
@database.setter
def database(self, value):
self._database = value
# raise a fake exception
raise mysql.connector.Error(msg="This is a test", errno=errorcode.ER_UNKNOWN_ERROR)
def is_connected(self):
return True
def cursor(
self,
buffered=None,
raw=None,
prepared=None,
cursor_class=None,
dictionary=None,
named_tuple=None,
):
return True
mocker.patch.object(mysql.connector, "connect", return_value=FakeMySQLConnection())
with pytest.raises(mysql.connector.Error):
caplog.set_level(logging.DEBUG)
SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
chunk=10,
quiet=quiet,
)
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_bad_mysql_connection(
self, sqlite_database: str, mysql_credentials: MySQLCredentials, mocker: MockFixture, quiet: bool
) -> None:
FakeConnector = namedtuple("FakeConnector", ["is_connected"])
mocker.patch.object(
mysql.connector,
"connect",
return_value=FakeConnector(is_connected=lambda: False),
)
with pytest.raises((ConnectionError, IOError)) as excinfo:
SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
chunk=10,
quiet=quiet,
)
assert "Unable to connect to MySQL" in str(excinfo.value)
@pytest.mark.xfail
@pytest.mark.init
@pytest.mark.parametrize("quiet", [False, True])
def test_log_to_file(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
faker: Faker,
caplog: LogCaptureFixture,
tmpdir: LocalPath,
quiet: bool,
):
log_file: LocalPath = tmpdir.join(Path("db.log"))
with pytest.raises(mysql.connector.Error):
caplog.set_level(logging.DEBUG)
SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=faker.first_name().lower(),
mysql_password=faker.password(length=16),
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
log_file=str(log_file),
quiet=quiet,
)
assert any("Access denied for user" in message for message in caplog.messages)
with log_file.open("r") as log_fh:
log: str = log_fh.read()
if len(caplog.messages) > 1:
assert caplog.messages[1] in log
else:
assert caplog.messages[0] in log
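# The log file is expected to contain lines starting with a
# "YYYY-MM-DD HH:MM:SS LEVEL" prefix, which is what the regex below asserts.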
assert re.match(r"^\d{4,}-\d{2,}-\d{2,}\s+\d{2,}:\d{2,}:\d{2,}\s+\w+\s+", log) is not None
@pytest.mark.transfer
@pytest.mark.parametrize(
"chunk, with_rowid, mysql_insert_method, ignore_duplicate_keys",
[
(None, False, "IGNORE", False),
(None, False, "IGNORE", True),
(None, False, "UPDATE", True),
(None, False, "UPDATE", False),
(None, False, "DEFAULT", True),
(None, False, "DEFAULT", False),
(None, True, "IGNORE", False),
(None, True, "IGNORE", True),
(None, True, "UPDATE", True),
(None, True, "UPDATE", False),
(None, True, "DEFAULT", True),
(None, True, "DEFAULT", False),
(10, False, "IGNORE", False),
(10, False, "IGNORE", True),
(10, False, "UPDATE", True),
(10, False, "UPDATE", False),
(10, False, "DEFAULT", True),
(10, False, "DEFAULT", False),
(10, True, "IGNORE", False),
(10, True, "IGNORE", True),
(10, True, "UPDATE", True),
(10, True, "UPDATE", False),
(10, True, "DEFAULT", True),
(10, True, "DEFAULT", False),
],
)
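# The cases above are the full cartesian product of chunk size (None/10), rowid
# transfer (off/on), INSERT method (IGNORE/UPDATE/DEFAULT) and duplicate-key
# handling (off/on): 2 x 2 x 3 x 2 = 24 combinations.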
def test_transfer_transfers_all_tables_in_sqlite_file(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
helpers: Helpers,
capsys: CaptureFixture,
caplog: LogCaptureFixture,
chunk: t.Optional[int],
with_rowid: bool,
mysql_insert_method: str,
ignore_duplicate_keys: bool,
):
proc: SQLite3toMySQL = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
chunk=chunk,
with_rowid=with_rowid,
mysql_insert_method=mysql_insert_method,
ignore_duplicate_keys=ignore_duplicate_keys,
)
caplog.set_level(logging.DEBUG)
proc.transfer()
assert all(record.levelname == "INFO" for record in caplog.records)
assert not any(record.levelname == "ERROR" for record in caplog.records)
out, err = capsys.readouterr()
sqlite_engine: Engine = create_engine(
f"sqlite:///{sqlite_database}",
json_serializer=json.dumps,
json_deserializer=json.loads,
)
sqlite_cnx: Connection = sqlite_engine.connect()
sqlite_inspect: Inspector = inspect(sqlite_engine)
sqlite_tables: t.List[str] = sqlite_inspect.get_table_names()
mysql_engine: Engine = create_engine(
f"mysql+pymysql://{mysql_credentials.user}:{mysql_credentials.password}@{mysql_credentials.host}:{mysql_credentials.port}/{mysql_credentials.database}",
json_serializer=json.dumps,
json_deserializer=json.loads,
)
mysql_cnx: Connection = mysql_engine.connect()
mysql_inspect: Inspector = inspect(mysql_engine)
mysql_tables: t.List[str] = mysql_inspect.get_table_names()
mysql_connector_connection: t.Union[
PooledMySQLConnection, MySQLConnection, CMySQLConnection
] = mysql.connector.connect(
user=mysql_credentials.user,
password=mysql_credentials.password,
host=mysql_credentials.host,
port=mysql_credentials.port,
database=mysql_credentials.database,
)
server_version: t.Tuple[int, ...] = mysql_connector_connection.get_server_version()
""" Test if both databases have the same table names """
assert sqlite_tables == mysql_tables
""" Test if all the tables have the same column names """
for table_name in sqlite_tables:
column_names: t.List[str] = [column["name"] for column in sqlite_inspect.get_columns(table_name)]
if with_rowid:
column_names.insert(0, "rowid")
assert column_names == [column["name"] for column in mysql_inspect.get_columns(table_name)]
""" Test if all the tables have the same indices """
index_keys: t.Tuple[str, ...] = ("name", "column_names", "unique")
mysql_indices: t.Tuple[ReflectedIndex, ...] = tuple(
t.cast(ReflectedIndex, {key: index[key] for key in index_keys}) # type: ignore[literal-required]
for index in (chain.from_iterable(mysql_inspect.get_indexes(table_name) for table_name in mysql_tables))
)
for table_name in sqlite_tables:
sqlite_indices: t.List[ReflectedIndex] = sqlite_inspect.get_indexes(table_name)
if with_rowid:
sqlite_indices.insert(
0,
ReflectedIndex(
name=f"{table_name}_rowid",
column_names=["rowid"],
unique=True,
),
)
for sqlite_index in sqlite_indices:
sqlite_index["unique"] = bool(sqlite_index["unique"])
if "dialect_options" in sqlite_index:
sqlite_index.pop("dialect_options", None)
assert sqlite_index in mysql_indices
""" Test if all the tables have the same foreign keys """
for table_name in sqlite_tables:
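# The information_schema query below labels its columns "table", "from" and "to"
# to mirror the output of SQLite's PRAGMA foreign_key_list(), so the two result
# sets can be compared entry by entry further down.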
mysql_fk_stmt: TextClause = text(
"""
SELECT k.REFERENCED_TABLE_NAME AS `table`, k.COLUMN_NAME AS `from`, k.REFERENCED_COLUMN_NAME AS `to`
FROM information_schema.TABLE_CONSTRAINTS AS i
{JOIN} information_schema.KEY_COLUMN_USAGE AS k ON i.CONSTRAINT_NAME = k.CONSTRAINT_NAME
WHERE i.TABLE_SCHEMA = :table_schema
AND i.TABLE_NAME = :table_name
AND i.CONSTRAINT_TYPE = :constraint_type
""".format(
# MySQL 8.0.19 still works with "LEFT JOIN"; everything above requires "JOIN"
JOIN="JOIN"
if (server_version[0] == 8 and server_version[2] > 19)
else "LEFT JOIN"
)
).bindparams(
table_schema=mysql_credentials.database,
table_name=table_name,
constraint_type="FOREIGN KEY",
)
mysql_fk_result: CursorResult = mysql_cnx.execute(mysql_fk_stmt)
mysql_foreign_keys: t.List[t.Dict[str, t.Any]] = [
{
"table": fk["table"],
"from": fk["from"],
"to": fk["to"],
}
for fk in mysql_fk_result.mappings()
]
sqlite_fk_stmt: TextClause = text(f'PRAGMA foreign_key_list("{table_name}")')
sqlite_fk_result = sqlite_cnx.execute(sqlite_fk_stmt)
if sqlite_fk_result.returns_rows:
for fk in sqlite_fk_result.mappings():
assert {
"table": fk["table"],
"from": fk["from"],
"to": fk["to"],
} in mysql_foreign_keys
""" Check if all the data was transferred correctly """
sqlite_results: t.List[t.Tuple[t.Tuple[t.Any, ...], ...]] = []
mysql_results: t.List[t.Tuple[t.Tuple[t.Any, ...], ...]] = []
meta: MetaData = MetaData()
for table_name in sqlite_tables:
sqlite_table: Table = Table(table_name, meta, autoload_with=sqlite_engine)
sqlite_stmt: Select = select(sqlite_table)
sqlite_result: t.List[Row[t.Any]] = list(sqlite_cnx.execute(sqlite_stmt).fetchall())
sqlite_result.sort()
sqlite_results.append(tuple(tuple(data for data in row) for row in sqlite_result))
for table_name in mysql_tables:
mysql_table: Table = Table(table_name, meta, autoload_with=mysql_engine)
mysql_stmt: Select = select(mysql_table)
mysql_result: t.List[Row[t.Any]] = list(mysql_cnx.execute(mysql_stmt).fetchall())
mysql_result.sort()
mysql_results.append(tuple(tuple(data for data in row) for row in mysql_result))
assert sqlite_results == mysql_results
mysql_cnx.close()
sqlite_cnx.close()
mysql_engine.dispose()
sqlite_engine.dispose()
@pytest.mark.transfer
@pytest.mark.parametrize(
"chunk, with_rowid, mysql_insert_method, ignore_duplicate_keys",
[
(None, False, "IGNORE", False),
(None, False, "IGNORE", True),
(None, False, "UPDATE", True),
(None, False, "UPDATE", False),
(None, False, "DEFAULT", True),
(None, False, "DEFAULT", False),
(None, True, "IGNORE", False),
(None, True, "IGNORE", True),
(None, True, "UPDATE", True),
(None, True, "UPDATE", False),
(None, True, "DEFAULT", True),
(None, True, "DEFAULT", False),
(10, False, "IGNORE", False),
(10, False, "IGNORE", True),
(10, False, "UPDATE", True),
(10, False, "UPDATE", False),
(10, False, "DEFAULT", True),
(10, False, "DEFAULT", False),
(10, True, "IGNORE", False),
(10, True, "IGNORE", True),
(10, True, "UPDATE", True),
(10, True, "UPDATE", False),
(10, True, "DEFAULT", True),
(10, True, "DEFAULT", False),
],
)
def test_transfer_specific_tables_transfers_only_specified_tables_from_sqlite_file(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
helpers: Helpers,
capsys: CaptureFixture,
caplog: LogCaptureFixture,
chunk: t.Optional[int],
with_rowid: bool,
mysql_insert_method: str,
ignore_duplicate_keys: bool,
) -> None:
sqlite_engine: Engine = create_engine(
f"sqlite:///{sqlite_database}",
json_serializer=json.dumps,
json_deserializer=json.loads,
)
sqlite_cnx: Connection = sqlite_engine.connect()
sqlite_inspect: Inspector = inspect(sqlite_engine)
sqlite_tables: t.List[str] = sqlite_inspect.get_table_names()
table_number: int = choice(range(1, len(sqlite_tables)))
random_sqlite_tables: t.List[str] = sample(sqlite_tables, table_number)
random_sqlite_tables.sort()
proc: SQLite3toMySQL = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
sqlite_tables=random_sqlite_tables,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
chunk=chunk,
with_rowid=with_rowid,
mysql_insert_method=mysql_insert_method,
ignore_duplicate_keys=ignore_duplicate_keys,
)
caplog.set_level(logging.DEBUG)
proc.transfer()
assert all(record.levelname == "INFO" for record in caplog.records)
assert not any(record.levelname == "ERROR" for record in caplog.records)
out, err = capsys.readouterr()
mysql_engine: Engine = create_engine(
f"mysql+pymysql://{mysql_credentials.user}:{mysql_credentials.password}@{mysql_credentials.host}:{mysql_credentials.port}/{mysql_credentials.database}",
json_serializer=json.dumps,
json_deserializer=json.loads,
)
mysql_cnx: Connection = mysql_engine.connect()
mysql_inspect: Inspector = inspect(mysql_engine)
mysql_tables: t.List[str] = mysql_inspect.get_table_names()
""" Test if both databases have the same table names """
assert random_sqlite_tables == mysql_tables
""" Test if all the tables have the same column names """
for table_name in random_sqlite_tables:
column_names: t.List[t.Any] = [column["name"] for column in sqlite_inspect.get_columns(table_name)]
if with_rowid:
column_names.insert(0, "rowid")
assert column_names == [column["name"] for column in mysql_inspect.get_columns(table_name)]
""" Test if all the tables have the same indices """
index_keys: t.Tuple[str, ...] = ("name", "column_names", "unique")
mysql_indices: t.Tuple[ReflectedIndex, ...] = tuple(
t.cast(ReflectedIndex, {key: index[key] for key in index_keys}) # type: ignore[literal-required]
for index in (chain.from_iterable(mysql_inspect.get_indexes(table_name) for table_name in mysql_tables))
)
for table_name in random_sqlite_tables:
sqlite_indices: t.List[ReflectedIndex] = sqlite_inspect.get_indexes(table_name)
if with_rowid:
sqlite_indices.insert(
0,
ReflectedIndex(
name=f"{table_name}_rowid",
column_names=["rowid"],
unique=True,
),
)
for sqlite_index in sqlite_indices:
sqlite_index["unique"] = bool(sqlite_index["unique"])
if "dialect_options" in sqlite_index:
sqlite_index.pop("dialect_options", None)
assert sqlite_index in mysql_indices
""" Check if all the data was transferred correctly """
sqlite_results: t.List[t.Tuple[t.Tuple[t.Any, ...], ...]] = []
mysql_results: t.List[t.Tuple[t.Tuple[t.Any, ...], ...]] = []
meta: MetaData = MetaData()
for table_name in random_sqlite_tables:
sqlite_table: Table = Table(table_name, meta, autoload_with=sqlite_engine)
sqlite_stmt: Select = select(sqlite_table)
sqlite_result: t.List[Row[t.Any]] = list(sqlite_cnx.execute(sqlite_stmt).fetchall())
sqlite_result.sort()
sqlite_results.append(tuple(tuple(data for data in row) for row in sqlite_result))
for table_name in mysql_tables:
mysql_table: Table = Table(table_name, meta, autoload_with=mysql_engine)
mysql_stmt: Select = select(mysql_table)
mysql_result: t.List[Row[t.Any]] = list(mysql_cnx.execute(mysql_stmt).fetchall())
mysql_result.sort()
mysql_results.append(tuple(tuple(data for data in row) for row in mysql_result))
assert sqlite_results == mysql_results
mysql_cnx.close()
sqlite_cnx.close()
mysql_engine.dispose()
sqlite_engine.dispose()
sqlite3-to-mysql-2.1.7/tests/func/test_cli.py
import typing as t
from datetime import datetime
from random import choice, sample
import pytest
import simplejson as json
from click.testing import CliRunner, Result
from faker import Faker
from pytest_mock import MockFixture
from sqlalchemy import create_engine, inspect
from sqlalchemy.engine import Engine, Inspector
from sqlite3_to_mysql import SQLite3toMySQL
from sqlite3_to_mysql import __version__ as package_version
from sqlite3_to_mysql.cli import cli as sqlite3mysql
from tests.conftest import MySQLCredentials
@pytest.mark.cli
@pytest.mark.usefixtures("sqlite_database", "mysql_instance")
class TestSQLite3toMySQL:
def test_no_arguments(self, cli_runner: CliRunner, mysql_database: Engine) -> None:
result: Result = cli_runner.invoke(sqlite3mysql)
assert result.exit_code == 0
assert all(
message in result.output
for message in {
f"Usage: {sqlite3mysql.name} [OPTIONS]",
f"{sqlite3mysql.name} version {package_version} Copyright (c) 2018-{datetime.now().year} Klemen Tusar",
}
)
def test_non_existing_sqlite_file(self, cli_runner: CliRunner, mysql_database: Engine, faker: Faker) -> None:
result: Result = cli_runner.invoke(sqlite3mysql, ["-f", faker.file_path(depth=1, extension=".sqlite3")])
assert result.exit_code > 0
assert "Error: Invalid value" in result.output
assert "does not exist" in result.output
def test_no_database_name(self, cli_runner: CliRunner, sqlite_database: str, mysql_database: Engine) -> None:
result = cli_runner.invoke(sqlite3mysql, ["-f", sqlite_database])
assert result.exit_code > 0
assert any(
message in result.output
for message in {
'Error: Missing option "-d" / "--mysql-database"',
"Error: Missing option '-d' / '--mysql-database'",
}
)
def test_no_database_user(
self, cli_runner: CliRunner, sqlite_database: str, mysql_credentials: MySQLCredentials, mysql_database: Engine
) -> None:
result: Result = cli_runner.invoke(sqlite3mysql, ["-f", sqlite_database, "-d", mysql_credentials.database])
assert result.exit_code > 0
assert any(
message in result.output
for message in {
'Error: Missing option "-u" / "--mysql-user"',
"Error: Missing option '-u' / '--mysql-user'",
}
)
def test_invalid_database_name(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_database: Engine,
faker: Faker,
) -> None:
result: Result = cli_runner.invoke(
sqlite3mysql,
[
"-f",
sqlite_database,
"-d",
"_".join(faker.words(nb=3)),
"-u",
faker.first_name().lower(),
],
)
assert result.exit_code > 0
assert "1045 (28000): Access denied" in result.output
def test_invalid_database_user(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_database: Engine,
faker: Faker,
) -> None:
result: Result = cli_runner.invoke(
sqlite3mysql,
[
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
faker.first_name().lower(),
],
)
assert result.exit_code > 0
assert "1045 (28000): Access denied" in result.output
def test_invalid_database_password(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_database: Engine,
faker: Faker,
) -> None:
result: Result = cli_runner.invoke(
sqlite3mysql,
[
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"--mysql-password",
faker.password(length=16),
],
)
assert result.exit_code > 0
assert "1045 (28000): Access denied" in result.output
def test_database_password_prompt(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_database: Engine,
) -> None:
result: Result = cli_runner.invoke(
sqlite3mysql,
args=[
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"-p",
],
input=mysql_credentials.password,
)
assert result.exit_code == 0
def test_invalid_database_password_prompt(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_database: Engine,
faker: Faker,
) -> None:
result: Result = cli_runner.invoke(
sqlite3mysql,
args=[
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"-p",
],
input=faker.password(length=16),
)
assert result.exit_code > 0
assert "1045 (28000): Access denied" in result.output
def test_invalid_database_port(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_database: Engine,
faker: Faker,
) -> None:
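# Pick a random port and make sure it differs from the real MySQL port so that
# the connection attempt below is guaranteed to fail.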
port: int = choice(range(2, 2**16 - 1))
if port == mysql_credentials.port:
port -= 1
result: Result = cli_runner.invoke(
sqlite3mysql,
[
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"--mysql-password",
mysql_credentials.password,
"-h",
mysql_credentials.host,
"-P",
str(port),
],
)
assert result.exit_code > 0
assert any(
message in result.output
for message in {
"2003 (HY000): Can't connect to MySQL server on",
"2003: Can't connect to MySQL server",
}
)
@pytest.mark.parametrize(
"mysql_integer_type,"
"mysql_string_type,"
"mysql_text_type,"
"chunk,"
"with_rowid,"
"mysql_insert_method,"
"ignore_duplicate_keys",
[
# 00000
(None, None, None, None, False, "DEFAULT", False),
# 10000
("BIGINT(19)", None, None, None, False, "UPDATE", True),
# 01000
(None, "VARCHAR(512)", None, None, False, "IGNORE", False),
# 11000
("BIGINT(19)", "VARCHAR(512)", None, None, False, "DEFAULT", True),
# 00100
(None, None, "MEDIUMTEXT", None, False, "UPDATE", False),
# 10100
("BIGINT(19)", None, "MEDIUMTEXT", None, False, "IGNORE", True),
# 01100
(None, "VARCHAR(512)", "MEDIUMTEXT", None, False, "DEFAULT", False),
# 11100
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", None, False, "UPDATE", True),
# 00010
(None, None, None, 10, False, "IGNORE", False),
# 10010
("BIGINT(19)", None, None, 10, False, "DEFAULT", True),
# 01010
(None, "VARCHAR(512)", None, 10, False, "UPDATE", False),
# 11010
("BIGINT(19)", "VARCHAR(512)", None, 10, False, "IGNORE", True),
# 00110
(None, None, "MEDIUMTEXT", 10, False, "DEFAULT", False),
# 10110
("BIGINT(19)", None, "MEDIUMTEXT", 10, False, "UPDATE", True),
# 01110
(None, "VARCHAR(512)", "MEDIUMTEXT", 10, False, "IGNORE", False),
# 11110
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", 10, False, "DEFAULT", True),
# 00001
(None, None, None, None, True, "UPDATE", False),
# 10001
("BIGINT(19)", None, None, None, True, "IGNORE", True),
# 01001
(None, "VARCHAR(512)", None, None, True, "DEFAULT", False),
# 11001
("BIGINT(19)", "VARCHAR(512)", None, None, True, "UPDATE", True),
# 00101
(None, None, "MEDIUMTEXT", None, True, "IGNORE", False),
# 10101
("BIGINT(19)", None, "MEDIUMTEXT", None, True, "DEFAULT", True),
# 01101
(None, "VARCHAR(512)", "MEDIUMTEXT", None, True, "UPDATE", False),
# 11101
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", None, True, "IGNORE", True),
# 00011
(None, None, None, 10, True, "DEFAULT", False),
# 10011
("BIGINT(19)", None, None, 10, True, "UPDATE", True),
# 01011
(None, "VARCHAR(512)", None, 10, True, "IGNORE", False),
# 11011
("BIGINT(19)", "VARCHAR(512)", None, 10, True, "DEFAULT", True),
# 00111
(None, None, "MEDIUMTEXT", 10, True, "UPDATE", False),
# 10111
("BIGINT(19)", None, "MEDIUMTEXT", 10, True, "IGNORE", True),
# 01111
(None, "VARCHAR(512)", "MEDIUMTEXT", 10, True, "DEFAULT", False),
# 11111
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", 10, True, "UPDATE", True),
],
)
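# The five-digit comments above track which of the first five options (integer
# type, string type, text type, chunk, rowid) are overridden in each case; the
# insert method and the duplicate-key flag are rotated across the same rows.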
def test_minimum_valid_parameters(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_integer_type: t.Optional[str],
mysql_string_type: t.Optional[str],
mysql_text_type: t.Optional[str],
mysql_database: Engine,
chunk: t.Optional[int],
with_rowid: bool,
mysql_insert_method: str,
ignore_duplicate_keys: bool,
) -> None:
arguments: t.List[str] = [
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"--mysql-password",
mysql_credentials.password,
"-h",
mysql_credentials.host,
"-P",
str(mysql_credentials.port),
"--mysql-insert-method",
mysql_insert_method,
]
if mysql_integer_type:
arguments.append("--mysql-integer-type")
arguments.append(mysql_integer_type)
if mysql_string_type:
arguments.append("--mysql-string-type")
arguments.append(mysql_string_type)
if mysql_text_type:
arguments.append("--mysql-text-type")
arguments.append(mysql_text_type)
if chunk:
arguments.append("-c")
arguments.append(str(chunk))
if with_rowid:
arguments.append("--with-rowid")
if ignore_duplicate_keys:
arguments.append("--ignore-duplicate-keys")
result: Result = cli_runner.invoke(
sqlite3mysql,
arguments,
)
assert result.exit_code == 0
def test_keyboard_interrupt(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mocker: MockFixture,
) -> None:
mocker.patch.object(SQLite3toMySQL, "transfer", side_effect=KeyboardInterrupt())
result: Result = cli_runner.invoke(
sqlite3mysql,
[
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"--mysql-password",
mysql_credentials.password,
"-h",
mysql_credentials.host,
"-P",
str(mysql_credentials.port),
],
)
assert result.exit_code > 0
assert "Process interrupted" in result.output
def test_transfer_specific_tables_only(
self,
cli_runner: CliRunner,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
) -> None:
sqlite_engine: Engine = create_engine(
f"sqlite:///{sqlite_database}",
json_serializer=json.dumps,
json_deserializer=json.loads,
)
sqlite_inspect: Inspector = inspect(sqlite_engine)
sqlite_tables: t.List[str] = sqlite_inspect.get_table_names()
table_number: int = choice(range(1, len(sqlite_tables)))
result: Result = cli_runner.invoke(
sqlite3mysql,
[
"-f",
sqlite_database,
"-t",
" ".join(sample(sqlite_tables, table_number)),
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"--mysql-password",
mysql_credentials.password,
"-h",
mysql_credentials.host,
"-P",
str(mysql_credentials.port),
],
)
assert result.exit_code == 0
@pytest.mark.xfail
def test_version(self, cli_runner: CliRunner) -> None:
result: Result = cli_runner.invoke(sqlite3mysql, ["--version"])
assert result.exit_code == 0
assert all(
message in result.output
for message in {
"sqlite3-to-mysql",
"Operating",
"System",
"Python",
"MySQL",
"SQLite",
"click",
"mysql-connector-python",
"pytimeparse2",
"simplejson",
"tabulate",
"tqdm",
}
)
@pytest.mark.xfail
@pytest.mark.parametrize(
"mysql_integer_type, mysql_string_type, mysql_text_type, chunk, with_rowid",
[
# 00000
(None, None, None, None, False),
# 10000
("BIGINT(19)", None, None, None, False),
# 01000
(None, "VARCHAR(512)", None, None, False),
# 11000
("BIGINT(19)", "VARCHAR(512)", None, None, False),
# 00100
(None, None, "MEDIUMTEXT", None, False),
# 10100
("BIGINT(19)", None, "MEDIUMTEXT", None, False),
# 01100
(None, "VARCHAR(512)", "MEDIUMTEXT", None, False),
# 11100
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", None, False),
# 00010
(None, None, None, 10, False),
# 10010
("BIGINT(19)", None, None, 10, False),
# 01010
(None, "VARCHAR(512)", None, 10, False),
# 11010
("BIGINT(19)", "VARCHAR(512)", None, 10, False),
# 00110
(None, None, "MEDIUMTEXT", 10, False),
# 10110
("BIGINT(19)", None, "MEDIUMTEXT", 10, False),
# 01110
(None, "VARCHAR(512)", "MEDIUMTEXT", 10, False),
# 11110
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", 10, False),
# 00001
(None, None, None, None, True),
# 10001
("BIGINT(19)", None, None, None, True),
# 01001
(None, "VARCHAR(512)", None, None, True),
# 11001
("BIGINT(19)", "VARCHAR(512)", None, None, True),
# 00101
(None, None, "MEDIUMTEXT", None, True),
# 10101
("BIGINT(19)", None, "MEDIUMTEXT", None, True),
# 01101
(None, "VARCHAR(512)", "MEDIUMTEXT", None, True),
# 11101
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", None, True),
# 00011
(None, None, None, 10, True),
# 10011
("BIGINT(19)", None, None, 10, True),
# 01011
(None, "VARCHAR(512)", None, 10, True),
# 11011
("BIGINT(19)", "VARCHAR(512)", None, 10, True),
# 00111
(None, None, "MEDIUMTEXT", 10, True),
# 10111
("BIGINT(19)", None, "MEDIUMTEXT", 10, True),
# 01111
(None, "VARCHAR(512)", "MEDIUMTEXT", 10, True),
# 11111
("BIGINT(19)", "VARCHAR(512)", "MEDIUMTEXT", 10, True),
],
)
def test_quiet(
self,
cli_runner: CliRunner,
mysql_database: Engine,
sqlite_database: str,
mysql_credentials: MySQLCredentials,
mysql_integer_type: t.Optional[str],
mysql_string_type: t.Optional[str],
mysql_text_type: t.Optional[str],
chunk: t.Optional[int],
with_rowid: bool,
) -> None:
arguments: t.List[str] = [
"-f",
sqlite_database,
"-d",
mysql_credentials.database,
"-u",
mysql_credentials.user,
"--mysql-password",
mysql_credentials.password,
"-h",
mysql_credentials.host,
"-P",
str(mysql_credentials.port),
"-q",
]
if mysql_integer_type:
arguments.append("--mysql-integer-type")
arguments.append(mysql_integer_type)
if mysql_string_type:
arguments.append("--mysql-string-type")
arguments.append(mysql_string_type)
if mysql_text_type:
arguments.append("--mysql-text-type")
arguments.append(mysql_text_type)
if chunk:
arguments.append("-c")
arguments.append(str(chunk))
if with_rowid:
arguments.append("--with-rowid")
result = cli_runner.invoke(
sqlite3mysql,
arguments,
)
print(result.output)
assert result.exit_code == 0
assert (
f"{sqlite3mysql.name} version {package_version} Copyright (c) 2018-{datetime.now().year} Klemen Tusar"
in result.output
)
sqlite3-to-mysql-2.1.7/tests/models.py
import typing as t
from datetime import date, datetime, time
from decimal import Decimal
from os import environ
import sqlalchemy.types as types
from sqlalchemy import (
BLOB,
CHAR,
DECIMAL,
JSON,
REAL,
TIMESTAMP,
VARCHAR,
BigInteger,
Column,
Dialect,
ForeignKey,
Integer,
SmallInteger,
String,
Table,
Text,
)
from sqlalchemy.orm import DeclarativeBase, Mapped, backref, mapped_column, relationship
from sqlalchemy.sql.functions import current_timestamp
class MyCustomType(types.TypeDecorator):
impl: t.Type[String] = types.String
def load_dialect_impl(self, dialect: Dialect) -> t.Any:
return dialect.type_descriptor(types.VARCHAR(self.length))
def process_bind_param(self, value: t.Any, dialect: Dialect) -> str:
return str(value)
def process_result_value(self, value: t.Any, dialect: Dialect) -> str:
return str(value)
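# MyCustomType above is a deliberately simple TypeDecorator: it persists any
# Python value as VARCHAR and reads it back as str. Misc.my_type_field below
# uses it so the transfer is exercised against a user-defined column type.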
class Base(DeclarativeBase):
pass
class Author(Base):
__tablename__ = "authors"
id: Mapped[int] = mapped_column(primary_key=True)
name: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
def __repr__(self):
return f"<Author(id='{self.id}', name='{self.name}')>"
article_authors: Table = Table(
"article_authors",
Base.metadata,
Column("article_id", Integer, ForeignKey("articles.id"), primary_key=True),
Column("author_id", Integer, ForeignKey("authors.id"), primary_key=True),
)
class Image(Base):
__tablename__ = "images"
id: Mapped[int] = mapped_column(primary_key=True)
path: Mapped[str] = mapped_column(String(255), index=True)
description: Mapped[str] = mapped_column(String(255), nullable=True)
def __repr__(self):
return f"<Image(id='{self.id}', path='{self.path}')>"
article_images: Table = Table(
"article_images",
Base.metadata,
Column("article_id", Integer, ForeignKey("articles.id"), primary_key=True),
Column("image_id", Integer, ForeignKey("images.id"), primary_key=True),
)
class Tag(Base):
__tablename__ = "tags"
id: Mapped[int] = mapped_column(primary_key=True)
name: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
def __repr__(self):
return f"<Tag(id='{self.id}', name='{self.name}')>"
article_tags = Table(
"article_tags",
Base.metadata,
Column("article_id", Integer, ForeignKey("articles.id"), primary_key=True),
Column("tag_id", Integer, ForeignKey("tags.id"), primary_key=True),
)
class Misc(Base):
"""This model contains all possible MySQL types"""
__tablename__ = "misc"
id: Mapped[int] = mapped_column(primary_key=True)
big_integer_field: Mapped[int] = mapped_column(BigInteger, default=0)
blob_field: Mapped[bytes] = mapped_column(BLOB, nullable=True, index=True)
boolean_field: Mapped[bool] = mapped_column(default=False)
char_field: Mapped[str] = mapped_column(CHAR(255), nullable=True)
date_field: Mapped[date] = mapped_column(nullable=True)
date_time_field: Mapped[datetime] = mapped_column(nullable=True)
decimal_field: Mapped[Decimal] = mapped_column(DECIMAL(10, 2), nullable=True)
float_field: Mapped[Decimal] = mapped_column(DECIMAL(12, 4), default=0)
integer_field: Mapped[int] = mapped_column(default=0)
if environ.get("LEGACY_DB", "0") == "0":
json_field: Mapped[t.Mapping[str, t.Any]] = mapped_column(JSON, nullable=True)
numeric_field: Mapped[Decimal] = mapped_column(DECIMAL(12, 4), default=0)
real_field: Mapped[float] = mapped_column(REAL(12), default=0)
small_integer_field: Mapped[int] = mapped_column(SmallInteger, default=0)
string_field: Mapped[str] = mapped_column(String(255), nullable=True)
text_field: Mapped[str] = mapped_column(Text, nullable=True)
time_field: Mapped[time] = mapped_column(nullable=True)
varchar_field: Mapped[str] = mapped_column(VARCHAR(255), nullable=True)
timestamp_field: Mapped[datetime] = mapped_column(TIMESTAMP, default=current_timestamp())
my_type_field: Mapped[t.Any] = mapped_column(MyCustomType(255), nullable=True)
article_misc: Table = Table(
"article_misc",
Base.metadata,
Column("article_id", Integer, ForeignKey("articles.id"), primary_key=True),
Column("misc_id", Integer, ForeignKey("misc.id"), primary_key=True),
)
class Media(Base):
__tablename__ = "media"
id: Mapped[str] = mapped_column(CHAR(64), primary_key=True)
title: Mapped[str] = mapped_column(String(255), index=True)
description: Mapped[str] = mapped_column(String(255), nullable=True)
def __repr__(self):
return f"<Media(id='{self.id}', title='{self.title}')>"
article_media = Table(
"article_media",
Base.metadata,
Column("article_id", Integer, ForeignKey("articles.id"), primary_key=True),
Column("media_id", CHAR(64), ForeignKey("media.id"), primary_key=True),
)
class Article(Base):
__tablename__ = "articles"
id: Mapped[int] = mapped_column(primary_key=True)
hash: Mapped[str] = mapped_column(String(32), unique=True)
slug: Mapped[str] = mapped_column(String(255), index=True)
title: Mapped[str] = mapped_column(String(255), index=True)
content: Mapped[str] = mapped_column(Text, nullable=True, index=True)
status: Mapped[str] = mapped_column(CHAR(1), index=True)
published: Mapped[datetime] = mapped_column(nullable=True)
# relationships
authors: Mapped[t.List[Author]] = relationship(
"Author",
secondary=article_authors,
backref=backref("authors", lazy="dynamic"),
lazy="dynamic",
)
tags: Mapped[t.List[Tag]] = relationship(
"Tag",
secondary=article_tags,
backref=backref("tags", lazy="dynamic"),
lazy="dynamic",
)
images: Mapped[t.List[Image]] = relationship(
"Image",
secondary=article_images,
backref=backref("images", lazy="dynamic"),
lazy="dynamic",
)
media: Mapped[t.List[Media]] = relationship(
"Media",
secondary=article_media,
backref=backref("media", lazy="dynamic"),
lazy="dynamic",
)
misc: Mapped[t.List[Misc]] = relationship(
"Misc",
secondary=article_misc,
backref=backref("misc", lazy="dynamic"),
lazy="dynamic",
)
def __repr__(self):
return f"<Article(id='{self.id}', title='{self.title}')>"
sqlite3-to-mysql-2.1.7/tests/unit/__init__.py
sqlite3-to-mysql-2.1.7/tests/unit/sqlite3_to_mysql_test.py
import logging
import re
import typing as t
from random import choice
import mysql.connector
import pytest
from _pytest.logging import LogCaptureFixture
from faker import Faker
from mysql.connector import errorcode
from pytest_mock import MockerFixture, MockFixture
from sqlalchemy import Connection, CursorResult, Engine, Inspector, TextClause, create_engine, inspect, text
from sqlalchemy.dialects.sqlite import __all__ as sqlite_column_types
from sqlite3_to_mysql import SQLite3toMySQL
from tests.conftest import MySQLCredentials
@pytest.mark.usefixtures("sqlite_database", "mysql_instance")
class TestSQLite3toMySQL:
@pytest.mark.parametrize("quiet", [False, True])
def test_translate_type_from_sqlite_to_mysql_invalid_column_type(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
mocker: MockerFixture,
quiet: bool,
) -> None:
proc: SQLite3toMySQL = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
with pytest.raises(ValueError) as excinfo:
mocker.patch.object(proc, "_valid_column_type", return_value=False)
proc._translate_type_from_sqlite_to_mysql("text")
assert "is not a valid column_type!" in str(excinfo.value)
@pytest.mark.parametrize(
"mysql_integer_type, mysql_string_type, mysql_text_type",
[
("INT(11)", "VARCHAR(300)", "TEXT"),
("BIGINT(19)", "TEXT", "MEDIUMTEXT"),
("BIGINT(19)", "MEDIUMTEXT", "TINYTEXT"),
("BIGINT(20) UNSIGNED", "CHAR(100)", "LONGTEXT"),
],
)
def test_translate_type_from_sqlite_to_mysql_all_valid_columns(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
faker: Faker,
mysql_integer_type: str,
mysql_string_type: str,
mysql_text_type: str,
) -> None:
proc: SQLite3toMySQL = SQLite3toMySQL( # type: ignore
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
mysql_integer_type=mysql_integer_type,
mysql_string_type=mysql_string_type,
mysql_text_type=mysql_text_type,
)
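# Exercise every SQLite column type exported by SQLAlchemy's sqlite dialect,
# plus the non-standard INT64 alias.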
for column in sqlite_column_types + ("INT64",):
if column in {"Insert", "insert", "dialect"}:
continue
elif column == "VARCHAR":
assert proc._translate_type_from_sqlite_to_mysql(column) == proc._mysql_string_type
elif column in {"INTEGER", "INT"}:
assert proc._translate_type_from_sqlite_to_mysql(column) == proc._mysql_integer_type
elif column in {"INT64", "NUMERIC"}:
assert proc._translate_type_from_sqlite_to_mysql(column) == "BIGINT(19)"
elif column in {"TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT"}:
assert proc._translate_type_from_sqlite_to_mysql(column) == proc._mysql_text_type
elif column == "BOOLEAN":
assert proc._translate_type_from_sqlite_to_mysql(column) == "TINYINT(1)"
else:
assert proc._translate_type_from_sqlite_to_mysql(column) == column
assert proc._translate_type_from_sqlite_to_mysql("TEXT") == proc._mysql_text_type
assert proc._translate_type_from_sqlite_to_mysql("CLOB") == proc._mysql_text_type
assert proc._translate_type_from_sqlite_to_mysql("CHARACTER") == "CHAR"
length: int = faker.pyint(min_value=1, max_value=99)
assert proc._translate_type_from_sqlite_to_mysql(f"CHARACTER({length})") == f"CHAR({length})"
assert proc._translate_type_from_sqlite_to_mysql("NCHAR") == "CHAR"
length = faker.pyint(min_value=1, max_value=99)
assert proc._translate_type_from_sqlite_to_mysql(f"NCHAR({length})") == f"CHAR({length})"
assert proc._translate_type_from_sqlite_to_mysql("NATIVE CHARACTER") == "CHAR"
length = faker.pyint(min_value=1, max_value=99)
assert proc._translate_type_from_sqlite_to_mysql(f"NATIVE CHARACTER({length})") == f"CHAR({length})"
assert proc._translate_type_from_sqlite_to_mysql("VARCHAR") == proc._mysql_string_type
length = faker.pyint(min_value=1, max_value=255)
assert proc._translate_type_from_sqlite_to_mysql(f"VARCHAR({length})") == re.sub(
r"\d+", str(length), proc._mysql_string_type
)
assert proc._translate_type_from_sqlite_to_mysql("DOUBLE PRECISION") == "DOUBLE PRECISION"
assert proc._translate_type_from_sqlite_to_mysql("UNSIGNED BIG INT") == "BIGINT UNSIGNED"
length = faker.pyint(min_value=1000000000, max_value=99999999999999999999)
assert proc._translate_type_from_sqlite_to_mysql(f"UNSIGNED BIG INT({length})") == f"BIGINT({length}) UNSIGNED"
assert proc._translate_type_from_sqlite_to_mysql("INT1") == "TINYINT"
assert proc._translate_type_from_sqlite_to_mysql("INT2") == "SMALLINT"
assert proc._translate_type_from_sqlite_to_mysql("INT3") == "MEDIUMINT"
assert proc._translate_type_from_sqlite_to_mysql("INT4") == "INT"
assert proc._translate_type_from_sqlite_to_mysql("INT8") == "BIGINT"
length = faker.pyint(min_value=1, max_value=11)
assert proc._translate_type_from_sqlite_to_mysql(f"INT({length})") == re.sub(
r"\d+", str(length), proc._mysql_integer_type
)
for column in {"META", "FOO", "BAR"}:
assert proc._translate_type_from_sqlite_to_mysql(column) == proc._mysql_string_type
precision: int = faker.pyint(min_value=3, max_value=19)
scale: int = faker.pyint(min_value=0, max_value=precision - 1)
assert (
proc._translate_type_from_sqlite_to_mysql(f"DECIMAL({precision},{scale})")
== f"DECIMAL({precision},{scale})"
)
@pytest.mark.parametrize(
"sqlite_data_type, mysql_data_type",
[
("INT", "INT(11)"),
("INT(5)", "INT(5)"),
("INT UNSIGNED", "INT(11) UNSIGNED"),
("INT(5) UNSIGNED", "INT(5) UNSIGNED"),
("INTEGER", "INT(11)"),
("TINYINT", "TINYINT"),
("TINYINT UNSIGNED", "TINYINT UNSIGNED"),
("TINYINT(4)", "TINYINT(4)"),
("TINYINT(4) UNSIGNED", "TINYINT(4) UNSIGNED"),
("SMALLINT", "SMALLINT"),
("SMALLINT UNSIGNED", "SMALLINT UNSIGNED"),
("SMALLINT(6)", "SMALLINT(6)"),
("SMALLINT(6) UNSIGNED", "SMALLINT(6) UNSIGNED"),
("MEDIUMINT", "MEDIUMINT"),
("MEDIUMINT UNSIGNED", "MEDIUMINT UNSIGNED"),
("MEDIUMINT(9)", "MEDIUMINT(9)"),
("MEDIUMINT(9) UNSIGNED", "MEDIUMINT(9) UNSIGNED"),
("BIGINT", "BIGINT"),
("BIGINT UNSIGNED", "BIGINT UNSIGNED"),
("BIGINT(20)", "BIGINT(20)"),
("BIGINT(20) UNSIGNED", "BIGINT(20) UNSIGNED"),
("UNSIGNED BIG INT", "BIGINT UNSIGNED"),
("INT1", "TINYINT"),
("INT1 UNSIGNED", "TINYINT UNSIGNED"),
("INT1(3)", "TINYINT(3)"),
("INT1(3) UNSIGNED", "TINYINT(3) UNSIGNED"),
("INT2", "SMALLINT"),
("INT2 UNSIGNED", "SMALLINT UNSIGNED"),
("INT2(6)", "SMALLINT(6)"),
("INT2(6) UNSIGNED", "SMALLINT(6) UNSIGNED"),
("INT3", "MEDIUMINT"),
("INT3 UNSIGNED", "MEDIUMINT UNSIGNED"),
("INT3(9)", "MEDIUMINT(9)"),
("INT3(9) UNSIGNED", "MEDIUMINT(9) UNSIGNED"),
("INT4", "INT"),
("INT4 UNSIGNED", "INT UNSIGNED"),
("INT4(11)", "INT(11)"),
("INT4(11) UNSIGNED", "INT(11) UNSIGNED"),
("INT8", "BIGINT"),
("INT8 UNSIGNED", "BIGINT UNSIGNED"),
("INT8(19)", "BIGINT(19)"),
("INT8(19) UNSIGNED", "BIGINT(19) UNSIGNED"),
("NUMERIC", "BIGINT(19)"),
("DOUBLE", "DOUBLE"),
("DOUBLE UNSIGNED", "DOUBLE UNSIGNED"),
("DOUBLE(10,5)", "DOUBLE(10,5)"),
("DOUBLE(10,5) UNSIGNED", "DOUBLE(10,5) UNSIGNED"),
("DOUBLE PRECISION", "DOUBLE PRECISION"),
("DOUBLE PRECISION UNSIGNED", "DOUBLE PRECISION UNSIGNED"),
("DOUBLE PRECISION(10,5)", "DOUBLE PRECISION(10,5)"),
("DOUBLE PRECISION(10,5) UNSIGNED", "DOUBLE PRECISION(10,5) UNSIGNED"),
("DECIMAL", "DECIMAL"),
("DECIMAL UNSIGNED", "DECIMAL UNSIGNED"),
("DECIMAL(10,5)", "DECIMAL(10,5)"),
("DECIMAL(10,5) UNSIGNED", "DECIMAL(10,5) UNSIGNED"),
("REAL", "REAL"),
("REAL UNSIGNED", "REAL UNSIGNED"),
("REAL(10,5)", "REAL(10,5)"),
("REAL(10,5) UNSIGNED", "REAL(10,5) UNSIGNED"),
("FLOAT", "FLOAT"),
("FLOAT UNSIGNED", "FLOAT UNSIGNED"),
("FLOAT(10,5)", "FLOAT(10,5)"),
("FLOAT(10,5) UNSIGNED", "FLOAT(10,5) UNSIGNED"),
("DEC", "DEC"),
("DEC UNSIGNED", "DEC UNSIGNED"),
("DEC(10,5)", "DEC(10,5)"),
("DEC(10,5) UNSIGNED", "DEC(10,5) UNSIGNED"),
("FIXED", "FIXED"),
("FIXED UNSIGNED", "FIXED UNSIGNED"),
("FIXED(10,5)", "FIXED(10,5)"),
("FIXED(10,5) UNSIGNED", "FIXED(10,5) UNSIGNED"),
("BOOLEAN", "TINYINT(1)"),
("INT64", "BIGINT(19)"),
],
)
def test_translate_type_from_sqlite_to_mysql_all_valid_numeric_columns_signed_unsigned(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
sqlite_data_type: str,
mysql_data_type: str,
) -> None:
proc: SQLite3toMySQL = SQLite3toMySQL( # type: ignore
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
)
assert proc._translate_type_from_sqlite_to_mysql(sqlite_data_type) == mysql_data_type
@pytest.mark.parametrize("quiet", [False, True])
def test_create_database_connection_error(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
mocker: MockerFixture,
faker: Faker,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
proc: SQLite3toMySQL = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
class FakeCursor:
def execute(self, statement: t.Any) -> None:
raise mysql.connector.Error(msg="Unknown MySQL error", errno=errorcode.CR_UNKNOWN_ERROR)
mocker.patch.object(proc, "_mysql_cur", FakeCursor())
with pytest.raises(mysql.connector.Error) as excinfo:
caplog.set_level(logging.DEBUG)
proc._create_database()
assert str(errorcode.CR_UNKNOWN_ERROR) in str(excinfo.value)
assert any(str(errorcode.CR_UNKNOWN_ERROR) in message for message in caplog.messages)
@pytest.mark.parametrize("quiet", [False, True])
def test_create_table_cursor_error(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
mocker: MockerFixture,
faker: Faker,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
proc = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
class FakeCursor:
def execute(self, statement):
raise mysql.connector.Error(msg="Unknown MySQL error", errno=errorcode.CR_UNKNOWN_ERROR)
mocker.patch.object(proc, "_mysql_cur", FakeCursor())
sqlite_engine: Engine = create_engine(f"sqlite:///{sqlite_database}")
sqlite_inspect: Inspector = inspect(sqlite_engine)
sqlite_tables: t.List[str] = sqlite_inspect.get_table_names()
with pytest.raises(mysql.connector.Error) as excinfo:
caplog.set_level(logging.DEBUG)
proc._create_table(choice(sqlite_tables))
assert str(errorcode.CR_UNKNOWN_ERROR) in str(excinfo.value)
assert any(str(errorcode.CR_UNKNOWN_ERROR) in message for message in caplog.messages)
sqlite_engine.dispose()
@pytest.mark.parametrize("quiet", [False, True])
def test_process_cursor_error(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
mocker: MockerFixture,
faker: Faker,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
proc = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
def fake_transfer_table_data(sql, total_records=0):
raise mysql.connector.Error(msg="Unknown MySQL error", errno=errorcode.CR_UNKNOWN_ERROR)
mocker.patch.object(proc, "_transfer_table_data", fake_transfer_table_data)
with pytest.raises(mysql.connector.Error) as excinfo:
caplog.set_level(logging.DEBUG)
proc.transfer()
assert str(errorcode.CR_UNKNOWN_ERROR) in str(excinfo.value)
assert any(str(errorcode.CR_UNKNOWN_ERROR) in message for message in caplog.messages)
@pytest.mark.parametrize("quiet", [False, True])
def test_add_indices_error(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
mocker: MockerFixture,
faker: Faker,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
proc = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
sqlite_engine: Engine = create_engine(f"sqlite:///{sqlite_database}")
sqlite_inspect: Inspector = inspect(sqlite_engine)
sqlite_tables: t.List[str] = sqlite_inspect.get_table_names()
tables_with_indices: t.List[str] = []
for table in sqlite_tables:
if sqlite_inspect.get_indexes(table):
tables_with_indices.append(table)
table_name: str = choice(tables_with_indices)
proc._create_table(table_name)
class FakeCursor:
def execute(self, statement):
raise mysql.connector.Error(msg="Unknown MySQL error", errno=errorcode.CR_UNKNOWN_ERROR)
mocker.patch.object(proc, "_mysql_cur", FakeCursor())
with pytest.raises(mysql.connector.Error) as excinfo:
caplog.set_level(logging.DEBUG)
proc._add_indices(table_name)
assert str(errorcode.CR_UNKNOWN_ERROR) in str(excinfo.value)
assert any(str(errorcode.CR_UNKNOWN_ERROR) in message for message in caplog.messages)
sqlite_engine.dispose()
@pytest.mark.parametrize("quiet", [False, True])
def test_add_foreign_keys_error(
self,
sqlite_database: str,
mysql_database: Engine,
mysql_credentials: MySQLCredentials,
mocker: MockFixture,
faker: Faker,
caplog: LogCaptureFixture,
quiet: bool,
) -> None:
proc = SQLite3toMySQL( # type: ignore[call-arg]
sqlite_file=sqlite_database,
mysql_user=mysql_credentials.user,
mysql_password=mysql_credentials.password,
mysql_host=mysql_credentials.host,
mysql_port=mysql_credentials.port,
mysql_database=mysql_credentials.database,
quiet=quiet,
)
sqlite_engine: Engine = create_engine(f"sqlite:///{sqlite_database}")
sqlite_inspect: Inspector = inspect(sqlite_engine)
sqlite_cnx: Connection = sqlite_engine.connect()
sqlite_tables: t.List[str] = sqlite_inspect.get_table_names()
tables_with_foreign_keys: t.List[str] = []
for table in sqlite_tables:
sqlite_fk_stmt: TextClause = text(f'PRAGMA foreign_key_list("{table}")')
sqlite_fk_result: CursorResult[t.Any] = sqlite_cnx.execute(sqlite_fk_stmt)
if sqlite_fk_result.returns_rows:
for _ in sqlite_fk_result:
tables_with_foreign_keys.append(table)
break
table_name: str = choice(tables_with_foreign_keys)
proc._create_table(table_name)
class FakeCursor:
def execute(self, statement):
raise mysql.connector.Error(msg="Unknown MySQL error", errno=errorcode.CR_UNKNOWN_ERROR)
mocker.patch.object(proc, "_mysql_cur", FakeCursor())
with pytest.raises(mysql.connector.Error) as excinfo:
caplog.set_level(logging.DEBUG)
proc._add_foreign_keys(table_name)
assert str(errorcode.CR_UNKNOWN_ERROR) in str(excinfo.value)
assert any(str(errorcode.CR_UNKNOWN_ERROR) in message for message in caplog.messages)
sqlite_cnx.close()
sqlite_engine.dispose()
sqlite3-to-mysql-2.1.7/tox.ini
[tox]
isolated_build = true
envlist =
python3.8,
python3.9,
python3.10,
python3.11,
python3.12,
black,
flake8,
linters,
skip_missing_interpreters = true
[gh-actions]
python =
3.8: python3.8
3.9: python3.9
3.10: python3.10
3.11: python3.11
3.12: python3.12
[testenv]
passenv =
LANG
LEGACY_DB
deps =
-rrequirements_dev.txt
commands =
pytest -v --cov=sqlite3_to_mysql --cov-report=xml
[testenv:black]
basepython = python3
skip_install = true
deps =
black
commands =
black sqlite3_to_mysql tests/
[testenv:isort]
basepython = python3
skip_install = true
deps =
isort
commands =
isort --check-only --diff .
[testenv:flake8]
basepython = python3
skip_install = true
deps =
flake8
flake8-colors
flake8-docstrings
flake8-import-order
flake8-typing-imports
pep8-naming
commands = flake8 sqlite3_to_mysql
[testenv:pylint]
basepython = python3
skip_install = true
deps =
pylint
-rrequirements_dev.txt
commands =
pylint --rcfile=tox.ini sqlite3_to_mysql
[testenv:bandit]
basepython = python3
skip_install = true
deps =
bandit
commands =
bandit -r sqlite3_to_mysql -c .bandit.yml
[testenv:mypy]
basepython = python3
skip_install = true
deps =
mypy>=1.3.0
-rrequirements_dev.txt
commands =
mypy sqlite3_to_mysql --enable-incomplete-feature=Unpack
[testenv:linters]
basepython = python3
skip_install = true
deps =
{[testenv:black]deps}
{[testenv:isort]deps}
{[testenv:flake8]deps}
{[testenv:pylint]deps}
{[testenv:bandit]deps}
{[testenv:mypy]deps}
commands =
{[testenv:black]commands}
{[testenv:isort]commands}
{[testenv:flake8]commands}
{[testenv:pylint]commands}
{[testenv:bandit]commands}
{[testenv:mypy]commands}
[flake8]
ignore = I100,I201,I202,D203,D401,W503,E203,F401,F403,C901,E501
exclude =
*__init__.py
.tox
max-complexity = 10
max-line-length = 88
import-order-style = pycharm
application-import-names = flake8
[pylint]
disable = C0209,C0301,C0411,R,W0107,W0622
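# Example invocations (assumed workflow, not enforced by this config):
#   tox                  # run the pytest envs from envlist plus black, flake8 and linters
#   tox -e linters       # run black, isort, flake8, pylint, bandit and mypy in one go
#   tox -e python3.11    # run the test suite under a single interpreter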
07070100000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000B00000000TRAILER!!!405 blocks