pax_global_header00006660000000000000000000000064146130213150014506gustar00rootroot0000000000000052 comment=9f0f5629aad757dbaf700179ddd663e189220742 mssql-django-1.5/000077500000000000000000000000001461302131500137525ustar00rootroot00000000000000mssql-django-1.5/.editorconfig000066400000000000000000000003461461302131500164320ustar00rootroot00000000000000# https://editorconfig.org/ root = true [*] indent_style = space indent_size = 4 insert_final_newline = true trim_trailing_whitespace = true end_of_line = lf charset = utf-8 max_line_length = 119 [*.{yml,yaml}] indent_size = 2 mssql-django-1.5/.github/000077500000000000000000000000001461302131500153125ustar00rootroot00000000000000mssql-django-1.5/.github/ISSUE_TEMPLATE/000077500000000000000000000000001461302131500174755ustar00rootroot00000000000000mssql-django-1.5/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000017361461302131500221760ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve title: '' labels: '' assignees: '' --- There are some features which are not supported yet. Please check the [Limitations](https://github.com/microsoft/mssql-django#limitations) first to see if your bug is listed. **Software versions** * Django: * mssql-django: * python: * SQL Server: * OS: **Table schema and Model** **Database Connection Settings** ` // Paste your database settings from Settings.py here. ` **Problem description and steps to reproduce** **Expected behavior and actual behavior** **Error message/stack trace** **Any other details that can be helpful** mssql-django-1.5/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000014701461302131500232240ustar00rootroot00000000000000--- name: Feature Request about: Suggest an idea for this project title: "[FEATURE REQUEST]" labels: enhancement assignees: '' --- **Is your feature request related to a problem? If so, please give a short summary of the problem and how the feature would resolve it** **Describe the preferred solution** **Describe alternatives you've considered** **Additional context** **Reference Documentations/Specifications** mssql-django-1.5/.github/ISSUE_TEMPLATE/question.md000066400000000000000000000005011461302131500216620ustar00rootroot00000000000000--- name: Question about: Ask a question title: "[QUESTION]" labels: question assignees: '' --- **Question** **Relevant Issues and Pull Requests** mssql-django-1.5/.github/workflows/000077500000000000000000000000001461302131500173475ustar00rootroot00000000000000mssql-django-1.5/.github/workflows/codeql-analysis.yml000066400000000000000000000044631461302131500231710ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. 
# name: "CodeQL" on: push: branches: [ dev ] pull_request: # The branches below must be a subset of the branches above branches: [ dev ] schedule: - cron: '40 13 * * 3' jobs: analyze: name: Analyze runs-on: ubuntu-latest strategy: fail-fast: false matrix: language: [ 'python' ] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] # Learn more: # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed steps: - name: Checkout repository uses: actions/checkout@v3 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # queries: ./path/to/local/query, your-org/your-repo/queries@main # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v3 # â„šī¸ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl # âœī¸ If the Autobuild fails above, remove it and uncomment the following three lines # and modify them (or add more) to build your code if your project # uses a compiled language #- run: | # make bootstrap # make release - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 mssql-django-1.5/.github/workflows/devskim.yml000066400000000000000000000014471461302131500215420ustar00rootroot00000000000000# This workflow uses actions that are not certified by GitHub. # They are provided by a third-party and are governed by # separate terms of service, privacy policy, and support # documentation. name: DevSkim on: push: branches: [ dev, master ] pull_request: branches: [ dev ] schedule: - cron: '29 14 * * 3' jobs: lint: name: DevSkim runs-on: ubuntu-20.04 permissions: actions: read contents: read security-events: write steps: - name: Checkout code uses: actions/checkout@v3 - name: Run DevSkim scanner uses: microsoft/DevSkim-Action@v1 - name: Upload DevSkim scan results to GitHub Security tab uses: github/codeql-action/upload-sarif@v2 with: sarif_file: devskim-results.sarif mssql-django-1.5/.gitignore000066400000000000000000000001701461302131500157400ustar00rootroot00000000000000*.py[co] *.sw[a-z] *.orig *~ .DS_Store Thumbs.db *.egg-info *.dll tests/local_settings.py # Virtual Env /venv/ .idea/ mssql-django-1.5/CODE_OF_CONDUCT.md000066400000000000000000000006731461302131500165570ustar00rootroot00000000000000# Microsoft Open Source Code of Conduct This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). Resources: - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/) - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concernsmssql-django-1.5/CONTRIBUTING.md000066400000000000000000000037251461302131500162120ustar00rootroot00000000000000# Contributing ## How to contribute ### Run unit tests After changes made to the project, it's a good idea to run the unit tests before making a pull request. 1. 
**Run SQL Server**

   Make sure you have SQL Server running on your local machine. Download and install SQL Server [here](https://www.microsoft.com/en-us/sql-server/sql-server-downloads), or use Docker instead.
   Change `testapp/settings.py` to match your SQL Server login username and password.

   ```
   docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=Placeholder' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2019-latest
   ```

2. **Clone Django**

   In the `mssql-django` folder:

   ```
   # Install your local mssql-django
   pip install -e .

   # The unit test suite is in the `django` folder, so we need to clone it
   git clone https://github.com/django/django.git --depth 1
   ```

3. **Install Tox**

   ```
   # We use `tox` to run tests and install dependencies
   pip install tox
   ```

4. **Run Tox**

   ```
   # e.g. run Django 3.1 tests with Python 3.7
   tox -e py37-django31
   ```

---

This project welcomes contributions and suggestions. Most contributions require you to
agree to a Contributor License Agreement (CLA) declaring that you have the right to,
and actually do, grant us the rights to use your contribution. For details, visit
https://cla.opensource.microsoft.com.

When you submit a pull request, a CLA bot will automatically determine whether you need
to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply
follow the instructions provided by the bot. You will only need to do this once across
all repos using our CLA.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
mssql-django-1.5/CodeQL.yml000066400000000000000000000000531461302131500156020ustar00rootroot00000000000000path_classifiers:
  library:
    - "django"
mssql-django-1.5/LICENSE.txt000066400000000000000000000032111461302131500155720ustar00rootroot00000000000000Project Name: mssql-django

BSD 3-Clause License

Copyright (c) 2021, Microsoft Corporation
              2019, ES Solutions AB
              2018, Michiya Takahashi
              2008, 2009 django-pyodbc developers
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
mssql-django-1.5/MANIFEST.in000066400000000000000000000001621461302131500155070ustar00rootroot00000000000000include LICENSE.txt
include MANIFEST.in
include README.md
recursive-include mssql *.py
recursive-exclude docker *
mssql-django-1.5/NOTICE.md000066400000000000000000000033771461302131500152660ustar00rootroot00000000000000# Notices

This repository incorporates material as listed below or described in the code.

## django-mssql-backend

Please see below for the associated license for the incorporated material
from django-mssql-backend (https://github.com/ESSolutions/django-mssql-backend).

### BSD 3-Clause License

Copyright (c) 2019, ES Solutions AB
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
mssql-django-1.5/README.md000066400000000000000000000246011461302131500152340ustar00rootroot00000000000000# Microsoft Django backend for SQL Server

Welcome to the MSSQL-Django 3rd party backend project!

*mssql-django* is a fork of [django-mssql-backend](https://pypi.org/project/django-mssql-backend/). This project provides an enterprise database connectivity option for the Django Web Framework, with support for Microsoft SQL Server and Azure SQL Database.

We'd like to give thanks to the community that made this project possible, with particular recognition of the contributors: OskarPersson, michiya, dlo and the original Google Code django-pyodbc team. Moving forward we encourage participation in this project from both old and new contributors!

We hope you enjoy using the MSSQL-Django 3rd party backend.
## Features

- Supports Django 3.2, 4.0, 4.1, 4.2 and 5.0
- Tested on Microsoft SQL Server 2016, 2017, 2019, 2022
- Passes most of the tests of the Django test suite
- Compatible with [Microsoft ODBC Driver for SQL Server](https://docs.microsoft.com/en-us/sql/connect/odbc/microsoft-odbc-driver-for-sql-server), [SQL Server Native Client](https://msdn.microsoft.com/en-us/library/ms131321(v=sql.120).aspx), and [FreeTDS](https://www.freetds.org/) ODBC drivers

## Dependencies

- pyodbc 3.0 or newer

## Installation

1. Install pyodbc 3.0 (or newer) and Django

2. Install mssql-django:

       pip install mssql-django

3. Set the `ENGINE` setting in the `settings.py` file used by your Django application or project to `'mssql'`:

       'ENGINE': 'mssql'

## Configuration

### Standard Django settings

The following entries in a database-level settings dictionary
in DATABASES control the behavior of the backend:

- ENGINE

  String. It must be `"mssql"`.

- NAME

  String. Database name. Required.

- HOST

  String. SQL Server instance in `"server\instance"` format.

- PORT

  String. Server instance port.
  An empty string means the default port.

- USER

  String. Database user name in `"user"` format.
  If not given then MS Integrated Security will be used.

- PASSWORD

  String. Database user password.

- TOKEN

  String. Access token fetched as a user or service principal which
  has access to the database. E.g. when using `azure.identity`, the result of
  `DefaultAzureCredential().get_token('https://database.windows.net/.default')`
  can be passed.

- AUTOCOMMIT

  Boolean. Set this to `False` if you want to disable
  Django's transaction management and implement your own.

- Trusted_Connection

  String. Default is `"yes"`. Can be set to `"no"` if required.

and the following entries are also available in the `TEST` dictionary
for any given database-level settings dictionary:

- NAME

  String. The name of database to use when running the test suite.
  If the default value (`None`) is used, the test database will use
  the name `"test_" + NAME`.

- COLLATION

  String. The collation order to use when creating the test database.
  If the default value (`None`) is used, the test database is assigned
  the default collation of the instance of SQL Server.

- DEPENDENCIES

  String. The creation-order dependencies of the database.
  See the official Django documentation for more details.

- MIRROR

  String. The alias of the database that this database should
  mirror during testing. Default value is `None`.
  See the official Django documentation for more details.

### OPTIONS

Dictionary. Currently available keys are:

- driver

  String. ODBC Driver to use (`"ODBC Driver 17 for SQL Server"`,
  `"SQL Server Native Client 11.0"`, `"FreeTDS"` etc).
  Default is `"ODBC Driver 17 for SQL Server"`.

- isolation_level

  String. Sets [transaction isolation level](https://docs.microsoft.com/en-us/sql/t-sql/statements/set-transaction-isolation-level-transact-sql)
  for each database session. Valid values for this entry are
  `READ UNCOMMITTED`, `READ COMMITTED`, `REPEATABLE READ`, `SNAPSHOT`,
  and `SERIALIZABLE`. Default is `None` which means no isolation level
  is set to a database session and SQL Server default will be used.

- dsn

  String. A named DSN can be used instead of `HOST`.

- host_is_server

  Boolean. Only relevant if using the FreeTDS ODBC driver under Unix/Linux.
  By default, when using the FreeTDS ODBC driver the value specified in
  the ``HOST`` setting is used in a ``SERVERNAME`` ODBC connection
  string component instead of being used in a ``SERVER`` component;
  this means that this value should be the name of a *dataserver*
  definition present in the ``freetds.conf`` FreeTDS configuration file
  instead of a hostname or an IP address.

  But if this option is present and its value is ``True``, this
  special behavior is turned off. Instead, connections to the database
  server will be established using ``HOST`` and ``PORT`` options, without
  requiring ``freetds.conf`` to be configured.

  See https://www.freetds.org/userguide/dsnless.html for more information.

- unicode_results

  Boolean. If it is set to ``True``, pyodbc's *unicode_results* feature
  is activated and strings returned from pyodbc are always Unicode.
  Default value is ``False``.

- extra_params

  String. Additional parameters for the ODBC connection. The format is
  ``"param=value;param=value"``, [Azure AD Authentication](https://github.com/microsoft/mssql-django/wiki/Azure-AD-Authentication)
  (Service Principal, Interactive, Msi) can be added to this field.

- collation

  String. Name of the collation to use when performing text field
  lookups against the database. Default is ``None``; this means no
  collation specifier is added to your lookup SQL (the default
  collation of your database will be used). For Chinese language you
  can set it to ``"Chinese_PRC_CI_AS"``.

- connection_timeout

  Integer. Sets the timeout in seconds for the database connection process.
  Default value is ``0`` which disables the timeout.

- connection_retries

  Integer. Sets the number of times to retry the database connection process.
  Default value is ``5``.

- connection_retry_backoff_time

  Integer. Sets the backoff time in seconds for retries of the database
  connection process. Default value is ``5``.

- query_timeout

  Integer. Sets the timeout in seconds for the database query.
  Default value is ``0`` which disables the timeout.

- [setencoding](https://github.com/mkleehammer/pyodbc/wiki/Connection#setencoding) and [setdecoding](https://github.com/mkleehammer/pyodbc/wiki/Connection#setdecoding)

  ```python
  # Example
  "OPTIONS": {
      "setdecoding": [
          {"sqltype": pyodbc.SQL_CHAR, "encoding": 'utf-8'},
          {"sqltype": pyodbc.SQL_WCHAR, "encoding": 'utf-8'}],
      "setencoding": [
          {"encoding": "utf-8"}],
      ...
  },
  ```

- return_rows_bulk_insert

  Boolean. Sets whether the backend can return rows from bulk insert.
  Default value is False, which doesn't allow the backend to return rows
  from bulk insert. Must be left as False if the database has tables with
  triggers, to prevent errors when inserting.

  ```python
  # Examples
  "OPTIONS": {
      # This database doesn't have any triggers so can use return
      # rows from bulk insert feature
      "return_rows_bulk_insert": True
  }

  "OPTIONS": {
      # This database has triggers so leave return_rows_bulk_insert as blank (False)
      # to prevent errors related to inserting and returning rows from bulk insert
  }
  ```

### Backend-specific settings

The following project-level settings also control the behavior of the backend:

- DATABASE_CONNECTION_POOLING

  Boolean. If it is set to ``False``, pyodbc's connection pooling feature
  won't be activated.
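For Azure AD token authentication (the `TOKEN` setting described above), here is a minimal sketch, assuming the separate `azure-identity` package is installed and your identity has access to the database:

```python
# Hypothetical settings.py fragment: fetch an access token with
# azure-identity and hand its string form to the backend.
from azure.identity import DefaultAzureCredential

# get_token() returns an AccessToken object; the backend expects the
# plain token string, hence .token below.
access_token = DefaultAzureCredential().get_token(
    'https://database.windows.net/.default'
)

DATABASES = {
    'default': {
        'ENGINE': 'mssql',
        'NAME': 'mydb',
        'HOST': 'myserver.database.windows.net',
        'PORT': '',
        'TOKEN': access_token.token,  # used instead of USER/PASSWORD
        'OPTIONS': {
            'driver': 'ODBC Driver 17 for SQL Server',
        },
    },
}
```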
### Example

Here is an example of the database settings:

```python
DATABASES = {
    'default': {
        'ENGINE': 'mssql',
        'NAME': 'mydb',
        'USER': 'user@myserver',
        'PASSWORD': 'password',
        'HOST': 'myserver.database.windows.net',
        'PORT': '',
        'OPTIONS': {
            'driver': 'ODBC Driver 17 for SQL Server',
        },
    },
}

# set this to False if you want to turn off pyodbc's connection pooling
DATABASE_CONNECTION_POOLING = False
```

## Limitations

The following features are currently not fully supported:

- Altering a model field from or to AutoField during a migration
- Django annotate functions have floating point arithmetic problems in some cases
- Annotate function with exists
- Exists function in order_by
- Right-hand power and arithmetic with datetimes
- Timezones and timedeltas are not fully supported
- Rename field/model with foreign key constraint
- Database level constraints
- Filtered index
- Date extract function
- Bulk insert into a table with a trigger and returning the rows inserted

JSONField lookups have limitations, more details [here](https://github.com/microsoft/mssql-django/wiki/JSONField).

## Contributing

More details on contributing can be found [here](CONTRIBUTING.md).

This project welcomes contributions and suggestions. Most contributions require you to
agree to a Contributor License Agreement (CLA) declaring that you have the right to,
and actually do, grant us the rights to use your contribution. For details, visit
https://cla.opensource.microsoft.com.

When you submit a pull request, a CLA bot will automatically determine whether you need
to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply
follow the instructions provided by the bot. You will only need to do this once across
all repos using our CLA.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.

## Security Reporting Instructions

For security reporting instructions please refer to the [`SECURITY.md`](SECURITY.md) file in this repository.

## Trademarks

This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
trademarks or logos is subject to and must follow
[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
Any use of third-party trademarks or logos is subject to those third parties' policies.
mssql-django-1.5/SECURITY.md000066400000000000000000000052501461302131500155450ustar00rootroot00000000000000# Security

Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below. ## Reporting Security Issues **Please do not report security vulnerabilities through public GitHub issues.** Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report). If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc). You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) * Full paths of source file(s) related to the manifestation of the issue * The location of the affected source code (tag/branch/commit or direct URL) * Any special configuration required to reproduce the issue * Step-by-step instructions to reproduce the issue * Proof-of-concept or exploit code (if possible) * Impact of the issue, including how an attacker might exploit the issue This information will help us triage your report more quickly. If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs. ## Preferred Languages We prefer all communications to be in English. ## Policy Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd). mssql-django-1.5/SUPPORT.md000066400000000000000000000005541461302131500154540ustar00rootroot00000000000000# Support ## How to file issues and get help This project uses GitHub Issues to track bugs and feature requests. Please search the existing issues before filing new issues to avoid duplicates. For new issues, file your bug or feature request as a new Issue. ## Microsoft Support Policy Support for this project is limited to the resources listed above. 
mssql-django-1.5/azure-pipelines.yml000066400000000000000000000166101461302131500176150ustar00rootroot00000000000000trigger: - master - dev - 1ES schedules: - cron: "0 9 * * *" displayName: Daily midnight build branches: include: - dev always: true variables: - group: DjangoTestApp jobs: - job: Windows pool: name: Django-1ES-pool demands: - imageOverride -equals JDBC-MMS2019-SQL2019-2 timeoutInMinutes: 120 strategy: matrix: Python3.12 - Django 5.0: python.version: '3.12' tox.env: 'py312-django50' Python3.11 - Django 5.0: python.version: '3.11' tox.env: 'py311-django50' Python3.10 - Django 5.0: python.version: '3.10' tox.env: 'py310-django50' Python3.11 - Django 4.2: python.version: '3.11' tox.env: 'py311-django42' Python3.10 - Django 4.2: python.version: '3.10' tox.env: 'py310-django42' Python 3.9 - Django 4.2: python.version: '3.9' tox.env: 'py39-django42' Python 3.8 - Django 4.2: python.version: '3.8' tox.env: 'py38-django42' Python3.11 - Django 4.1: python.version: '3.11' tox.env: 'py311-django41' Python3.10 - Django 4.1: python.version: '3.10' tox.env: 'py310-django41' Python 3.9 - Django 4.1: python.version: '3.9' tox.env: 'py39-django41' Python 3.8 - Django 4.1: python.version: '3.8' tox.env: 'py38-django41' Python3.11 - Django 4.0: python.version: '3.11' tox.env: 'py311-django40' Python3.10 - Django 4.0: python.version: '3.10' tox.env: 'py310-django40' Python 3.9 - Django 4.0: python.version: '3.9' tox.env: 'py39-django40' Python 3.8 - Django 4.0: python.version: '3.8' tox.env: 'py38-django40' Python3.11 - Django 3.2: python.version: '3.11' tox.env: 'py311-django32' Python 3.9 - Django 3.2: python.version: '3.9' tox.env: 'py39-django32' Python 3.8 - Django 3.2: python.version: '3.8' tox.env: 'py38-django32' steps: - task: CredScan@3 inputs: toolMajorVersion: 'V2' - task: UsePythonVersion@0 inputs: versionSpec: "$(python.version)" displayName: Use Python $(python.version) - powershell: | $IP=Get-NetIPAddress -AddressFamily IPv4 -InterfaceIndex $(Get-NetConnectionProfile -IPv4Connectivity Internet | Select-Object -ExpandProperty InterfaceIndex) | Select-Object -ExpandProperty IPAddress (Get-Content $pwd/testapp/settings.py).replace('localhost', $IP) | Set-Content $pwd/testapp/settings.py Invoke-WebRequest https://download.microsoft.com/download/6/f/f/6ffefc73-39ab-4cc0-bb7c-4093d64c2669/en-US/17.10.5.1/x64/msodbcsql.msi -OutFile msodbcsql.msi msiexec /quiet /passive /qn /i msodbcsql.msi IACCEPTMSODBCSQLLICENSETERMS=YES Get-OdbcDriver displayName: Install ODBC - powershell: | Import-Module "sqlps" Invoke-Sqlcmd @" EXEC xp_instance_regwrite N'HKEY_LOCAL_MACHINE', N'Software\Microsoft\MSSQLServer\MSSQLServer', N'LoginMode', REG_DWORD, 2 ALTER LOGIN [sa] ENABLE; ALTER LOGIN [sa] WITH PASSWORD = '$(TestAppPassword)', CHECK_POLICY=OFF; "@ displayName: Set up SQL Server - powershell: | Restart-Service -Name MSSQLSERVER -Force displayName: Restart SQL Server - powershell: | (Get-Content -ReadCount 0 testapp\settings.py) -replace 'MyPassword42', '$(TestAppPassWord)' | Set-Content testapp\settings.py displayName: Change PASSWORD in settings.py - powershell: | python -m pip install --upgrade pip wheel setuptools python -m pip install tox git clone https://github.com/django/django.git python -m tox -e $(tox.env) displayName: Run tox - job: Linux pool: name: Django-1ES-pool demands: - imageOverride -equals Ubuntu22.04-AzurePipelines timeoutInMinutes: 120 strategy: matrix: Python3.12 - Django 5.0: python.version: '3.12' tox.env: 'py312-django50' Python3.11 - Django 5.0: python.version: '3.11' 
tox.env: 'py311-django50' Python3.10 - Django 5.0: python.version: '3.10' tox.env: 'py310-django50' Python3.11 - Django 4.2: python.version: '3.11' tox.env: 'py311-django42' Python3.10 - Django 4.2: python.version: '3.10' tox.env: 'py310-django42' Python 3.9 - Django 4.2: python.version: '3.9' tox.env: 'py39-django42' Python 3.8 - Django 4.2: python.version: '3.8' tox.env: 'py38-django42' Python3.11 - Django 4.1: python.version: '3.11' tox.env: 'py311-django41' Python3.10 - Django 4.1: python.version: '3.10' tox.env: 'py310-django41' Python 3.9 - Django 4.1: python.version: '3.9' tox.env: 'py39-django41' Python 3.8 - Django 4.1: python.version: '3.8' tox.env: 'py38-django41' Python3.11 - Django 4.0: python.version: '3.11' tox.env: 'py311-django40' Python3.10 - Django 4.0: python.version: '3.10' tox.env: 'py310-django40' Python 3.9 - Django 4.0: python.version: '3.9' tox.env: 'py39-django40' Python 3.8 - Django 4.0: python.version: '3.8' tox.env: 'py38-django40' Python3.11 - Django 3.2: python.version: '3.11' tox.env: 'py311-django32' Python 3.9 - Django 3.2: python.version: '3.9' tox.env: 'py39-django32' Python 3.8 - Django 3.2: python.version: '3.8' tox.env: 'py38-django32' steps: - task: UsePythonVersion@0 inputs: versionSpec: "$(python.version)" displayName: Use Python $(python.version) - script: | docker version docker pull mcr.microsoft.com/mssql/server:2022-latest docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=$(TestAppPassword)' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2022-latest curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - curl https://packages.microsoft.com/config/ubuntu/22.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list displayName: Install SQL Server - script: | python -m pip install --upgrade pip wheel setuptools pip install tox git clone https://github.com/django/django.git displayName: Install requirements - script: | sed -i 's/MyPassword42/$(TestAppPassword)/g' testapp/settings.py displayName: Change PASSWORD in settings.py - script: tox -e $(tox.env) displayName: Run tox - task: PublishCodeCoverageResults@1 inputs: codeCoverageTool: 'Cobertura' summaryFileLocation: 'django/coverage.xml' - task: PublishTestResults@2 displayName: Publish test results via jUnit inputs: testResultsFormat: 'JUnit' testResultsFiles: 'django/result.xml' testRunTitle: 'junit-$(Agent.OS)-$(Agent.OSArchitecture)-$(tox.env)' mssql-django-1.5/manage.py000077500000000000000000000005051461302131500155570ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testapp.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) mssql-django-1.5/mssql/000077500000000000000000000000001461302131500151115ustar00rootroot00000000000000mssql-django-1.5/mssql/__init__.py000066400000000000000000000001511461302131500172170ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import mssql.functions # noqa mssql-django-1.5/mssql/base.py000066400000000000000000000664401461302131500164070ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. """ MS SQL Server database backend for Django. 
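
Requires pyodbc 3.0 or newer and a suitable ODBC driver, e.g. the Microsoft
ODBC Driver for SQL Server or FreeTDS (the version checks are enforced below).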
""" import os import re import time import struct import datetime from decimal import Decimal from uuid import UUID from django.core.exceptions import ImproperlyConfigured from django.utils.functional import cached_property try: import pyodbc as Database except ImportError as e: raise ImproperlyConfigured("Error loading pyodbc module: %s" % e) from django.utils.version import get_version_tuple # noqa pyodbc_ver = get_version_tuple(Database.version) if pyodbc_ver < (3, 0): raise ImproperlyConfigured("pyodbc 3.0 or newer is required; you have %s" % Database.version) from django.conf import settings # noqa from django.db import NotSupportedError # noqa from django.db.backends.base.base import BaseDatabaseWrapper # noqa from django.utils.encoding import smart_str # noqa from django.utils.functional import cached_property # noqa if hasattr(settings, 'DATABASE_CONNECTION_POOLING'): if not settings.DATABASE_CONNECTION_POOLING: Database.pooling = False from .client import DatabaseClient # noqa from .creation import DatabaseCreation # noqa from .features import DatabaseFeatures # noqa from .introspection import DatabaseIntrospection, SQL_TIMESTAMP_WITH_TIMEZONE # noqa from .operations import DatabaseOperations # noqa from .schema import DatabaseSchemaEditor # noqa EDITION_AZURE_SQL_DB = 5 EDITION_AZURE_SQL_MANAGED_INSTANCE = 8 def encode_connection_string(fields): """Encode dictionary of keys and values as an ODBC connection String. See [MS-ODBCSTR] document: https://msdn.microsoft.com/en-us/library/ee208909%28v=sql.105%29.aspx """ # As the keys are all provided by us, don't need to encode them as we know # they are ok. return ';'.join( '%s=%s' % (k, encode_value(v)) for k, v in fields.items() ) def prepare_token_for_odbc(token): """ Will prepare token for passing it to the odbc driver, as it expects bytes and not a string :param token: :return: packed binary byte representation of token string """ if not isinstance(token, str): raise TypeError("Invalid token format provided.") tokenstr = token.encode() exptoken = b"" for i in tokenstr: exptoken += bytes({i}) exptoken += bytes(1) return struct.pack("=i", len(exptoken)) + exptoken def encode_value(v): """If the value contains a semicolon, or starts with a left curly brace, then enclose it in curly braces and escape all right curly braces. """ if ';' in v or v.strip(' ').startswith('{'): return '{%s}' % (v.replace('}', '}}'),) return v def handle_datetimeoffset(dto_value): # Decode bytes returned from SQL Server # source: https://github.com/mkleehammer/pyodbc/wiki/Using-an-Output-Converter-function tup = struct.unpack("<6hI2h", dto_value) # e.g., (2017, 3, 16, 10, 35, 18, 500000000) return datetime.datetime(tup[0], tup[1], tup[2], tup[3], tup[4], tup[5], tup[6] // 1000) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'microsoft' display_name = 'SQL Server' # This dictionary maps Field objects to their associated MS SQL column # types, as strings. Column-type strings can contain format strings; they'll # be interpolated against the values of Field.__dict__ before being output. # If a column type is set to None, it won't be included in the output. 
    data_types = {
        'AutoField': 'int',
        'BigAutoField': 'bigint',
        'BigIntegerField': 'bigint',
        'BinaryField': 'varbinary(%(max_length)s)',
        'BooleanField': 'bit',
        'CharField': 'nvarchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime2',
        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
        'DurationField': 'bigint',
        'FileField': 'nvarchar(%(max_length)s)',
        'FilePathField': 'nvarchar(%(max_length)s)',
        'FloatField': 'double precision',
        'IntegerField': 'int',
        'IPAddressField': 'nvarchar(15)',
        'GenericIPAddressField': 'nvarchar(39)',
        'JSONField': 'nvarchar(max)',
        'NullBooleanField': 'bit',
        'OneToOneField': 'int',
        'PositiveIntegerField': 'int',
        'PositiveSmallIntegerField': 'smallint',
        'PositiveBigIntegerField': 'bigint',
        'SlugField': 'nvarchar(%(max_length)s)',
        'SmallAutoField': 'smallint',
        'SmallIntegerField': 'smallint',
        'TextField': 'nvarchar(max)',
        'TimeField': 'time',
        'UUIDField': 'char(32)',
    }
    data_types_suffix = {
        'AutoField': 'IDENTITY (1, 1)',
        'BigAutoField': 'IDENTITY (1, 1)',
        'SmallAutoField': 'IDENTITY (1, 1)',
    }
    data_type_check_constraints = {
        'JSONField': '(ISJSON ("%(column)s") = 1)',
        'PositiveIntegerField': '[%(column)s] >= 0',
        'PositiveSmallIntegerField': '[%(column)s] >= 0',
        'PositiveBigIntegerField': '[%(column)s] >= 0',
    }
    operators = {
        # Since '=' is used not only for string comparison there is no way
        # to make it case (in)sensitive.
        'exact': '= %s',
        'iexact': "= UPPER(%s)",
        'contains': "LIKE %s ESCAPE '\\'",
        'icontains': "LIKE UPPER(%s) ESCAPE '\\'",
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': "LIKE %s ESCAPE '\\'",
        'endswith': "LIKE %s ESCAPE '\\'",
        'istartswith': "LIKE UPPER(%s) ESCAPE '\\'",
        'iendswith': "LIKE UPPER(%s) ESCAPE '\\'",
    }

    # The patterns below are used to generate SQL pattern lookup clauses when
    # the right-hand side of the lookup isn't a raw string (it might be an expression
    # or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
    # escaped on database side.
    #
    # Note: we use str.format() here for readability as '%' is used as a wildcard for
    # the LIKE operator.
    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '[\]'), '%%', '[%%]'), '_', '[_]')"
    pattern_ops = {
        'contains': "LIKE '%%' + {} + '%%'",
        'icontains': "LIKE '%%' + UPPER({}) + '%%'",
        'startswith': "LIKE {} + '%%'",
        'istartswith': "LIKE UPPER({}) + '%%'",
        'endswith': "LIKE '%%' + {}",
        'iendswith': "LIKE '%%' + UPPER({})",
    }

    Database = Database
    SchemaEditorClass = DatabaseSchemaEditor
    # Classes instantiated in __init__().
    client_class = DatabaseClient
    creation_class = DatabaseCreation
    features_class = DatabaseFeatures
    introspection_class = DatabaseIntrospection
    ops_class = DatabaseOperations

    _codes_for_networkerror = (
        '08S01',
        '08S02',
    )
    _sql_server_versions = {
        9: 2005,
        10: 2008,
        11: 2012,
        12: 2014,
        13: 2016,
        14: 2017,
        15: 2019,
        16: 2022,
    }

    # https://azure.microsoft.com/en-us/documentation/articles/sql-database-develop-csharp-retry-windows/
    _transient_error_numbers = (
        '4060',
        '10928',
        '10929',
        '40197',
        '40501',
        '40613',
        '49918',
        '49919',
        '49920',
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        opts = self.settings_dict["OPTIONS"]

        # capability for multiple result sets or cursors
        self.supports_mars = False

        # Some drivers need unicode encoded as UTF8. If this is left as
        # None, it will be determined based on the driver, namely it'll be
        # False if the driver is a windows driver and True otherwise.
# # However, recent versions of FreeTDS and pyodbc (0.91 and 3.0.6 as # of writing) are perfectly okay being fed unicode, which is why # this option is configurable. if 'driver_needs_utf8' in opts: self.driver_charset = 'utf-8' else: self.driver_charset = opts.get('driver_charset', None) # interval to wait for recovery from network error interval = opts.get('connection_recovery_interval_msec', 0.0) self.connection_recovery_interval_msec = float(interval) / 1000 # make lookup operators to be collation-sensitive if needed collation = opts.get('collation', None) if collation: self.operators = dict(self.__class__.operators) ops = {} for op in self.operators: sql = self.operators[op] if sql.startswith('LIKE '): ops[op] = '%s COLLATE %s' % (sql, collation) self.operators.update(ops) if (settings.USE_TZ): self.data_types['DateTimeField'] ='datetimeoffset' def create_cursor(self, name=None): return CursorWrapper(self.connection.cursor(), self) def _cursor(self): new_conn = False if self.connection is None: new_conn = True conn = super()._cursor() if new_conn: if self.sql_server_version <= 2005: self.data_types['DateField'] = 'datetime' self.data_types['DateTimeField'] = 'datetime' self.data_types['TimeField'] = 'datetime' return conn def get_connection_params(self): settings_dict = self.settings_dict if settings_dict['NAME'] == '': raise ImproperlyConfigured( "settings.DATABASES is improperly configured. " "Please supply the NAME value.") conn_params = settings_dict.copy() if conn_params['NAME'] is None: conn_params['NAME'] = 'master' return conn_params def get_new_connection(self, conn_params): database = conn_params['NAME'] host = conn_params.get('HOST', 'localhost') user = conn_params.get('USER', None) password = conn_params.get('PASSWORD', None) port = conn_params.get('PORT', None) trusted_connection = conn_params.get('Trusted_Connection', 'yes') options = conn_params.get('OPTIONS', {}) driver = options.get('driver', 'ODBC Driver 17 for SQL Server') dsn = options.get('dsn', None) options_extra_params = options.get('extra_params', '') # Microsoft driver names assumed here are: # * SQL Server Native Client 10.0/11.0 # * ODBC Driver 11/13 for SQL Server ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client') # available ODBC connection string keywords: # (Microsoft drivers for Windows) # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/applications/using-connection-string-keywords-with-sql-server-native-client # (Microsoft drivers for Linux/Mac) # https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/connection-string-keywords-and-data-source-names-dsns # (FreeTDS) # http://www.freetds.org/userguide/odbcconnattr.htm cstr_parts = {} if dsn: cstr_parts['DSN'] = dsn else: # Only append DRIVER if DATABASE_ODBC_DSN hasn't been set cstr_parts['DRIVER'] = driver if ms_drivers.match(driver): if port: host = ','.join((host, str(port))) cstr_parts['SERVER'] = host elif options.get('host_is_server', False): if port: cstr_parts['PORT'] = str(port) cstr_parts['SERVER'] = host else: cstr_parts['SERVERNAME'] = host if user: cstr_parts['UID'] = user if 'Authentication=ActiveDirectoryInteractive' not in options_extra_params: cstr_parts['PWD'] = password elif 'TOKEN' not in conn_params: if ms_drivers.match(driver) and 'Authentication=ActiveDirectoryMsi' not in options_extra_params: cstr_parts['Trusted_Connection'] = trusted_connection else: cstr_parts['Integrated Security'] = 'SSPI' cstr_parts['DATABASE'] = database if ms_drivers.match(driver) and os.name 
== 'nt': cstr_parts['MARS_Connection'] = 'yes' connstr = encode_connection_string(cstr_parts) # extra_params are glued on the end of the string without encoding, # so it's up to the settings writer to make sure they're appropriate - # use encode_connection_string if constructing from external input. if options.get('extra_params', None): connstr += ';' + options['extra_params'] unicode_results = options.get('unicode_results', False) timeout = options.get('connection_timeout', 0) retries = options.get('connection_retries', 5) backoff_time = options.get('connection_retry_backoff_time', 5) query_timeout = options.get('query_timeout', 0) setencoding = options.get('setencoding', None) setdecoding = options.get('setdecoding', None) conn = None retry_count = 0 need_to_retry = False args = { 'unicode_results': unicode_results, 'timeout': timeout, } if 'TOKEN' in conn_params: args['attrs_before'] = { 1256: prepare_token_for_odbc(conn_params['TOKEN']) } while conn is None: try: conn = Database.connect(connstr, **args) except Exception as e: for error_number in self._transient_error_numbers: if error_number in e.args[1]: if error_number in e.args[1] and retry_count < retries: time.sleep(backoff_time) need_to_retry = True retry_count = retry_count + 1 else: need_to_retry = False break if not need_to_retry: raise # Handling values from DATETIMEOFFSET columns # source: https://github.com/mkleehammer/pyodbc/wiki/Using-an-Output-Converter-function conn.add_output_converter(SQL_TIMESTAMP_WITH_TIMEZONE, handle_datetimeoffset) conn.timeout = query_timeout if setencoding: for entry in setencoding: conn.setencoding(**entry) if setdecoding: for entry in setdecoding: conn.setdecoding(**entry) return conn def init_connection_state(self): drv_name = self.connection.getinfo(Database.SQL_DRIVER_NAME).upper() if drv_name.startswith('LIBTDSODBC'): try: drv_ver = self.connection.getinfo(Database.SQL_DRIVER_VER) ver = get_version_tuple(drv_ver)[:2] if ver < (0, 95): raise ImproperlyConfigured( "FreeTDS 0.95 or newer is required.") except Exception: # unknown driver version pass ms_drv_names = re.compile('^(LIB)?(SQLNCLI|MSODBCSQL)') if ms_drv_names.match(drv_name): self.driver_charset = None # http://msdn.microsoft.com/en-us/library/ms131686.aspx self.supports_mars = True self.features.can_use_chunked_reads = True settings_dict = self.settings_dict cursor = self.create_cursor() options = settings_dict.get('OPTIONS', {}) isolation_level = options.get('isolation_level', None) if isolation_level: cursor.execute('SET TRANSACTION ISOLATION LEVEL %s' % isolation_level) # Set date format for the connection. Also, make sure Sunday is # considered the first day of the week (to be consistent with the # Django convention for the 'week_day' Django lookup) if the user # hasn't told us otherwise datefirst = options.get('datefirst', 7) cursor.execute('SET DATEFORMAT ymd; SET DATEFIRST %s' % datefirst) # Let user choose if driver can return rows from bulk insert since # inserting into tables with triggers causes errors. 
See issue #130
        if (options.get('return_rows_bulk_insert', False)):
            self.features_class.can_return_rows_from_bulk_insert = True

        val = self.get_system_datetime
        if isinstance(val, str):
            raise ImproperlyConfigured(
                "The database driver doesn't support modern datetime types.")

    def is_usable(self):
        try:
            self.create_cursor().execute("SELECT 1")
        except Database.Error:
            return False
        else:
            return True

    @cached_property
    def get_system_datetime(self):
        # http://blogs.msdn.com/b/sqlnativeclient/archive/2008/02/27/microsoft-sql-server-native-client-and-microsoft-sql-server-2008-native-client.aspx
        with self.temporary_connection() as cursor:
            if self.sql_server_version <= 2005:
                return cursor.execute('SELECT GETDATE()').fetchone()[0]
            else:
                return cursor.execute('SELECT SYSDATETIME()').fetchone()[0]

    @cached_property
    def sql_server_version(self, _known_versions={}):
        """
        Get the SQL server version

        The _known_versions default dictionary is created on the class.
        This is intentional - it allows us to cache this property's value
        across instances. Therefore, when Django creates a new database
        connection using the same alias, we won't need to query the server
        again.
        """
        if self.alias not in _known_versions:
            with self.temporary_connection() as cursor:
                cursor.execute("SELECT CAST(SERVERPROPERTY('ProductVersion') AS varchar)")
                ver = cursor.fetchone()[0]
                ver = int(ver.split('.')[0])
                if ver not in self._sql_server_versions:
                    raise NotSupportedError('SQL Server v%d is not supported.' % ver)
                _known_versions[self.alias] = self._sql_server_versions[ver]
        return _known_versions[self.alias]

    @cached_property
    def to_azure_sql_db(self, _known_azures={}):
        """
        Whether this connection is to a Microsoft Azure database server

        The _known_azures default dictionary is created on the class.
        This is intentional - it allows us to cache this property's value
        across instances. Therefore, when Django creates a new database
        connection using the same alias, we won't need to query the server
        again.
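
        Azure is detected via SERVERPROPERTY('EngineEdition'): edition 5 is
        Azure SQL Database and edition 8 is Azure SQL Managed Instance (the
        EDITION_AZURE_SQL_DB and EDITION_AZURE_SQL_MANAGED_INSTANCE constants
        defined above).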
""" if self.alias not in _known_azures: with self.temporary_connection() as cursor: cursor.execute("SELECT CAST(SERVERPROPERTY('EngineEdition') AS integer)") edition = cursor.fetchone()[0] _known_azures[self.alias] = edition == EDITION_AZURE_SQL_DB or edition == EDITION_AZURE_SQL_MANAGED_INSTANCE return _known_azures[self.alias] def _execute_foreach(self, sql, table_names=None): cursor = self.cursor() if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: cursor.execute(sql % self.ops.quote_name(table_name)) def _get_trancount(self): with self.connection.cursor() as cursor: return cursor.execute('SELECT @@TRANCOUNT').fetchone()[0] def _on_error(self, e): if e.args[0] in self._codes_for_networkerror: try: # close the stale connection self.close() # wait a moment for recovery from network error time.sleep(self.connection_recovery_interval_msec) except Exception: pass self.connection = None def _savepoint(self, sid): with self.cursor() as cursor: cursor.execute('SELECT @@TRANCOUNT') trancount = cursor.fetchone()[0] if trancount == 0: cursor.execute(self.ops.start_transaction_sql()) cursor.execute(self.ops.savepoint_create_sql(sid)) def _savepoint_commit(self, sid): # SQL Server has no support for partial commit in a transaction pass def _savepoint_rollback(self, sid): with self.cursor() as cursor: # FreeTDS requires TRANCOUNT that is greater than 0 cursor.execute('SELECT @@TRANCOUNT') trancount = cursor.fetchone()[0] if trancount > 0: cursor.execute(self.ops.savepoint_rollback_sql(sid)) def _set_autocommit(self, autocommit): with self.wrap_database_errors: allowed = not autocommit if not allowed: # FreeTDS requires TRANCOUNT that is greater than 0 allowed = self._get_trancount() > 0 if allowed: self.connection.autocommit = autocommit def check_constraints(self, table_names=None): self._execute_foreach('ALTER TABLE %s WITH CHECK CHECK CONSTRAINT ALL', table_names) def disable_constraint_checking(self): if not self.needs_rollback: self._execute_foreach('ALTER TABLE %s NOCHECK CONSTRAINT ALL') return not self.needs_rollback def enable_constraint_checking(self): if not self.needs_rollback: self._execute_foreach('ALTER TABLE %s WITH NOCHECK CHECK CONSTRAINT ALL') class CursorWrapper(object): """ A wrapper around the pyodbc's cursor that takes in account a) some pyodbc DB-API 2.0 implementation and b) some common ODBC driver particularities. 
""" def __init__(self, cursor, connection): self.active = True self.cursor = cursor self.connection = connection self.driver_charset = connection.driver_charset self.last_sql = '' self.last_params = () def _as_sql_type(self, typ, value): if isinstance(value, str): length = len(value) if length == 0: return 'NVARCHAR' elif length > 4000: return 'NVARCHAR(max)' return 'NVARCHAR(%s)' % len(value) elif typ == int: if value < 0x7FFFFFFF and value > -0x7FFFFFFF: return 'INT' else: return 'BIGINT' elif typ == float: return 'DOUBLE PRECISION' elif typ == bool: return 'BIT' elif isinstance(value, Decimal): return 'NUMERIC' elif isinstance(value, datetime.datetime): return 'DATETIME2' elif isinstance(value, datetime.date): return 'DATE' elif isinstance(value, datetime.time): return 'TIME' elif isinstance(value, UUID): return 'uniqueidentifier' else: raise NotImplementedError('Not supported type %s (%s)' % (type(value), repr(value))) def close(self): if self.active: self.active = False self.cursor.close() def format_sql(self, sql, params): if self.driver_charset and isinstance(sql, str): # FreeTDS (and other ODBC drivers?) doesn't support Unicode # yet, so we need to encode the SQL clause itself in utf-8 sql = smart_str(sql, self.driver_charset) # pyodbc uses '?' instead of '%s' as parameter placeholder. if params is not None and params != []: sql = sql % tuple('?' * len(params)) return sql def format_group_by_params(self, query, params): # Prepare query for string formatting query = re.sub(r'%\w+', '{}', query) if params: # Insert None params directly into the query if None in params: null_params = ['NULL' if param is None else '{}' for param in params] query = query.format(*null_params) params = tuple(p for p in params if p is not None) params = [(param, type(param)) for param in params] params_dict = {param: '@var%d' % i for i, param in enumerate(set(params))} args = [params_dict[param] for param in params] variables = [] params = [] for key, value in params_dict.items(): datatype = self._as_sql_type(key[1], key[0]) variables.append("%s %s = %%s " % (value, datatype)) params.append(key[0]) query = ('DECLARE %s \n' % ','.join(variables)) + (query.format(*args)) return query, params def format_params(self, params): fp = [] if params is not None: for p in params: if isinstance(p, str): if self.driver_charset: # FreeTDS (and other ODBC drivers?) doesn't support Unicode # yet, so we need to encode parameters in utf-8 fp.append(smart_str(p, self.driver_charset)) else: fp.append(p) elif isinstance(p, bytes): fp.append(p) elif isinstance(p, type(True)): if p: fp.append(1) else: fp.append(0) else: fp.append(p) return tuple(fp) def execute(self, sql, params=None): self.last_sql = sql if 'GROUP BY' in sql: sql, params = self.format_group_by_params(sql, params) sql = self.format_sql(sql, params) params = self.format_params(params) self.last_params = params try: return self.cursor.execute(sql, params) except Database.Error as e: self.connection._on_error(e) raise def executemany(self, sql, params_list=()): if not params_list: return None raw_pll = [p for p in params_list] sql = self.format_sql(sql, raw_pll[0]) params_list = [self.format_params(p) for p in raw_pll] try: return self.cursor.executemany(sql, params_list) except Database.Error as e: self.connection._on_error(e) raise def format_rows(self, rows): return list(map(self.format_row, rows)) def format_row(self, row): """ Decode data coming from the database if needed and convert rows to tuples (pyodbc Rows are not hashable). 
""" if self.driver_charset: for i in range(len(row)): f = row[i] # FreeTDS (and other ODBC drivers?) doesn't support Unicode # yet, so we need to decode utf-8 data coming from the DB if isinstance(f, bytes): row[i] = f.decode(self.driver_charset) return tuple(row) def fetchone(self): row = self.cursor.fetchone() if row is not None: row = self.format_row(row) # Any remaining rows in the current set must be discarded # before changing autocommit mode when you use FreeTDS if not self.connection.supports_mars: self.cursor.nextset() return row def fetchmany(self, chunk): return self.format_rows(self.cursor.fetchmany(chunk)) def fetchall(self): return self.format_rows(self.cursor.fetchall()) def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) mssql-django-1.5/mssql/client.py000066400000000000000000000036061461302131500167460ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import re import subprocess from django.db.backends.base.client import BaseDatabaseClient class DatabaseClient(BaseDatabaseClient): executable_name = 'sqlcmd' @classmethod def settings_to_cmd_args(cls, settings_dict, parameters): options = settings_dict['OPTIONS'] user = options.get('user', settings_dict['USER']) password = options.get('passwd', settings_dict['PASSWORD']) driver = options.get('driver', 'ODBC Driver 13 for SQL Server') ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client') if not ms_drivers.match(driver): cls.executable_name = 'isql' if cls.executable_name == 'sqlcmd': db = options.get('db', settings_dict['NAME']) server = options.get('host', settings_dict['HOST']) port = options.get('port', settings_dict['PORT']) defaults_file = options.get('read_default_file') args = [cls.executable_name] if server: if port: server = ','.join((server, str(port))) args += ["-S", server] if user: args += ["-U", user] if password: args += ["-P", password] else: args += ["-E"] # Try trusted connection instead if db: args += ["-d", db] if defaults_file: args += ["-i", defaults_file] else: dsn = options.get('dsn', '') args = ['%s -v %s %s %s' % (cls.executable_name, dsn, user, password)] args.extend(parameters) return args def runshell(self, parameters=[]): args = DatabaseClient.settings_to_cmd_args(self.connection.settings_dict, parameters) subprocess.run(args, check=True) mssql-django-1.5/mssql/compiler.py000066400000000000000000000724621461302131500173100ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
import types from itertools import chain import django from django.db.models.aggregates import Avg, Count, StdDev, Variance from django.db.models.expressions import Ref, Subquery, Value, Window from django.db.models.functions import ( Chr, ConcatPair, Greatest, Least, Length, LPad, Random, Repeat, RPad, StrIndex, Substr, Trim ) from django.db.models.sql import compiler from django.db.transaction import TransactionManagementError from django.db.utils import NotSupportedError if django.VERSION >= (3, 1): from django.db.models.fields.json import compile_json_path, KeyTransform as json_KeyTransform if django.VERSION >= (4, 2): from django.core.exceptions import EmptyResultSet, FullResultSet def _as_sql_agv(self, compiler, connection): return self.as_sql(compiler, connection, template='%(function)s(CONVERT(float, %(field)s))') def _as_sql_chr(self, compiler, connection): return self.as_sql(compiler, connection, function='NCHAR') def _as_sql_concatpair(self, compiler, connection): if connection.sql_server_version < 2012: node = self.coalesce() return node.as_sql(compiler, connection, arg_joiner=' + ', template='%(expressions)s') else: return self.as_sql(compiler, connection) def _as_sql_count(self, compiler, connection): return self.as_sql(compiler, connection, function='COUNT_BIG') def _as_sql_greatest(self, compiler, connection): # SQL Server does not provide GREATEST function, # so we emulate it with a table value constructor # https://msdn.microsoft.com/en-us/library/dd776382.aspx template = '(SELECT MAX(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))' return self.as_sql(compiler, connection, arg_joiner='), (', template=template) def _as_sql_json_keytransform(self, compiler, connection): lhs, params, key_transforms = self.preprocess_lhs(compiler, connection) json_path = compile_json_path(key_transforms) return ( "COALESCE(JSON_QUERY(%s, '%s'), JSON_VALUE(%s, '%s'))" % ((lhs, json_path) * 2) ), tuple(params) * 2 def _as_sql_least(self, compiler, connection): # SQL Server does not provide LEAST function, # so we emulate it with a table value constructor # https://msdn.microsoft.com/en-us/library/dd776382.aspx template = '(SELECT MIN(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))' return self.as_sql(compiler, connection, arg_joiner='), (', template=template) def _as_sql_length(self, compiler, connection): return self.as_sql(compiler, connection, function='LEN') def _as_sql_lpad(self, compiler, connection): i = iter(self.get_source_expressions()) expression, expression_arg = compiler.compile(next(i)) length, length_arg = compiler.compile(next(i)) fill_text, fill_text_arg = compiler.compile(next(i)) params = [] params.extend(fill_text_arg) params.extend(length_arg) params.extend(length_arg) params.extend(expression_arg) params.extend(length_arg) params.extend(expression_arg) params.extend(expression_arg) params.extend(length_arg) template = ('LEFT(LEFT(REPLICATE(%(fill_text)s, %(length)s), CASE WHEN %(length)s > LEN(%(expression)s) ' 'THEN %(length)s - LEN(%(expression)s) ELSE 0 END) + %(expression)s, %(length)s)') return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params def _as_sql_repeat(self, compiler, connection): return self.as_sql(compiler, connection, function='REPLICATE') def _as_sql_rpad(self, compiler, connection): i = iter(self.get_source_expressions()) expression, expression_arg = compiler.compile(next(i)) length, length_arg = compiler.compile(next(i)) fill_text, fill_text_arg = compiler.compile(next(i)) params = 
[]
    params.extend(expression_arg)
    params.extend(fill_text_arg)
    params.extend(length_arg)
    params.extend(length_arg)
    template = 'LEFT(%(expression)s + REPLICATE(%(fill_text)s, %(length)s), %(length)s)'
    return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params


def _as_sql_stddev(self, compiler, connection):
    function = 'STDEV'
    if self.function == 'STDDEV_POP':
        function = '%sP' % function
    return self.as_sql(compiler, connection, function=function)


def _as_sql_strindex(self, compiler, connection):
    self.source_expressions.reverse()
    sql = self.as_sql(compiler, connection, function='CHARINDEX')
    self.source_expressions.reverse()
    return sql


def _as_sql_substr(self, compiler, connection):
    if len(self.get_source_expressions()) < 3:
        self.get_source_expressions().append(Value(2**31 - 1))
    return self.as_sql(compiler, connection)


def _as_sql_trim(self, compiler, connection):
    return self.as_sql(compiler, connection, template='LTRIM(RTRIM(%(expressions)s))')


def _as_sql_variance(self, compiler, connection):
    function = 'VAR'
    if self.function == 'VAR_POP':
        function = '%sP' % function
    return self.as_sql(compiler, connection, function=function)


def _as_sql_window(self, compiler, connection, template=None):
    connection.ops.check_expression_support(self)
    if not connection.features.supports_over_clause:
        raise NotSupportedError("This backend does not support window expressions.")
    expr_sql, params = compiler.compile(self.source_expression)
    window_sql, window_params = [], ()
    if self.partition_by is not None:
        sql_expr, sql_params = self.partition_by.as_sql(
            compiler=compiler,
            connection=connection,
            template="PARTITION BY %(expressions)s",
        )
        window_sql.append(sql_expr)
        window_params += tuple(sql_params)
    if self.order_by is not None:
        order_sql, order_params = compiler.compile(self.order_by)
        window_sql.append(order_sql)
        window_params += tuple(order_params)
    else:
        # MSSQL window functions require an OVER clause with ORDER BY
        window_sql.append('ORDER BY (SELECT NULL)')
    if self.frame:
        frame_sql, frame_params = compiler.compile(self.frame)
        window_sql.append(frame_sql)
        window_params += tuple(frame_params)
    template = template or self.template
    return (
        template % {"expression": expr_sql, "window": " ".join(window_sql).strip()},
        (*params, *window_params),
    )


def _cursor_iter(cursor, sentinel, col_count, itersize):
    """
    Yields blocks of rows from a cursor and ensures the cursor is closed when
    done.
    """
    if not hasattr(cursor.db, 'supports_mars') or cursor.db.supports_mars:
        # same as the original Django implementation
        try:
            for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
                yield rows if col_count is None else [r[:col_count] for r in rows]
        finally:
            cursor.close()
    else:
        # retrieve all chunks from the cursor and close it before yielding
        # so that we can open another cursor over an iteration
        # (for drivers such as FreeTDS)
        chunks = []
        try:
            for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
                chunks.append(rows if col_count is None else [r[:col_count] for r in rows])
        finally:
            cursor.close()
        for rows in chunks:
            yield rows


compiler.cursor_iter = _cursor_iter


class SQLCompiler(compiler.SQLCompiler):
    def as_sql(self, with_limits=True, with_col_aliases=False):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.

        If 'with_limits' is False, any limit/offset information is not
        included in the query.
""" refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features # The do_offset flag indicates whether we need to construct # the SQL needed to use limit/offset w/SQL Server. high_mark = self.query.high_mark low_mark = self.query.low_mark do_limit = with_limits and high_mark is not None do_offset = with_limits and low_mark != 0 # SQL Server 2012 or newer supports OFFSET/FETCH clause supports_offset_clause = self.connection.sql_server_version >= 2012 do_offset_emulation = do_offset and not supports_offset_clause if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) elif django.VERSION >= (4, 2) and self.qualify: result, params = self.get_qualify_sql() order_by = None else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' -- see # docstring of get_from_clause() for details. from_, f_params = self.get_from_clause() if django.VERSION >= (4, 2): try: where, w_params = self.compile(self.where) if self.where is not None else ("", []) except EmptyResultSet: if self.elide_empty: raise # Use a predicate that's always False. where, w_params = "0 = 1", [] except FullResultSet: where, w_params = "", [] try: having, h_params = self.compile(self.having) if self.having is not None else ("", []) except FullResultSet: having, h_params = "", [] else: where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) params = [] result = ['SELECT'] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params # SQL Server requires the keword for limitting at the begenning if do_limit and not do_offset: result.append('TOP %d' % high_mark) out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases or do_offset_emulation: s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) # SQL Server requires an order-by clause for offsetting if do_offset: meta = self.query.get_meta() qn = self.quote_name_unless_alias offsetting_order_by = '%s.%s' % (qn(meta.db_table), qn(meta.pk.db_column or meta.pk.column)) if do_offset_emulation: if order_by: ordering = [] for expr, (o_sql, o_params, _) in order_by: # value_expression in OVER clause cannot refer to # expressions or aliases in the select list. 
See: # http://msdn.microsoft.com/en-us/library/ms189461.aspx src = next(iter(expr.get_source_expressions())) if isinstance(src, Ref): src = next(iter(src.get_source_expressions())) o_sql, _ = src.as_sql(self, self.connection) odir = 'DESC' if expr.descending else 'ASC' o_sql = '%s %s' % (o_sql, odir) ordering.append(o_sql) params.extend(o_params) offsetting_order_by = ', '.join(ordering) order_by = [] out_cols.append('ROW_NUMBER() OVER (ORDER BY %s) AS [rn]' % offsetting_order_by) elif not order_by: order_by.append(((None, ('%s ASC' % offsetting_order_by, [], None)))) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' ) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of # If it's a NOWAIT/SKIP LOCKED/OF query but the backend # doesn't support it, raise NotSupportedError to prevent a # possible deadlock. if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), ) if for_update_part and self.connection.features.for_update_after_from: from_.insert(1, for_update_part) result += [', '.join(out_cols)] if from_: result += ['FROM', *from_] params.extend(f_params) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if having: result.append('HAVING %s' % having) params.extend(h_params) explain = self.query.explain_info if django.VERSION >= (4, 0) else self.query.explain_query if explain: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for expr, (o_sql, o_params, _) in order_by: if expr: src = next(iter(expr.get_source_expressions())) if isinstance(src, Random): # ORDER BY RAND() doesn't return rows in random order # replace it with NEWID() o_sql = o_sql.replace('RAND()', 'NEWID()') ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) # For subqueres with an ORDER BY clause, SQL Server also # requires a TOP or OFFSET clause which is not generated for # Django 2.x. See https://github.com/microsoft/mssql-django/issues/12 # Add OFFSET for all Django versions. 
# https://github.com/microsoft/mssql-django/issues/109 if not (do_offset or do_limit) and supports_offset_clause: result.append("OFFSET 0 ROWS") # SQL Server requires the backend-specific emulation (2008 or earlier) # or an offset clause (2012 or newer) for offsetting if do_offset: if do_offset_emulation: # Construct the final SQL clause, using the initial select SQL # obtained above. result = ['SELECT * FROM (%s) AS X WHERE X.rn' % ' '.join(result)] # Place WHERE condition on `rn` for the desired range. if do_limit: result.append('BETWEEN %d AND %d' % (low_mark + 1, high_mark)) else: result.append('>= %d' % (low_mark + 1)) if not self.query.subquery: result.append('ORDER BY X.rn') else: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def compile(self, node, *args, **kwargs): node = self._as_microsoft(node) return super().compile(node, *args, **kwargs) def collapse_group_by(self, expressions, having): expressions = super().collapse_group_by(expressions, having) # SQL server does not allow subqueries or constant expressions in the group by # For constants: Each GROUP BY expression must contain at least one column that is not an outer reference. # For subqueries: Cannot use an aggregate or a subquery in an expression used for the group by list of a GROUP BY clause. 
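        # For example (hypothetical expressions), a grouping list such as
        #   [Col('name'), Value(1), Subquery(...)] collapses to [Col('name')]
        # before the GROUP BY clause is emitted.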
return self._filter_subquery_and_constant_expressions(expressions) def _is_constant_expression(self, expression): if isinstance(expression, Value): return True sub_exprs = expression.get_source_expressions() if not sub_exprs: return False for each in sub_exprs: if not self._is_constant_expression(each): return False return True def _filter_subquery_and_constant_expressions(self, expressions): ret = [] for expression in expressions: if self._is_subquery(expression): continue if self._is_constant_expression(expression): continue if not self._has_nested_subquery(expression): ret.append(expression) return ret def _has_nested_subquery(self, expression): if self._is_subquery(expression): return True for sub_expr in expression.get_source_expressions(): if self._has_nested_subquery(sub_expr): return True return False def _is_subquery(self, expression): return isinstance(expression, Subquery) def _as_microsoft(self, node): as_microsoft = None if isinstance(node, Avg): as_microsoft = _as_sql_agv elif isinstance(node, Chr): as_microsoft = _as_sql_chr elif isinstance(node, ConcatPair): as_microsoft = _as_sql_concatpair elif isinstance(node, Count): as_microsoft = _as_sql_count elif isinstance(node, Greatest): as_microsoft = _as_sql_greatest elif isinstance(node, Least): as_microsoft = _as_sql_least elif isinstance(node, Length): as_microsoft = _as_sql_length elif isinstance(node, RPad): as_microsoft = _as_sql_rpad elif isinstance(node, LPad): as_microsoft = _as_sql_lpad elif isinstance(node, Repeat): as_microsoft = _as_sql_repeat elif isinstance(node, StdDev): as_microsoft = _as_sql_stddev elif isinstance(node, StrIndex): as_microsoft = _as_sql_strindex elif isinstance(node, Substr): as_microsoft = _as_sql_substr elif isinstance(node, Trim): as_microsoft = _as_sql_trim elif isinstance(node, Variance): as_microsoft = _as_sql_variance if django.VERSION >= (3, 1): if isinstance(node, json_KeyTransform): as_microsoft = _as_sql_json_keytransform if django.VERSION >= (4, 1): if isinstance(node, Window): as_microsoft = _as_sql_window if as_microsoft: node = node.copy() node.as_microsoft = types.MethodType(as_microsoft, node) return node class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler): def get_returned_fields(self): if django.VERSION >= (3, 0, 0): return self.returning_fields return self.return_id def can_return_columns_from_insert(self): if django.VERSION >= (3, 0, 0): return self.connection.features.can_return_columns_from_insert return self.connection.features.can_return_id_from_insert def can_return_rows_from_bulk_insert(self): if django.VERSION >= (3, 0, 0): return self.connection.features.can_return_rows_from_bulk_insert return self.connection.features.can_return_ids_from_bulk_insert def fix_auto(self, sql, opts, fields, qn): if opts.auto_field is not None: # db_column is None if not explicitly specified by model field auto_field_column = opts.auto_field.db_column or opts.auto_field.column columns = [f.column for f in fields] if auto_field_column in columns: id_insert_sql = [] table = qn(opts.db_table) sql_format = 'SET IDENTITY_INSERT %s ON; %s; SET IDENTITY_INSERT %s OFF' for q, p in sql: id_insert_sql.append((sql_format % (table, q, table), p)) sql = id_insert_sql return sql def bulk_insert_default_values_sql(self, table): seed_rows_number = 8 cross_join_power = 4 # 8^4 = 4096 > maximum allowed batch size for the backend = 1000 def generate_seed_rows(n): return " UNION ALL ".join("SELECT 1 AS x" for _ in range(n)) def cross_join(p): return ", ".join("SEED_ROWS AS _%s" % i for i in 
range(p)) return """ WITH SEED_ROWS AS (%s) MERGE INTO %s USING ( SELECT TOP %s * FROM (SELECT 1 as x FROM %s) FAKE_ROWS ) FAKE_DATA ON 1 = 0 WHEN NOT MATCHED THEN INSERT DEFAULT VALUES """ % (generate_seed_rows(seed_rows_number), table, len(self.query.objs), cross_join(cross_join_power)) def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). qn = self.connection.ops.quote_name opts = self.query.get_meta() result = ['INSERT INTO %s' % qn(opts.db_table)] if self.query.fields: fields = self.query.fields result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) values_format = 'VALUES (%s)' value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: values_format = '%s VALUES' # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.get_returned_fields() and self.connection.features.has_bulk_insert) and self.query.fields placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) if self.get_returned_fields() and self.can_return_columns_from_insert(): if self.can_return_rows_from_bulk_insert(): if not(self.query.fields): # There isn't really a single statement to bulk multiple DEFAULT VALUES insertions, # so we have to use a workaround: # https://dba.stackexchange.com/questions/254771/insert-multiple-rows-into-a-table-with-only-an-identity-column result = [self.bulk_insert_default_values_sql(qn(opts.db_table))] r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.get_returned_fields()) if r_sql: result.append(r_sql) sql = " ".join(result) + ";" return [(sql, None)] # Regular bulk insert params = [] r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.get_returned_fields()) if r_sql: result.append(r_sql) params += [self.returning_params] params += param_rows result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) else: result.insert(0, 'SET NOCOUNT ON') result.append((values_format + ';') % ', '.join(placeholder_rows[0])) params = [param_rows[0]] result.append('SELECT CAST(SCOPE_IDENTITY() AS bigint)') sql = [(" ".join(result), tuple(chain.from_iterable(params)))] else: if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) sql = [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: sql = [ (" ".join(result + [values_format % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] if self.query.fields: sql = self.fix_auto(sql, opts, fields, qn) return sql class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler): def as_sql(self): sql, params = super().as_sql() if sql: sql = '; '.join(['SET NOCOUNT OFF', sql]) return sql, params class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler): def as_sql(self): sql, params = super().as_sql() if sql: sql = '; '.join(['SET NOCOUNT OFF', sql]) return sql, params class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler): pass mssql-django-1.5/mssql/creation.py000066400000000000000000000107121461302131500172700ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
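# Illustrative sketch (not part of the backend): when an explicit value is
# written to an IDENTITY column, SQLInsertCompiler.fix_auto in
# mssql/compiler.py above wraps each statement with its sql_format, producing
# roughly the following (table and columns hypothetical):
#
#   SET IDENTITY_INSERT [app_author] ON;
#   INSERT INTO [app_author] ([id], [name]) VALUES (%s, %s);
#   SET IDENTITY_INSERT [app_author] OFF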
import binascii import os from django.db.utils import InterfaceError from django.db.backends.base.creation import BaseDatabaseCreation from django import VERSION as django_version class DatabaseCreation(BaseDatabaseCreation): def cursor(self): if django_version >= (3, 1): return self.connection._nodb_cursor() return self.connection._nodb_connection.cursor() def _create_test_db(self, verbosity, autoclobber, keepdb=False): """ Internal implementation - create the test db tables. """ # Try to create the test DB, but if we fail due to 28000 (Login failed for user), # it's probably because the user doesn't have permission to [dbo].[master], # so we can proceed if we're keeping the DB anyway. # https://github.com/microsoft/mssql-django/issues/61 try: return super()._create_test_db(verbosity, autoclobber, keepdb) except InterfaceError as err: if err.args[0] == '28000' and keepdb: self.log('Received error %s, proceeding because keepdb=True' % ( err.args[1], )) else: raise err def _destroy_test_db(self, test_database_name, verbosity): """ Internal implementation - remove the test db tables. """ # Remove the test database to clean up after # ourselves. Connect to the previous database (not the test database) # to do so, because it's not allowed to delete a database while being # connected to it. with self.cursor() as cursor: to_azure_sql_db = self.connection.to_azure_sql_db if not to_azure_sql_db: cursor.execute("ALTER DATABASE %s SET SINGLE_USER WITH ROLLBACK IMMEDIATE" % self.connection.ops.quote_name(test_database_name)) cursor.execute("DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name)) def sql_table_creation_suffix(self): suffix = [] collation = self.connection.settings_dict['TEST'].get('COLLATION', None) if collation: suffix.append('COLLATE %s' % collation) return ' '.join(suffix) # The following code to add regex support in SQLServer is taken from django-mssql # see https://bitbucket.org/Manfre/django-mssql def enable_clr(self): """ Enables clr for server if not already enabled This function will not fail if current user doesn't have permissions to enable clr, and clr is already enabled """ with self.cursor() as cursor: # check whether clr is enabled cursor.execute(''' SELECT value FROM sys.configurations WHERE name = 'clr enabled' ''') res = None try: res = cursor.fetchone() except Exception: pass if not res or not res[0]: # if not enabled enable clr cursor.execute("sp_configure 'clr enabled', 1") cursor.execute("RECONFIGURE") cursor.execute("sp_configure 'show advanced options', 1") cursor.execute("RECONFIGURE") cursor.execute("sp_configure 'clr strict security', 0") cursor.execute("RECONFIGURE") def install_regex_clr(self, database_name): sql = ''' USE {database_name}; -- Drop and recreate the function if it already exists IF OBJECT_ID('REGEXP_LIKE') IS NOT NULL DROP FUNCTION [dbo].[REGEXP_LIKE] IF EXISTS(select * from sys.assemblies where name like 'regex_clr') DROP ASSEMBLY regex_clr ; CREATE ASSEMBLY regex_clr FROM 0x{assembly_hex} WITH PERMISSION_SET = SAFE; create function [dbo].[REGEXP_LIKE] ( @input nvarchar(max), @pattern nvarchar(max), @caseSensitive int ) RETURNS INT AS EXTERNAL NAME regex_clr.UserDefinedFunctions.REGEXP_LIKE '''.format( database_name=self.connection.ops.quote_name(database_name), assembly_hex=self.get_regex_clr_assembly_hex(), ).split(';') self.enable_clr() with self.cursor() as cursor: for s in sql: cursor.execute(s) def get_regex_clr_assembly_hex(self): with open(os.path.join(os.path.dirname(__file__), 'regex_clr.dll'), 'rb') as f: return 
binascii.hexlify(f.read()).decode('ascii') mssql-django-1.5/mssql/features.py000066400000000000000000000057431461302131500173120ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): allows_group_by_select_index = False allow_sliced_subqueries_with_in = False can_introspect_autofield = True can_introspect_json_field = False can_introspect_small_integer_field = True can_return_columns_from_insert = True can_return_id_from_insert = True can_return_rows_from_bulk_insert = False can_rollback_ddl = True can_use_chunked_reads = False for_update_after_from = True greatest_least_ignores_nulls = True has_case_insensitive_like = True has_json_object_function = False has_json_operators = False has_native_json_field = False has_native_uuid_field = False has_real_datatype = True has_select_for_update = True has_select_for_update_nowait = True has_select_for_update_skip_locked = True ignores_quoted_identifier_case = True ignores_table_name_case = True order_by_nulls_first = True requires_literal_defaults = True requires_sqlparse_for_splitting = False supports_boolean_expr_in_select_clause = False supports_comparing_boolean_expr = False supports_comments = True supports_covering_indexes = True supports_deferrable_unique_constraints = False supports_expression_indexes = False supports_ignore_conflicts = False supports_index_on_text_field = False supports_json_field_contains = False supports_order_by_nulls_modifier = False supports_over_clause = True supports_paramstyle_pyformat = False supports_primitives_in_json_field = False supports_regex_backreferencing = True supports_sequence_reset = False supports_subqueries_in_group_by = False supports_tablespaces = True supports_temporal_subtraction = True supports_timezones = True supports_transactions = True uses_savepoints = True has_bulk_insert = True supports_nullable_unique_constraints = True supports_partially_nullable_unique_constraints = True supports_partial_indexes = True supports_functions_in_partial_indexes = True supports_default_keyword_in_insert = True supports_expression_defaults = True supports_default_keyword_in_bulk_insert = True supports_stored_generated_columns = True supports_virtual_generated_columns = True @cached_property def has_zoneinfo_database(self): with self.connection.cursor() as cursor: cursor.execute("SELECT TOP 1 1 FROM sys.time_zone_info") return cursor.fetchone() is not None @cached_property def supports_json_field(self): return self.connection.sql_server_version >= 2016 or self.connection.to_azure_sql_db @cached_property def introspected_field_types(self): return { **super().introspected_field_types, "DurationField": "BigIntegerField", } mssql-django-1.5/mssql/functions.py000066400000000000000000000502611461302131500174770ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
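# Illustrative sketch (not part of the backend): Django consults the
# DatabaseFeatures flags defined in mssql/features.py above at query-compile
# time. Application code can use the same flags defensively; the check below
# mirrors the `supports_json_field` cached_property (SQL Server 2016+ or
# Azure SQL Database). The helper name is made up for this demo.
def _demo_require_json_support(connection):
    from django.db import NotSupportedError
    if not connection.features.supports_json_field:
        raise NotSupportedError('JSONField requires SQL Server 2016+ or Azure SQL Database')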
import json from django import VERSION from django.core import validators from django.db import NotSupportedError, connections, transaction from django.db.models import BooleanField, CheckConstraint, Value from django.db.models.expressions import Case, Exists, OrderBy, When, Window from django.db.models.fields import BinaryField, Field from django.db.models.functions import Cast, NthValue, MD5, SHA1, SHA224, SHA256, SHA384, SHA512 from django.db.models.functions.datetime import Now from django.db.models.functions.math import ATan2, Ln, Log, Mod, Round, Degrees, Radians, Power from django.db.models.functions.text import Replace from django.db.models.lookups import In, Lookup from django.db.models.query import QuerySet from django.db.models.sql.query import Query if VERSION >= (3, 1): from django.db.models.fields.json import ( KeyTransform, KeyTransformIn, KeyTransformExact, HasKeyLookup, compile_json_path) if VERSION >= (3, 2): from django.db.models.functions.math import Random DJANGO3 = VERSION[0] >= 3 DJANGO41 = VERSION >= (4, 1) class TryCast(Cast): function = 'TRY_CAST' def sqlserver_cast(self, compiler, connection, **extra_context): if hasattr(self.source_expressions[0], 'lookup_name'): if self.source_expressions[0].lookup_name in ['gt', 'gte', 'lt', 'lte']: return self.as_sql( compiler, connection, template = 'CASE WHEN %(expressions)s THEN 1 ELSE 0 END', **extra_context ) return self.as_sql(compiler, connection, **extra_context) def sqlserver_atan2(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, function='ATN2', **extra_context) def sqlserver_log(self, compiler, connection, **extra_context): clone = self.copy() clone.set_source_expressions(self.get_source_expressions()[::-1]) return clone.as_sql(compiler, connection, **extra_context) def sqlserver_ln(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, function='LOG', **extra_context) def sqlserver_replace(self, compiler, connection, **extra_context): current_db = "CONVERT(varchar, (SELECT DB_NAME()))" with connection.cursor() as cursor: cursor.execute("SELECT CONVERT(varchar, DATABASEPROPERTYEX(%s, 'collation'))" % current_db) default_collation = cursor.fetchone()[0] current_collation = default_collation.replace('_CI', '_CS') return self.as_sql( compiler, connection, function='REPLACE', template = 'REPLACE(%s COLLATE %s)' % ('%(expressions)s', current_collation), **extra_context ) def sqlserver_degrees(self, compiler, connection, **extra_context): return self.as_sql( compiler, connection, function='DEGREES', template= 'DEGREES(CONVERT(float, %(expressions)s))', **extra_context ) def sqlserver_radians(self, compiler, connection, **extra_context): return self.as_sql( compiler, connection, function='RADIANS', template= 'RADIANS(CONVERT(float, %(expressions)s))', **extra_context ) def sqlserver_power(self, compiler, connection, **extra_context): expr = self.get_source_expressions() number_a = compiler.compile(expr[0]) number_b = compiler.compile(expr[1]) return self.as_sql( compiler, connection, function='POWER', template = 'POWER(CONVERT(float,{a}),{b})'.format(a=number_a[0], b=number_b[0]), **extra_context ) def sqlserver_mod(self, compiler, connection): # MSSQL doesn't have keyword MOD expr = self.get_source_expressions() number_a = compiler.compile(expr[0]) number_b = compiler.compile(expr[1]) return self.as_sql( compiler, connection, function="", template='(ABS({a}) - FLOOR(ABS({a}) / ABS({b})) * ABS({b})) * SIGN({a}) * SIGN({b})'.format( a=number_a[0], 
b=number_b[0]), arg_joiner="" ) def sqlserver_nth_value(self, compiler, connection, **extra_content): raise NotSupportedError('This backend does not support the NthValue function') def sqlserver_round(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, template='%(function)s(%(expressions)s, 0)', **extra_context) def sqlserver_random(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, function='RAND', **extra_context) def sqlserver_window(self, compiler, connection, template=None): # MSSQL window functions require an OVER clause with ORDER BY if VERSION < (4, 1) and self.order_by is None: self.order_by = Value('SELECT NULL') return self.as_sql(compiler, connection, template) def sqlserver_exists(self, compiler, connection, template=None, **extra_context): # MS SQL doesn't allow EXISTS() in the SELECT list, so wrap it with a # CASE WHEN expression. Change the template since the When expression # requires a left hand side (column) to compare against. sql, params = self.as_sql(compiler, connection, template, **extra_context) sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params def sqlserver_now(self, compiler, connection, **extra_context): return self.as_sql( compiler, connection, template="SYSDATETIME()", **extra_context ) def sqlserver_lookup(self, compiler, connection): # MSSQL doesn't allow EXISTS() to be compared to another expression # unless it's wrapped in a CASE WHEN. wrapped = False exprs = [] for expr in (self.lhs, self.rhs): if isinstance(expr, Exists): expr = Case(When(expr, then=True), default=False, output_field=BooleanField()) wrapped = True exprs.append(expr) lookup = type(self)(*exprs) if wrapped else self return lookup.as_sql(compiler, connection) def sqlserver_orderby(self, compiler, connection): template = None if self.nulls_last: template = 'CASE WHEN %(expression)s IS NULL THEN 1 ELSE 0 END, %(expression)s %(ordering)s' if self.nulls_first: template = 'CASE WHEN %(expression)s IS NULL THEN 0 ELSE 1 END, %(expression)s %(ordering)s' copy = self.copy() # Prevent OrderBy.as_sql() from modifying supplied templates copy.nulls_first = False copy.nulls_last = False # MSSQL doesn't allow ORDER BY EXISTS() unless it's wrapped in a CASE WHEN. if isinstance(self.expression, Exists): copy.expression = Case( When(self.expression, then=True), default=False, output_field=BooleanField(), ) return copy.as_sql(compiler, connection, template=template) def split_parameter_list_as_sql(self, compiler, connection): if connection.vendor == 'microsoft': return mssql_split_parameter_list_as_sql(self, compiler, connection) else: return in_split_parameter_list_as_sql(self, compiler, connection) def mssql_split_parameter_list_as_sql(self, compiler, connection): # Insert In clause parameters 1000 at a time into a temp table. 
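    # Illustrative shape of the generated statements for, e.g., 1500
    # right-hand-side parameters (identifiers shortened):
    #   CREATE TABLE #Temp_params (params <lhs db_type> ...)
    #   INSERT INTO [#Temp_params] ([params]) VALUES (%s), (%s), ...  -- 1000 rows
    #   INSERT INTO [#Temp_params] ([params]) VALUES (%s), (%s), ...  -- 500 rows
    # and the lookup itself becomes: <lhs> IN (SELECT params from #Temp_params)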
lhs, _ = self.process_lhs(compiler, connection) _, rhs_params = self.batch_process_rhs(compiler, connection) with connection.cursor() as cursor: cursor.execute("IF OBJECT_ID('tempdb.dbo.#Temp_params', 'U') IS NOT NULL DROP TABLE #Temp_params; ") parameter_data_type = self.lhs.field.db_type(connection) Temp_table_collation = 'COLLATE DATABASE_DEFAULT' if 'char' in parameter_data_type else '' cursor.execute(f"CREATE TABLE #Temp_params (params {parameter_data_type} {Temp_table_collation})") for offset in range(0, len(rhs_params), 1000): sqls_params = rhs_params[offset: offset + 1000] sql = "INSERT INTO [#Temp_params] ([params]) VALUES " + ', '.join(['(%s)'] * len(sqls_params)) cursor.execute(sql, sqls_params) in_clause = lhs + ' IN ' + '(SELECT params from #Temp_params)' return in_clause, () def unquote_json_rhs(rhs_params): for value in rhs_params: value = json.loads(value) if not isinstance(value, (list, dict)): rhs_params = [param.replace('"', '') for param in rhs_params] return rhs_params def json_KeyTransformExact_process_rhs(self, compiler, connection): rhs, rhs_params = key_transform_exact_process_rhs(self, compiler, connection) if connection.vendor == 'microsoft': rhs_params = unquote_json_rhs(rhs_params) return rhs, rhs_params def json_KeyTransformIn(self, compiler, connection): lhs, _ = super(KeyTransformIn, self).process_lhs(compiler, connection) rhs, rhs_params = super(KeyTransformIn, self).process_rhs(compiler, connection) return (lhs + ' IN ' + rhs, unquote_json_rhs(rhs_params)) def json_HasKeyLookup(self, compiler, connection): # Process JSON path from the left-hand side. if isinstance(self.lhs, KeyTransform): lhs, _, lhs_key_transforms = self.lhs.preprocess_lhs(compiler, connection) lhs_json_path = compile_json_path(lhs_key_transforms) else: lhs, _ = self.process_lhs(compiler, connection) lhs_json_path = '$' if connection.sql_server_version >= 2022: sql = "JSON_PATH_EXISTS(%s, '%%s') > 0" % lhs else: sql = lhs + ' IN (SELECT ' + lhs + ' FROM ' + self.lhs.output_field.model._meta.db_table + \ ' CROSS APPLY OPENJSON(' + lhs + ') WITH ( [json_path_value] char(1) \'%s\') WHERE [json_path_value] IS NOT NULL)' # Process JSON path from the right-hand side. rhs = self.rhs rhs_params = [] if not isinstance(rhs, (list, tuple)): rhs = [rhs] for key in rhs: if isinstance(key, KeyTransform): *_, rhs_key_transforms = key.preprocess_lhs(compiler, connection) else: rhs_key_transforms = [key] if VERSION >= (4, 1): *rhs_key_transforms, final_key = rhs_key_transforms rhs_json_path = compile_json_path(rhs_key_transforms, include_root=False) rhs_json_path += self.compile_json_path_final_key(final_key) rhs_params.append(lhs_json_path + rhs_json_path) else: rhs_params.append('%s%s' % ( lhs_json_path, compile_json_path(rhs_key_transforms, include_root=False), )) # Add condition for each key. 
if self.logical_operator: sql = '(%s)' % self.logical_operator.join([sql] * len(rhs_params)) return sql % tuple(rhs_params), [] def BinaryField_init(self, *args, **kwargs): # Add max_length option for BinaryField, default to max kwargs.setdefault('editable', False) Field.__init__(self, *args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) else: self.max_length = 'max' def _get_check_sql(self, model, schema_editor): if VERSION >= (3, 1): query = Query(model=model, alias_cols=False) else: query = Query(model=model) where = query.build_where(self.check) compiler = query.get_compiler(connection=schema_editor.connection) sql, params = where.as_sql(compiler, schema_editor.connection) if schema_editor.connection.vendor == 'microsoft': try: for p in params: str(p).encode('ascii') except UnicodeEncodeError: sql = sql.replace('%s', 'N%s') return sql % tuple(schema_editor.quote_value(p) for p in params) def bulk_update_with_default(self, objs, fields, batch_size=None, default=None): """ Update the given fields in each of the given objects in the database. When bulk_update all fields to null, SQL Server require that at least one of the result expressions in a CASE specification must be an expression other than the NULL constant. Patched with a default value 0. The user can also pass a custom default value for CASE statement. """ if batch_size is not None and batch_size <= 0: raise ValueError('Batch size must be a positive integer.') if not fields: raise ValueError('Field names must be given to bulk_update().') objs = tuple(objs) if any(obj.pk is None for obj in objs): raise ValueError('All bulk_update() objects must have a primary key set.') fields = [self.model._meta.get_field(name) for name in fields] if any(not f.concrete or f.many_to_many for f in fields): raise ValueError('bulk_update() can only be used with concrete fields.') if any(f.primary_key for f in fields): raise ValueError('bulk_update() cannot be used with primary key fields.') if not objs: return 0 if DJANGO41: for obj in objs: obj._prepare_related_fields_for_save( operation_name="bulk_update", fields=fields ) # PK is used twice in the resulting update query, once in the filter # and once in the WHEN. Each field will also have one CAST. 
        self._for_write = True
        connection = connections[self.db]
        max_batch_size = connection.ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
        batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
        requires_casting = connection.features.requires_casted_case_in_updates
        batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
        updates = []
        for batch_objs in batches:
            update_kwargs = {}
            for field in fields:
                value_none_counter = 0
                when_statements = []
                for obj in batch_objs:
                    attr = getattr(obj, field.attname)
                    if not hasattr(attr, "resolve_expression"):
                        if attr is None:
                            value_none_counter += 1
                        attr = Value(attr, output_field=field)
                    when_statements.append(When(pk=obj.pk, then=attr))
                if connection.vendor == 'microsoft' and value_none_counter == len(when_statements):
                    # We don't need a case statement if we are setting everything to None
                    case_statement = Value(None)
                else:
                    case_statement = Case(*when_statements, output_field=field)
                if requires_casting:
                    case_statement = Cast(case_statement, output_field=field)
                update_kwargs[field.attname] = case_statement
            updates.append(([obj.pk for obj in batch_objs], update_kwargs))
        rows_updated = 0
        queryset = self.using(self.db)
        with transaction.atomic(using=self.db, savepoint=False):
            for pks, update_kwargs in updates:
                rows_updated += queryset.filter(pk__in=pks).update(**update_kwargs)
        return rows_updated


def sqlserver_md5(self, compiler, connection, **extra_context):
    # UTF-8 support added in SQL Server 2019
    if (connection.sql_server_version < 2019):
        raise NotSupportedError("Hashing is not supported on this version of SQL Server. Upgrade to 2019 or above")
    column_name = self.get_source_fields()[0].name
    with connection.cursor() as cursor:
        cursor.execute("SELECT MAX(DATALENGTH(%s)) FROM %s" % (column_name, compiler.query.model._meta.db_table))
        max_size = cursor.fetchone()[0]
    # Collation of SQL Server by default is UTF-16 but Django always assumes UTF-8 encoding
    # https://docs.djangoproject.com/en/4.0/ref/unicode/#general-string-handling
    return self.as_sql(
        compiler,
        connection,
        template="LOWER(CONVERT(CHAR(32), HASHBYTES('%s', CAST(%s COLLATE Latin1_General_100_CI_AI_SC_UTF8 AS VARCHAR(%s))), 2))" % ('%(function)s', column_name, max_size),
        **extra_context,
    )


def sqlserver_sha1(self, compiler, connection, **extra_context):
    # UTF-8 support added in SQL Server 2019
    if (connection.sql_server_version < 2019):
        raise NotSupportedError("Hashing is not supported on this version of SQL Server. Upgrade to 2019 or above")
    column_name = self.get_source_fields()[0].name
    # Collation of SQL Server by default is UTF-16 but Django always assumes UTF-8 encoding
    # https://docs.djangoproject.com/en/4.0/ref/unicode/#general-string-handling
    with connection.cursor() as cursor:
        cursor.execute("SELECT MAX(DATALENGTH(%s)) FROM %s" % (column_name, compiler.query.model._meta.db_table))
        max_size = cursor.fetchone()[0]
    return self.as_sql(
        compiler,
        connection,
        template="LOWER(CONVERT(CHAR(40), HASHBYTES('%s', CAST(%s COLLATE Latin1_General_100_CI_AI_SC_UTF8 AS VARCHAR(%s))), 2))" % ('%(function)s', column_name, max_size),
        **extra_context,
    )


def sqlserver_sha224(self, compiler, connection, **extra_context):
    raise NotSupportedError("SHA224 is not supported on SQL Server.")


def sqlserver_sha256(self, compiler, connection, **extra_context):
    # UTF-8 support added in SQL Server 2019
    if (connection.sql_server_version < 2019):
        raise NotSupportedError("Hashing is not supported on this version of SQL Server. Upgrade to 2019 or above")
    column_name = self.get_source_fields()[0].name
    # Collation of SQL Server by default is UTF-16 but Django always assumes UTF-8 encoding
    # https://docs.djangoproject.com/en/4.0/ref/unicode/#general-string-handling
    with connection.cursor() as cursor:
        cursor.execute("SELECT MAX(DATALENGTH(%s)) FROM %s" % (column_name, compiler.query.model._meta.db_table))
        max_size = cursor.fetchone()[0]
    return self.as_sql(
        compiler,
        connection,
        template="LOWER(CONVERT(CHAR(64), HASHBYTES('SHA2_256', CAST(%s COLLATE Latin1_General_100_CI_AI_SC_UTF8 AS VARCHAR(%s))), 2))" % (column_name, max_size),
        **extra_context,
    )


def sqlserver_sha384(self, compiler, connection, **extra_context):
    raise NotSupportedError("SHA384 is not supported on SQL Server.")


def sqlserver_sha512(self, compiler, connection, **extra_context):
    # UTF-8 support added in SQL Server 2019
    if (connection.sql_server_version < 2019):
        raise NotSupportedError("Hashing is not supported on this version of SQL Server. Upgrade to 2019 or above")
    column_name = self.get_source_fields()[0].name
    # Collation of SQL Server by default is UTF-16 but Django always assumes UTF-8 encoding
    # https://docs.djangoproject.com/en/4.0/ref/unicode/#general-string-handling
    with connection.cursor() as cursor:
        cursor.execute("SELECT MAX(DATALENGTH(%s)) FROM %s" % (column_name, compiler.query.model._meta.db_table))
        max_size = cursor.fetchone()[0]
    return self.as_sql(
        compiler,
        connection,
        template="LOWER(CONVERT(CHAR(128), HASHBYTES('SHA2_512', CAST(%s COLLATE Latin1_General_100_CI_AI_SC_UTF8 AS VARCHAR(%s))), 2))" % (column_name, max_size),
        **extra_context,
    )


# `as_microsoft` called by django.db.models.sql.compiler based on connection.vendor
ATan2.as_microsoft = sqlserver_atan2

# Need copy of old In.split_parameter_list_as_sql for other backends to call
in_split_parameter_list_as_sql = In.split_parameter_list_as_sql
In.split_parameter_list_as_sql = split_parameter_list_as_sql

if VERSION >= (3, 1):
    KeyTransformIn.as_microsoft = json_KeyTransformIn
    # Need copy of old KeyTransformExact.process_rhs to call later
    key_transform_exact_process_rhs = KeyTransformExact.process_rhs
    KeyTransformExact.process_rhs = json_KeyTransformExact_process_rhs
    HasKeyLookup.as_microsoft = json_HasKeyLookup

Cast.as_microsoft = sqlserver_cast
Degrees.as_microsoft = sqlserver_degrees
Radians.as_microsoft = sqlserver_radians
Power.as_microsoft = sqlserver_power
Ln.as_microsoft = sqlserver_ln
Log.as_microsoft = sqlserver_log
Mod.as_microsoft = sqlserver_mod
NthValue.as_microsoft = sqlserver_nth_value
Round.as_microsoft = sqlserver_round
Window.as_microsoft = sqlserver_window
Replace.as_microsoft = sqlserver_replace
Now.as_microsoft = sqlserver_now
MD5.as_microsoft = sqlserver_md5
SHA1.as_microsoft = sqlserver_sha1
SHA224.as_microsoft = sqlserver_sha224
SHA256.as_microsoft = sqlserver_sha256
SHA384.as_microsoft = sqlserver_sha384
SHA512.as_microsoft = sqlserver_sha512
BinaryField.__init__ = BinaryField_init
CheckConstraint._get_check_sql = _get_check_sql

if VERSION >= (3, 2):
    Random.as_microsoft = sqlserver_random

if DJANGO3:
    Lookup.as_microsoft = sqlserver_lookup
else:
    Exists.as_microsoft = sqlserver_exists
    OrderBy.as_microsoft = sqlserver_orderby

QuerySet.bulk_update = bulk_update_with_default
mssql-django-1.5/mssql/introspection.py000066400000000000000000000475421461302131500203770ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.
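# Hedged sketch (illustrative, not shipped with the backend): the same
# `as_microsoft` hook used throughout mssql/functions.py above can register a
# vendor-specific translation for any expression; Django dispatches it via
# `as_{connection.vendor}`. `Reverse` already maps cleanly to T-SQL's
# REVERSE(), so this exists only to show the registration pattern.
def _demo_register_as_microsoft():
    from django.db.models.functions import Reverse

    def sqlserver_reverse(self, compiler, connection, **extra_context):
        return self.as_sql(compiler, connection, function='REVERSE', **extra_context)

    Reverse.as_microsoft = sqlserver_reverse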
from django.db import DatabaseError import pyodbc as Database from collections import namedtuple from django import VERSION from django.db.backends.base.introspection import BaseDatabaseIntrospection from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo from django.db.backends.base.introspection import TableInfo as BaseTableInfo from django.db.models.indexes import Index from django.conf import settings SQL_AUTOFIELD = -777555 SQL_BIGAUTOFIELD = -777444 SQL_SMALLAUTOFIELD = -777333 SQL_TIMESTAMP_WITH_TIMEZONE = -155 FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("comment",)) TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) def get_schema_name(): return getattr(settings, 'SCHEMA_TO_INSPECT', 'SCHEMA_NAME()') class DatabaseIntrospection(BaseDatabaseIntrospection): # Map type codes to Django Field types. data_types_reverse = { SQL_AUTOFIELD: 'AutoField', SQL_BIGAUTOFIELD: 'BigAutoField', SQL_SMALLAUTOFIELD: 'SmallAutoField', Database.SQL_BIGINT: 'BigIntegerField', # Database.SQL_BINARY: , Database.SQL_BIT: 'BooleanField', Database.SQL_CHAR: 'CharField', Database.SQL_DECIMAL: 'DecimalField', Database.SQL_DOUBLE: 'FloatField', Database.SQL_FLOAT: 'FloatField', Database.SQL_GUID: 'TextField', Database.SQL_INTEGER: 'IntegerField', Database.SQL_LONGVARBINARY: 'BinaryField', # Database.SQL_LONGVARCHAR: , Database.SQL_NUMERIC: 'DecimalField', Database.SQL_REAL: 'FloatField', Database.SQL_SMALLINT: 'SmallIntegerField', Database.SQL_SS_TIME2: 'TimeField', Database.SQL_TINYINT: 'SmallIntegerField', Database.SQL_TYPE_DATE: 'DateField', Database.SQL_TYPE_TIME: 'TimeField', Database.SQL_TYPE_TIMESTAMP: 'DateTimeField', SQL_TIMESTAMP_WITH_TIMEZONE: 'DateTimeField', Database.SQL_VARBINARY: 'BinaryField', Database.SQL_VARCHAR: 'TextField', Database.SQL_WCHAR: 'CharField', Database.SQL_WLONGVARCHAR: 'TextField', Database.SQL_WVARCHAR: 'TextField', } ignored_tables = [] def get_field_type(self, data_type, description): field_type = super().get_field_type(data_type, description) # the max nvarchar length is described as 0 or 2**30-1 # (it depends on the driver) size = description.internal_size if field_type == 'CharField': if size == 0 or size >= 2**30 - 1: field_type = "TextField" elif field_type == 'TextField': if size > 0 and size < 2**30 - 1: field_type = 'CharField' return field_type def get_table_list(self, cursor): """ Returns a list of table and view names in the current database. 
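        On Django 4.2+ (where table comments are supported) each entry is this
        module's three-field TableInfo, e.g. ('django_migrations', 't', None);
        on older versions it is the plain two-field TableInfo.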
""" if VERSION >= (4, 2) and self.connection.features.supports_comments: sql = """SELECT TABLE_NAME, TABLE_TYPE, CAST(ep.value AS VARCHAR) AS COMMENT FROM INFORMATION_SCHEMA.TABLES i LEFT JOIN sys.tables t ON t.name = i.TABLE_NAME LEFT JOIN sys.extended_properties ep ON t.object_id = ep.major_id AND ((ep.name = 'MS_DESCRIPTION' AND ep.minor_id = 0) OR ep.value IS NULL) WHERE i.TABLE_SCHEMA = %s""" % ( get_schema_name()) else: sql = 'SELECT TABLE_NAME, TABLE_TYPE FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = %s' % (get_schema_name()) cursor.execute(sql) types = {'BASE TABLE': 't', 'VIEW': 'v'} if VERSION >= (4, 2) and self.connection.features.supports_comments: return [TableInfo(row[0], types.get(row[1]), row[2]) for row in cursor.fetchall() if row[0] not in self.ignored_tables] else: return [BaseTableInfo(row[0], types.get(row[1])) for row in cursor.fetchall() if row[0] not in self.ignored_tables] def _is_auto_field(self, cursor, table_name, column_name): """ Checks whether column is Identity """ # COLUMNPROPERTY: http://msdn2.microsoft.com/en-us/library/ms174968.aspx # from django.db import connection # cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", # (connection.ops.quote_name(table_name), column_name)) cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", (self.connection.ops.quote_name(table_name), column_name)) return cursor.fetchall()[0][0] def get_table_description(self, cursor, table_name, identity_check=True): """Returns a description of the table, with DB-API cursor.description interface. The 'auto_check' parameter has been added to the function argspec. If set to True, the function will check each of the table's fields for the IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField). When an integer field is found with an IDENTITY property, it is given a custom field number of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict. When a bigint field is found with an IDENTITY property, it is given a custom field number of SQL_BIGAUTOFIELD, which maps to the 'BigAutoField' value in the DATA_TYPES_REVERSE dict. 
""" # map pyodbc's cursor.columns to db-api cursor description columns = [[c[3], c[4], c[6], c[6], c[6], c[8], c[10], c[12]] for c in cursor.columns(table=table_name)] if not columns: raise DatabaseError(f"Table {table_name} does not exist.") items = [] for column in columns: if VERSION >= (3, 2): if self.connection.sql_server_version >= 2019: sql = """SELECT collation_name FROM sys.columns c inner join sys.tables t on c.object_id = t.object_id WHERE t.name = '%s' and c.name = '%s' """ % (table_name, column[0]) cursor.execute(sql) collation_name = cursor.fetchone() column.append(collation_name[0] if collation_name else '') else: column.append('') if VERSION >= (4, 2) and self.connection.features.supports_comments: sql = """select CAST(ep.value AS VARCHAR) AS COMMENT FROM sys.columns c INNER JOIN sys.tables t ON c.object_id = t.object_id INNER JOIN sys.extended_properties ep ON c.object_id=ep.major_id AND ep.minor_id = c.column_id WHERE t.name = '%s' AND c.name = '%s' AND ep.name = 'MS_Description' """ % (table_name, column[0]) cursor.execute(sql) comment = cursor.fetchone() column.append(comment[0] if comment else '') if identity_check and self._is_auto_field(cursor, table_name, column[0]): if column[1] == Database.SQL_BIGINT: column[1] = SQL_BIGAUTOFIELD elif column[1] == Database.SQL_SMALLINT: column[1] = SQL_SMALLAUTOFIELD else: column[1] = SQL_AUTOFIELD if column[1] == Database.SQL_WVARCHAR and column[3] < 4000: column[1] = Database.SQL_WCHAR # Remove surrounding parentheses for default values if column[7]: default_value = column[7] start = 0 end = -1 for _ in range(2): if default_value[start] == '(' and default_value[end] == ')': start += 1 end -= 1 column[7] = default_value[start:end + 1] if VERSION >= (4, 2) and self.connection.features.supports_comments: items.append(FieldInfo(*column)) else: items.append(BaseFieldInfo(*column)) return items def get_sequences(self, cursor, table_name, table_fields=()): cursor.execute(f""" SELECT c.name FROM sys.columns c INNER JOIN sys.tables t ON c.object_id = t.object_id WHERE t.schema_id = SCHEMA_ID({get_schema_name()}) AND t.name = %s AND c.is_identity = 1""", [table_name]) # SQL Server allows only one identity column per table # https://docs.microsoft.com/en-us/sql/t-sql/statements/create-table-transact-sql-identity-property row = cursor.fetchone() return [{'table': table_name, 'column': row[0]}] if row else [] def get_relations(self, cursor, table_name): """ Returns a dictionary of {field_name: (field_name_other_table, other_table)} representing all relationships to the given table. 
""" # CONSTRAINT_COLUMN_USAGE: http://msdn2.microsoft.com/en-us/library/ms174431.aspx # CONSTRAINT_TABLE_USAGE: http://msdn2.microsoft.com/en-us/library/ms179883.aspx # REFERENTIAL_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms179987.aspx # TABLE_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms181757.aspx sql = f""" SELECT e.COLUMN_NAME AS column_name, c.TABLE_NAME AS referenced_table_name, d.COLUMN_NAME AS referenced_column_name FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS a INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS b ON a.CONSTRAINT_NAME = b.CONSTRAINT_NAME AND a.TABLE_SCHEMA = b.CONSTRAINT_SCHEMA INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_TABLE_USAGE AS c ON b.UNIQUE_CONSTRAINT_NAME = c.CONSTRAINT_NAME AND b.CONSTRAINT_SCHEMA = c.CONSTRAINT_SCHEMA INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS d ON c.CONSTRAINT_NAME = d.CONSTRAINT_NAME AND c.CONSTRAINT_SCHEMA = d.CONSTRAINT_SCHEMA INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS e ON a.CONSTRAINT_NAME = e.CONSTRAINT_NAME AND a.TABLE_SCHEMA = e.TABLE_SCHEMA WHERE a.TABLE_SCHEMA = {get_schema_name()} AND a.TABLE_NAME = %s AND a.CONSTRAINT_TYPE = 'FOREIGN KEY'""" cursor.execute(sql, (table_name,)) return dict([[item[0], (item[2], item[1])] for item in cursor.fetchall()]) def get_key_columns(self, cursor, table_name): """ Returns a list of (column_name, referenced_table_name, referenced_column_name) for all key columns in given table. """ key_columns = [] cursor.execute(f""" SELECT c.name AS column_name, rt.name AS referenced_table_name, rc.name AS referenced_column_name FROM sys.foreign_key_columns fk INNER JOIN sys.tables t ON t.object_id = fk.parent_object_id INNER JOIN sys.columns c ON c.object_id = t.object_id AND c.column_id = fk.parent_column_id INNER JOIN sys.tables rt ON rt.object_id = fk.referenced_object_id INNER JOIN sys.columns rc ON rc.object_id = rt.object_id AND rc.column_id = fk.referenced_column_id WHERE t.schema_id = SCHEMA_ID({get_schema_name()}) AND t.name = %s""", [table_name]) key_columns.extend([tuple(row) for row in cursor.fetchall()]) return key_columns def get_constraints(self, cursor, table_name): """ Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. Returns a dict mapping constraint names to their attributes, where attributes is a dict with keys: * columns: List of columns this covers * primary_key: True if primary key, False otherwise * unique: True if this is a unique constraint, False otherwise * foreign_key: (table, column) of target, or None * check: True if check constraint, False otherwise * index: True if index, False otherwise. * orders: The order (ASC/DESC) defined for the columns of indexes * type: The type of the index (btree, hash, etc.) 
""" constraints = {} # Loop over the key table, collecting things as constraints # This will get PKs, FKs, and uniques, but not CHECK cursor.execute(f""" SELECT kc.constraint_name, kc.column_name, tc.constraint_type, fk.referenced_table_name, fk.referenced_column_name FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kc INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc ON kc.table_schema = tc.table_schema AND kc.table_name = tc.table_name AND kc.constraint_name = tc.constraint_name LEFT OUTER JOIN ( SELECT ps.name AS table_schema, pt.name AS table_name, pc.name AS column_name, rt.name AS referenced_table_name, rc.name AS referenced_column_name FROM sys.foreign_key_columns fkc INNER JOIN sys.tables pt ON fkc.parent_object_id = pt.object_id INNER JOIN sys.schemas ps ON pt.schema_id = ps.schema_id INNER JOIN sys.columns pc ON fkc.parent_object_id = pc.object_id AND fkc.parent_column_id = pc.column_id INNER JOIN sys.tables rt ON fkc.referenced_object_id = rt.object_id INNER JOIN sys.schemas rs ON rt.schema_id = rs.schema_id INNER JOIN sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id ) fk ON kc.table_schema = fk.table_schema AND kc.table_name = fk.table_name AND kc.column_name = fk.column_name WHERE kc.table_schema = {get_schema_name()} AND kc.table_name = %s ORDER BY kc.constraint_name ASC, kc.ordinal_position ASC """, [table_name]) for constraint, column, kind, ref_table, ref_column in cursor.fetchall(): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { "columns": [], "primary_key": kind.lower() == "primary key", # In the sys.indexes table, primary key indexes have is_unique_constraint as false, # but is_unique as true. "unique": kind.lower() in ["primary key", "unique"], "unique_constraint": kind.lower() == "unique", "foreign_key": (ref_table, ref_column) if kind.lower() == "foreign key" else None, "check": False, # Potentially misleading: primary key and unique constraints still have indexes attached to them. # Should probably be updated with the additional info from the sys.indexes table we fetch later on. 
"index": False, "default": False, } # Record the details constraints[constraint]['columns'].append(column) # Now get CHECK constraint columns cursor.execute(f""" SELECT kc.constraint_name, kc.column_name FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS kc JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS c ON kc.table_schema = c.table_schema AND kc.table_name = c.table_name AND kc.constraint_name = c.constraint_name WHERE c.constraint_type = 'CHECK' AND kc.table_schema = {get_schema_name()} AND kc.table_name = %s """, [table_name]) for constraint, column in cursor.fetchall(): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { "columns": [], "primary_key": False, "unique": False, "unique_constraint": False, "foreign_key": None, "check": True, "index": False, "default": False, } # Record the details constraints[constraint]['columns'].append(column) # Now get DEFAULT constraint columns cursor.execute(""" SELECT [name], COL_NAME([parent_object_id], [parent_column_id]) FROM [sys].[default_constraints] WHERE OBJECT_NAME([parent_object_id]) = %s """, [table_name]) for constraint, column in cursor.fetchall(): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { "columns": [], "primary_key": False, "unique": False, "unique_constraint": False, "foreign_key": None, "check": False, "index": False, "default": True, } # Record the details constraints[constraint]['columns'].append(column) # Now get indexes cursor.execute(f""" SELECT i.name AS index_name, i.is_unique, i.is_unique_constraint, i.is_primary_key, i.type, i.type_desc, ic.is_descending_key, c.name AS column_name FROM sys.tables AS t INNER JOIN sys.schemas AS s ON t.schema_id = s.schema_id INNER JOIN sys.indexes AS i ON t.object_id = i.object_id INNER JOIN sys.index_columns AS ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id INNER JOIN sys.columns AS c ON ic.object_id = c.object_id AND ic.column_id = c.column_id WHERE t.schema_id = SCHEMA_ID({get_schema_name()}) AND t.name = %s ORDER BY i.index_id ASC, ic.index_column_id ASC """, [table_name]) indexes = {} for index, unique, unique_constraint, primary, type_, desc, order, column in cursor.fetchall(): if index not in indexes: indexes[index] = { "columns": [], "primary_key": primary, "unique": unique, "unique_constraint": unique_constraint, "foreign_key": None, "check": False, "default": False, "index": True, "orders": [], "type": Index.suffix if type_ in (1, 2) else desc.lower(), } indexes[index]["columns"].append(column) indexes[index]["orders"].append("DESC" if order == 1 else "ASC") for index, constraint in indexes.items(): if index not in constraints: constraints[index] = constraint return constraints def get_primary_key_column(self, cursor, table_name): cursor.execute("SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = N'%s'" % table_name) row = cursor.fetchone() if row is None: raise ValueError("Table %s does not exist" % table_name) return super().get_primary_key_column(cursor, table_name) 
mssql-django-1.5/mssql/management/000077500000000000000000000000001461302131500172255ustar00rootroot00000000000000mssql-django-1.5/mssql/management/__init__.py000066400000000000000000000000001461302131500213240ustar00rootroot00000000000000mssql-django-1.5/mssql/management/commands/000077500000000000000000000000001461302131500210265ustar00rootroot00000000000000mssql-django-1.5/mssql/management/commands/__init__.py000066400000000000000000000000001461302131500231250ustar00rootroot00000000000000mssql-django-1.5/mssql/management/commands/inspectdb.py000066400000000000000000000012201461302131500233460ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. from django.core.management.commands.inspectdb import Command as inspectdb_Command from django.conf import settings class Command(inspectdb_Command): def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( '--schema', default='dbo', help='Choose the database schema to inspect, default is [dbo]', ) def handle(self, *args, **options): if options["schema"]: settings.SCHEMA_TO_INSPECT = "'" + options["schema"] + "'" return super().handle(*args, **options) mssql-django-1.5/mssql/management/commands/install_regex_clr.py000066400000000000000000000015221461302131500251000ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. # Add regex support in SQLServer # Code taken from django-mssql (see https://bitbucket.org/Manfre/django-mssql) from django.core.management.base import BaseCommand from django.db import connection class Command(BaseCommand): help = "Installs the regex_clr.dll assembly with the database" requires_model_validation = False args = 'database_name' def add_arguments(self, parser): parser.add_argument('database_name') def handle(self, *args, **options): database_name = options['database_name'] if not database_name: self.print_help('manage.py', 'install_regex_clr') return connection.creation.install_regex_clr(database_name) print('Installed regex_clr to database %s' % database_name) mssql-django-1.5/mssql/operations.py000066400000000000000000000674031461302131500176600ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
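# Usage sketch for the two management commands defined in mssql/management/commands
# above (the database and schema names here are illustrative):
#
#     python manage.py inspectdb --schema sales       # inspect tables in [sales] rather than the default [dbo]
#     python manage.py install_regex_clr my_database  # install the regex_clr.dll assembly into 'my_database'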
import datetime
import uuid
import warnings
import sys

from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.models.expressions import Exists, ExpressionWrapper, RawSQL
from django.db.models.sql.where import WhereNode
from django.utils import timezone
from django.utils.encoding import force_str
from django import VERSION as django_version

import pytz

DJANGO41 = django_version >= (4, 1)


class DatabaseOperations(BaseDatabaseOperations):
    compiler_module = 'mssql.compiler'

    cast_char_field_without_max_length = 'nvarchar(max)'

    def max_in_list_size(self):
        # The driver might add a few parameters, so
        # choose a reasonable number less than the 2100 limit
        return 2048

    def _convert_field_to_tz(self, field_name, tzname):
        if tzname and settings.USE_TZ and self.connection.timezone_name != tzname:
            offset = self._get_utcoffset(tzname)
            field_name = 'DATEADD(second, %d, %s)' % (offset, field_name)
        return field_name

    def _convert_sql_to_tz(self, sql, params, tzname):
        if tzname and settings.USE_TZ and self.connection.timezone_name != tzname:
            offset = self._get_utcoffset(tzname)
            sql = 'DATEADD(second, %d, %s)' % (offset, sql)
        return sql, params

    def _get_utcoffset(self, tzname):
        """
        Returns UTC offset for given time zone in seconds
        """
        # SQL Server has no built-in support for tz database, see:
        # http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx
        zone = pytz.timezone(tzname)
        # no way to take DST into account at this point
        now = datetime.datetime.now()
        delta = zone.localize(now, is_dst=False).utcoffset()
        return delta.days * 86400 + delta.seconds - zone.dst(now).seconds

    def bulk_batch_size(self, fields, objs):
        """
        Returns the maximum allowed batch size for the backend. The fields
        are the fields going to be inserted in the batch, the objs contains
        all the objects to be inserted.
        """
        max_insert_rows = 1000
        fields_len = len(fields)
        if fields_len == 0:
            # Required for empty model
            # (bulk_create.tests.BulkCreateTests.test_empty_model)
            return max_insert_rows
        # MSSQL allows a query to have 2100 parameters but some parameters are
        # taken up defining `NVARCHAR` parameters to store the query text and
        # query parameters for the `sp_executesql` call. This should only take
        # up 2 parameters but I've had this error when sending 2098 parameters.
        max_query_params = 2050
        # inserts are capped at 1000 rows regardless of number of query params.
        # bulk_update CASE...WHEN...THEN statement sometimes takes 2 parameters per field
        return min(max_insert_rows, max_query_params // fields_len // 2)

    def bulk_insert_sql(self, fields, placeholder_rows):
        placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
        values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
        return "VALUES " + values_sql

    def cache_key_culling_sql(self):
        """
        Returns a SQL query that retrieves the first cache key greater than
        the smallest. This is used by the 'db' cache backend to determine
        where to start culling.
""" return "SELECT cache_key FROM (SELECT cache_key, " \ "ROW_NUMBER() OVER (ORDER BY cache_key) AS rn FROM %s" \ ") cache WHERE rn = %%s + 1" def combine_duration_expression(self, connector, sub_expressions): lhs, rhs = sub_expressions sign = ' * -1' if connector == '-' else '' if lhs.startswith('DATEADD'): col, sql = rhs, lhs else: col, sql = lhs, rhs params = [sign for _ in range(sql.count('DATEADD'))] params.append(col) return sql % tuple(params) def combine_expression(self, connector, sub_expressions): """ SQL Server requires special cases for some operators in query expressions """ if connector == '^': return 'POWER(%s)' % ','.join(sub_expressions) elif connector == '#': return '%s ^ %s' % tuple(sub_expressions) elif connector == '<<': return '%s * POWER(2, %s)' % tuple(sub_expressions) elif connector == '>>': return 'FLOOR(CONVERT(float, %s) / POWER(2, %s))' % tuple(sub_expressions) return super().combine_expression(connector, sub_expressions) def convert_datetimefield_value(self, value, expression, connection): if value is not None: if settings.USE_TZ and not timezone.is_aware(value): value = timezone.make_aware(value, self.connection.timezone) return value def convert_floatfield_value(self, value, expression, connection): if value is not None: value = float(value) return value def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value def convert_booleanfield_value(self, value, expression, connection): return bool(value) if value in (0, 1) else value if DJANGO41: def date_extract_sql(self, lookup_type, sql, params): if lookup_type == 'week_day': sql = "DATEPART(weekday, %s)" % sql elif lookup_type == 'week': sql = "DATEPART(iso_week, %s)" % sql elif lookup_type == 'iso_week_day': sql = "DATEPART(weekday, DATEADD(day, -1, %s))" % sql elif lookup_type == 'iso_year': sql = "YEAR(DATEADD(day, 26 - DATEPART(isoww, %s), %s))" % (sql, sql) else: sql = "DATEPART(%s, %s)" % (lookup_type, sql) return sql, params else: def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': return "DATEPART(weekday, %s)" % field_name elif lookup_type == 'week': return "DATEPART(iso_week, %s)" % field_name elif lookup_type == 'iso_week_day': return "DATEPART(weekday, DATEADD(day, -1, %s))" % field_name elif lookup_type == 'iso_year': return "YEAR(DATEADD(day, 26 - DATEPART(isoww, %s), %s))" % (field_name, field_name) else: return "DATEPART(%s, %s)" % (lookup_type, field_name) def date_interval_sql(self, timedelta): """ implements the interval functionality for expressions """ sec = timedelta.seconds + timedelta.days * 86400 sql = 'DATEADD(second, %d%%s, CAST(%%s AS datetime2))' % sec if timedelta.microseconds: sql = 'DATEADD(microsecond, %d%%s, CAST(%s AS datetime2))' % (timedelta.microseconds, sql) return sql if DJANGO41: def date_trunc_sql(self, lookup_type, sql, params, tzname=None): sql, params = self._convert_sql_to_tz(sql, params, tzname) # Python formats year with leading zeroes. This preserves that format for # compatibility with SQL Server's date since DATEPART drops the leading zeroes. 
CONVERT_YEAR = 'CONVERT(varchar(4), CONVERT(date, %s))' % sql CONVERT_QUARTER = 'CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3))' % sql CONVERT_MONTH = 'CONVERT(varchar, DATEPART(month, %s))' % sql CONVERT_WEEK = "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, %s)" % (sql, sql) if lookup_type == 'year': sql = "CONVERT(datetime2, %s + '/01/01')" % CONVERT_YEAR if lookup_type == 'quarter': sql = "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_QUARTER) if lookup_type == 'month': sql = "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_MONTH) if lookup_type == 'week': sql = "CONVERT(datetime2, CONVERT(varchar, %s, 112))" % CONVERT_WEEK if lookup_type == 'day': sql = "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % sql return sql, params else: def date_trunc_sql(self, lookup_type, field_name, tzname=None): field_name = self._convert_field_to_tz(field_name, tzname) # Python formats year with leading zeroes. This preserves that format for # compatibility with SQL Server's date since DATEPART drops the leading zeroes. CONVERT_YEAR = 'CONVERT(varchar(4), %s)' % field_name CONVERT_QUARTER = 'CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3))' % field_name CONVERT_MONTH = 'CONVERT(varchar, DATEPART(month, %s))' % field_name CONVERT_WEEK = "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, %s)" % (field_name, field_name) if lookup_type == 'year': return "CONVERT(datetime2, %s + '/01/01')" % CONVERT_YEAR if lookup_type == 'quarter': return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_QUARTER) if lookup_type == 'month': return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_MONTH) if lookup_type == 'week': return "CONVERT(datetime2, CONVERT(varchar, %s, 112))" % CONVERT_WEEK if lookup_type == 'day': return "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name if DJANGO41: def datetime_cast_date_sql(self, sql, params, tzname): sql, params = self._convert_sql_to_tz(sql, params, tzname) sql = 'CAST(%s AS date)' % sql return sql, params else: def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) sql = 'CAST(%s AS date)' % field_name return sql if DJANGO41: def datetime_cast_time_sql(self, sql, params, tzname): sql, params = self._convert_sql_to_tz(sql, params, tzname) sql = 'CAST(%s AS time)' % sql return sql, params else: def datetime_cast_time_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) sql = 'CAST(%s AS time)' % field_name return sql if DJANGO41: def datetime_extract_sql(self, lookup_type, sql, params, tzname): sql, params = self._convert_sql_to_tz(sql, params, tzname) return self.date_extract_sql(lookup_type, sql, params) else: def datetime_extract_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return self.date_extract_sql(lookup_type, field_name) if DJANGO41: def datetime_trunc_sql(self, lookup_type, sql, params, tzname): sql, params = self._convert_sql_to_tz(sql, params, tzname) if lookup_type in ('year', 'quarter', 'month', 'week', 'day'): return self.date_trunc_sql(lookup_type, sql, params) elif lookup_type == 'hour': sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 14) + ':00:00')" % sql elif lookup_type == 'minute': sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 17) + ':00')" % sql elif lookup_type == 'second': sql = "CONVERT(datetime2, CONVERT(varchar, %s, 20))" % sql return sql, params else: 
def datetime_trunc_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) sql = '' if lookup_type in ('year', 'quarter', 'month', 'week', 'day'): sql = self.date_trunc_sql(lookup_type, field_name) elif lookup_type == 'hour': sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 14) + ':00:00')" % field_name elif lookup_type == 'minute': sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 17) + ':00')" % field_name elif lookup_type == 'second': sql = "CONVERT(datetime2, CONVERT(varchar, %s, 20))" % field_name return sql def fetch_returned_insert_rows(self, cursor): """ Given a cursor object that has just performed an INSERT...OUTPUT INSERTED statement into a table, return the list of returned data. """ return cursor.fetchall() def return_insert_columns(self, fields): if not fields: return '', () columns = [ '%s.%s' % ( 'INSERTED', self.quote_name(field.column), ) for field in fields ] return 'OUTPUT %s' % ', '.join(columns), () def for_update_sql(self, nowait=False, skip_locked=False, of=()): if skip_locked: return 'WITH (ROWLOCK, UPDLOCK, READPAST)' elif nowait: return 'WITH (NOWAIT, ROWLOCK, UPDLOCK)' else: return 'WITH (ROWLOCK, UPDLOCK)' def format_for_duration_arithmetic(self, sql): if sql == '%s': # use DATEADD only once because Django prepares only one parameter for this fmt = 'DATEADD(second, %s / 1000000%%s, CAST(%%s AS datetime2))' sql = '%%s' else: # use DATEADD twice to avoid arithmetic overflow for number part MICROSECOND = "DATEADD(microsecond, %s %%%%%%%% 1000000%%s, CAST(%%s AS datetime2))" fmt = 'DATEADD(second, %s / 1000000%%s, {})'.format(MICROSECOND) sql = (sql, sql) return fmt % sql def fulltext_search_sql(self, field_name): """ Returns the SQL WHERE clause to use in order to perform a full-text search of the given field_name. Note that the resulting string should contain a '%s' placeholder for the value being searched against. """ return 'CONTAINS(%s, %%s)' % field_name def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type == 'DateTimeField': converters.append(self.convert_datetimefield_value) elif internal_type == 'FloatField': converters.append(self.convert_floatfield_value) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) elif internal_type in ('BooleanField', 'NullBooleanField'): converters.append(self.convert_booleanfield_value) return converters def last_insert_id(self, cursor, table_name, pk_name): """ Given a cursor object that has just performed an INSERT statement into a table that has an auto-incrementing ID, returns the newly created ID. This method also receives the table name and the name of the primary-key column. """ # TODO: Check how the `last_insert_id` is being used in the upper layers # in context of multithreaded access, compare with other backends # IDENT_CURRENT: http://msdn2.microsoft.com/en-us/library/ms175098.aspx # SCOPE_IDENTITY: http://msdn2.microsoft.com/en-us/library/ms190315.aspx # @@IDENTITY: http://msdn2.microsoft.com/en-us/library/ms187342.aspx # IDENT_CURRENT is not limited by scope and session; it is limited to # a specified table. IDENT_CURRENT returns the value generated for # a specific table in any session and any scope. # SCOPE_IDENTITY and @@IDENTITY return the last identity values that # are generated in any table in the current session. 
However, # SCOPE_IDENTITY returns values inserted only within the current scope; # @@IDENTITY is not limited to a specific scope. table_name = self.quote_name(table_name) cursor.execute("SELECT CAST(IDENT_CURRENT(%s) AS int)", [table_name]) return cursor.fetchone()[0] def lookup_cast(self, lookup_type, internal_type=None): if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'): return "UPPER(%s)" return "%s" def max_name_length(self): return 128 def no_limit_value(self): return None def prepare_sql_script(self, sql, _allow_fallback=False): return [sql] def quote_name(self, name): """ Returns a quoted version of the given table, index or column name. Does not quote the given name if it's already been quoted. """ if name.startswith('[') and name.endswith(']'): return name # Quoting once is enough. return '[%s]' % name def random_function_sql(self): """ Returns a SQL expression that returns a random value. """ return "RAND()" def regex_lookup(self, lookup_type): """ Returns the string to use in a query when performing regular expression lookups (using "regex" or "iregex"). The resulting string should contain a '%s' placeholder for the column being searched against. If the feature is not supported (or part of it is not supported), a NotImplementedError exception can be raised. """ match_option = {'iregex': 0, 'regex': 1}[lookup_type] return "dbo.REGEXP_LIKE(%%s, %%s, %s)=1" % (match_option,) def limit_offset_sql(self, low_mark, high_mark): """Return LIMIT/OFFSET SQL clause.""" limit, offset = self._get_limit_offset_params(low_mark, high_mark) return '%s%s' % ( (' OFFSET %d ROWS' % offset) if offset else ' OFFSET 0 ROWS', (' FETCH FIRST %d ROWS ONLY' % limit) if limit else '', ) def last_executed_query(self, cursor, sql, params): """ Returns a string of the query last executed by the given cursor, with placeholders replaced with actual values. `sql` is the raw query containing placeholders, and `params` is the sequence of parameters. These are used by default, but this method exists for database backends to provide a better implementation according to their own quoting schemes. """ if params: if isinstance(params, list): params = tuple(params) return sql % params # Just return sql when there are no parameters. else: return sql def savepoint_create_sql(self, sid): """ Returns the SQL for starting a new savepoint. Only required if the "uses_savepoints" feature is True. The "sid" parameter is a string for the savepoint id. """ return "SAVE TRANSACTION %s" % sid def savepoint_rollback_sql(self, sid): """ Returns the SQL for rolling back the given savepoint. """ return "ROLLBACK TRANSACTION %s" % sid def _build_sequences(self, sequences, cursor): seqs = [] for seq in sequences: cursor.execute("SELECT COUNT(*) FROM %s" % self.quote_name(seq["table"])) rowcnt = cursor.fetchone()[0] elem = {} if rowcnt: elem['start_id'] = 0 else: elem['start_id'] = 1 elem.update(seq) seqs.append(elem) return seqs def _sql_flush_new(self, style, tables, *, reset_sequences=False, allow_cascade=False): if reset_sequences: return [ sequence for sequence in self.connection.introspection.sequence_list() if sequence['table'].lower() in [table.lower() for table in tables] ] return [] def _sql_flush_old(self, style, tables, sequences, allow_cascade=False): return sequences def sql_flush(self, style, tables, *args, **kwargs): """ Returns a list of SQL statements required to remove all data from the given database tables (without actually removing the tables themselves). 
        The returned value also includes SQL statements required to reset DB
        sequences passed in :param sequences:.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.

        The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing to the tables being truncated.
        """
        if not tables:
            return []
        if django_version >= (3, 1):
            sequences = self._sql_flush_new(style, tables, *args, **kwargs)
        else:
            sequences = self._sql_flush_old(style, tables, *args, **kwargs)

        from django.db import connections
        cursor = connections[self.connection.alias].cursor()

        seqs = self._build_sequences(sequences, cursor)

        COLUMNS = "TABLE_NAME, CONSTRAINT_NAME"
        WHERE = "CONSTRAINT_TYPE not in ('PRIMARY KEY','UNIQUE')"
        cursor.execute(
            "SELECT {} FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE {}".format(COLUMNS, WHERE))
        fks = cursor.fetchall()
        sql_list = ['ALTER TABLE %s NOCHECK CONSTRAINT %s;' % (
            self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks]
        sql_list.extend(['%s %s %s;' % (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'),
                                        style.SQL_FIELD(self.quote_name(table))) for table in tables])

        if self.connection.to_azure_sql_db and self.connection.sql_server_version < 2014:
            warnings.warn("Resetting identity columns is not supported "
                          "on this version of Azure SQL Database.",
                          RuntimeWarning)
        else:
            # Then reset the counters on each table.
            sql_list.extend(['%s %s (%s, %s, %s) %s %s;' % (
                style.SQL_KEYWORD('DBCC'),
                style.SQL_KEYWORD('CHECKIDENT'),
                style.SQL_FIELD(self.quote_name(seq["table"])),
                style.SQL_KEYWORD('RESEED'),
                style.SQL_FIELD('%d' % seq['start_id']),
                style.SQL_KEYWORD('WITH'),
                style.SQL_KEYWORD('NO_INFOMSGS'),
            ) for seq in seqs])

        sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' % (
            self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks])
        return sql_list

    def start_transaction_sql(self):
        """
        Returns the SQL statement required to start a transaction.
        """
        return "BEGIN TRANSACTION"

    def subtract_temporals(self, internal_type, lhs, rhs):
        lhs_sql, lhs_params = lhs
        rhs_sql, rhs_params = rhs
        if internal_type == 'DateField':
            sql = "CAST(DATEDIFF(day, %(rhs)s, %(lhs)s) AS bigint) * 86400 * 1000000"
            params = rhs_params + lhs_params
        else:
            SECOND = "DATEDIFF(second, %(rhs)s, %(lhs)s)"
            MICROSECOND = "DATEPART(microsecond, %(lhs)s) - DATEPART(microsecond, %(rhs)s)"
            sql = "CAST({} AS bigint) * 1000000 + {}".format(SECOND, MICROSECOND)
            params = rhs_params + lhs_params * 2 + rhs_params
        return sql % {'lhs': lhs_sql, 'rhs': rhs_sql}, params

    def tablespace_sql(self, tablespace, inline=False):
        """
        Returns the SQL that will be appended to tables or rows to define
        a tablespace. Returns '' if the backend doesn't use tablespaces.
        """
        return "ON %s" % self.quote_name(tablespace)

    def prep_for_like_query(self, x):
        """Prepares a value for use in a LIKE query."""
        # http://msdn2.microsoft.com/en-us/library/ms179859.aspx
        return force_str(x).replace('\\', '\\\\').replace('[', '[[]').replace('%', '[%]').replace('_', '[_]')

    def prep_for_iexact_query(self, x):
        """
        Same as prep_for_like_query(), but called for "iexact" matches, which
        need not necessarily be implemented using "LIKE" in the backend.
        """
        return x

    def adapt_datetimefield_value(self, value):
        """
        Transforms a datetime value to an object compatible with what is expected
        by the backend driver for datetime columns.
        """
        if value is None:
            return None

        # Expression values are adapted by the database.
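        # For example, a value like F('created') + datetime.timedelta(days=1) is
        # passed through here untouched and left for the database to evaluate
        # (the field name is hypothetical).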
if hasattr(value, 'resolve_expression'): return value if timezone.is_aware(value): if settings.USE_TZ: # When support for time zones is enabled, Django stores datetime information # in UTC in the database and uses time-zone-aware objects internally # source: https://docs.djangoproject.com/en/dev/topics/i18n/timezones/#overview value = value.astimezone(datetime.timezone.utc) else: # When USE_TZ is False, settings.TIME_ZONE is the time zone in # which Django will store all datetimes # source: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TIME_ZONE value = timezone.make_naive(value, self.connection.timezone) return value if DJANGO41: def time_trunc_sql(self, lookup_type, sql, params, tzname=None): # if self.connection.sql_server_version >= 2012: # fields = { # 'hour': 'DATEPART(hour, %s)' % field_name, # 'minute': 'DATEPART(minute, %s)' % field_name if lookup_type != 'hour' else '0', # 'second': 'DATEPART(second, %s)' % field_name if lookup_type == 'second' else '0', # } # sql = 'TIMEFROMPARTS(%(hour)s, %(minute)s, %(second)s, 0, 0)' % fields if lookup_type == 'hour': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 3) + ':00:00')" % sql elif lookup_type == 'minute': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 6) + ':00')" % sql elif lookup_type == 'second': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 9))" % sql return sql, params else: def time_trunc_sql(self, lookup_type, field_name, tzname=''): # if self.connection.sql_server_version >= 2012: # fields = { # 'hour': 'DATEPART(hour, %s)' % field_name, # 'minute': 'DATEPART(minute, %s)' % field_name if lookup_type != 'hour' else '0', # 'second': 'DATEPART(second, %s)' % field_name if lookup_type == 'second' else '0', # } # sql = 'TIMEFROMPARTS(%(hour)s, %(minute)s, %(second)s, 0, 0)' % fields if lookup_type == 'hour': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 3) + ':00:00')" % field_name elif lookup_type == 'minute': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 6) + ':00')" % field_name elif lookup_type == 'second': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 9))" % field_name return sql def conditional_expression_supported_in_where_clause(self, expression): """ Following "Moved conditional expression wrapping to the Exact lookup" in django 3.1 https://github.com/django/django/commit/37e6c5b79bd0529a3c85b8c478e4002fd33a2a1d """ if isinstance(expression, (Exists, WhereNode)): return True if isinstance(expression, ExpressionWrapper) and expression.conditional: return self.conditional_expression_supported_in_where_clause(expression.expression) if isinstance(expression, RawSQL) and expression.conditional: return True return False mssql-django-1.5/mssql/schema.py000066400000000000000000002263501461302131500167330ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
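# Note on the regex lookups compiled by DatabaseOperations.regex_lookup() in
# mssql/operations.py above: they rely on a user-defined dbo.REGEXP_LIKE function,
# which only exists after running the install_regex_clr management command.
# A hedged ORM-level sketch (model and field names are illustrative):
#
#     Author.objects.filter(name__iregex=r'^mc')
#     # roughly compiles to: ... WHERE dbo.REGEXP_LIKE([name], N'^mc', 0)=1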
import binascii import datetime from collections import defaultdict from django.db.backends.base.schema import ( BaseDatabaseSchemaEditor, _is_relevant_relation, _related_non_m2m_objects, logger, ) from django.db.backends.ddl_references import ( Columns, IndexName, Statement as DjStatement, Table, ) from django import VERSION as django_version from django.db.models import NOT_PROVIDED, Index, UniqueConstraint from django.db.models.fields import AutoField, BigAutoField from django.db.models.sql.where import AND from django.db.transaction import TransactionManagementError from django.utils.encoding import force_str if django_version >= (4, 0): from django.db.models.sql import Query from django.db.backends.ddl_references import Expressions class Statement(DjStatement): def __hash__(self): return hash((self.template, str(self.parts['name']))) def __eq__(self, other): return self.template == other.template and str(self.parts['name']) == str(other.parts['name']) def rename_column_references(self, table, old_column, new_column): for part in self.parts.values(): if hasattr(part, 'rename_column_references'): part.rename_column_references(table, old_column, new_column) condition = self.parts['condition'] if condition: self.parts['condition'] = condition.replace(f'[{old_column}]', f'[{new_column}]') class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): _sql_check_constraint = " CONSTRAINT %(name)s CHECK (%(check)s)" _sql_select_default_constraint_name = "SELECT" \ " d.name " \ "FROM sys.default_constraints d " \ "INNER JOIN sys.tables t ON" \ " d.parent_object_id = t.object_id " \ "INNER JOIN sys.columns c ON" \ " d.parent_object_id = c.object_id AND" \ " d.parent_column_id = c.column_id " \ "INNER JOIN sys.schemas s ON" \ " t.schema_id = s.schema_id " \ "WHERE" \ " t.name = %(table)s AND" \ " c.name = %(column)s" sql_alter_column_default = "ADD DEFAULT %(default)s FOR %(column)s" sql_alter_column_no_default = "DROP CONSTRAINT %(column)s" sql_alter_column_not_null = "ALTER COLUMN %(column)s %(type)s NOT NULL" sql_alter_column_null = "ALTER COLUMN %(column)s %(type)s NULL" sql_alter_column_type = "ALTER COLUMN %(column)s %(type)s" sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" sql_delete_default = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" sql_delete_index = "DROP INDEX %(name)s ON %(table)s" sql_delete_table = """ DECLARE @sql_foreign_constraint_name nvarchar(128) DECLARE @sql_drop_constraint nvarchar(300) WHILE EXISTS(SELECT 1 FROM sys.foreign_keys WHERE referenced_object_id = object_id('%(table)s')) BEGIN SELECT TOP 1 @sql_foreign_constraint_name = name FROM sys.foreign_keys WHERE referenced_object_id = object_id('%(table)s') SELECT @sql_drop_constraint = 'ALTER TABLE [' + OBJECT_NAME(parent_object_id) + '] ' + 'DROP CONSTRAINT [' + @sql_foreign_constraint_name + '] ' FROM sys.foreign_keys WHERE referenced_object_id = object_id('%(table)s') and name = @sql_foreign_constraint_name exec sp_executesql @sql_drop_constraint END DROP TABLE %(table)s """ sql_rename_column = "EXEC sp_rename '%(table)s.%(old_column)s', %(new_column)s, 'COLUMN'" sql_rename_table = "EXEC sp_rename %(old_table)s, %(new_table)s" sql_create_unique_null = "CREATE UNIQUE INDEX %(name)s ON %(table)s(%(columns)s) " \ "WHERE %(columns)s IS NOT NULL" sql_alter_table_comment= """ IF NOT EXISTS (SELECT NULL FROM sys.extended_properties ep WHERE ep.major_id = OBJECT_ID('%(table)s') AND ep.name = 'MS_Description' AND ep.minor_id = 0) 
EXECUTE sp_addextendedproperty @name = 'MS_Description', @value = %(comment)s, @level0type = 'SCHEMA', @level0name = 'dbo', @level1type = 'TABLE', @level1name = %(table)s ELSE EXECUTE sp_updateextendedproperty @name = 'MS_Description', @value = %(comment)s, @level0type = 'SCHEMA', @level0name = 'dbo', @level1type = 'TABLE', @level1name = %(table)s """ sql_alter_column_comment= """ IF NOT EXISTS (SELECT NULL FROM sys.extended_properties ep WHERE ep.major_id = OBJECT_ID('%(table)s') AND ep.name = 'MS_Description' AND ep.minor_id = (SELECT column_id FROM sys.columns WHERE name = '%(column)s' AND object_id = OBJECT_ID('%(table)s'))) EXECUTE sp_addextendedproperty @name = 'MS_Description', @value = %(comment)s, @level0type = 'SCHEMA', @level0name = 'dbo', @level1type = 'TABLE', @level1name = %(table)s, @level2type = 'COLUMN', @level2name = %(column)s ELSE EXECUTE sp_updateextendedproperty @name = 'MS_Description', @value = %(comment)s, @level0type = 'SCHEMA', @level0name = 'dbo', @level1type = 'TABLE', @level1name = %(table)s, @level2type = 'COLUMN', @level2name = %(column)s """ _deferred_unique_indexes = defaultdict(list) def _alter_column_default_sql(self, model, old_field, new_field, drop=False): """ Hook to specialize column default alteration. Return a (sql, params) fragment to add or drop (depending on the drop argument) a default to new_field's column. """ new_default = self.effective_default(new_field) default = '%s' params = [new_default] column = self.quote_name(new_field.column) if drop: params = [] # SQL Server requires the name of the default constraint result = self.execute( self._sql_select_default_constraint_name % { "table": self.quote_value(model._meta.db_table), "column": self.quote_value(new_field.column), }, has_result=True ) if result: for row in result: column = self.quote_name(next(iter(row))) elif self.connection.features.requires_literal_defaults: # Some databases (Oracle) can't take defaults as a parameter # If this is the case, the SchemaEditor for that database should # implement prepare_default(). default = self.prepare_default(new_default) params = [] new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default return ( sql % { 'column': column, 'type': new_db_params['type'], 'default': default, }, params, ) def _alter_column_database_default_sql( self, model, old_field, new_field, drop=False ): """ Hook to specialize column database default alteration. Return a (sql, params) fragment to add or drop (depending on the drop argument) a default to new_field's column. 
""" column = self.quote_name(new_field.column) if drop: # SQL Server requires the name of the default constraint result = self.execute( self._sql_select_default_constraint_name % { "table": self.quote_value(model._meta.db_table), "column": self.quote_value(new_field.column), }, has_result=True ) if result: for row in result: column = self.quote_name(next(iter(row))) sql = self.sql_alter_column_no_default default_sql = "" params = [] else: sql = self.sql_alter_column_default default_sql, params = self.db_default_sql(new_field) new_db_params = new_field.db_parameters(connection=self.connection) return ( sql % { "column": column, "type": new_db_params["type"], "default": default_sql, }, params, ) def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment): return ( self.sql_alter_column_comment % { "table": self.quote_name(model._meta.db_table), "column": new_field.column, "comment": self._comment_sql(new_db_comment), }, [], ) def _alter_column_null_sql(self, model, old_field, new_field): """ Hook to specialize column null alteration. Return a (sql, params) fragment to set a column to null or non-null as required by new_field, or None if no changes are required. """ if (self.connection.features.interprets_empty_strings_as_nulls and new_field.get_internal_type() in ("CharField", "TextField")): # The field is nullable in the database anyway, leave it alone. return else: new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], }, [], ) if django_version >= (4, 2): def _alter_column_type_sql(self, model, old_field, new_field, new_type, old_collation, new_collation): new_type = self._set_field_new_type_null_status(old_field, new_type) return super()._alter_column_type_sql(model, old_field, new_field, new_type, old_collation, new_collation) else: def _alter_column_type_sql(self, model, old_field, new_field, new_type): new_type = self._set_field_new_type_null_status(old_field, new_type) return super()._alter_column_type_sql(model, old_field, new_field, new_type) def alter_unique_together(self, model, old_unique_together, new_unique_together): """ Deal with a model changing its unique_together. The input unique_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. 
""" olds = {tuple(fields) for fields in old_unique_together} news = {tuple(fields) for fields in new_unique_together} # Deleted uniques for fields in olds.difference(news): meta_constraint_names = {constraint.name for constraint in model._meta.constraints} meta_index_names = {constraint.name for constraint in model._meta.indexes} columns = [model._meta.get_field(field).column for field in fields] self._delete_unique_constraint_for_columns( model, columns, exclude=meta_constraint_names | meta_index_names, strict=True) # Created uniques if django_version >= (4, 0): for field_names in news.difference(olds): fields = [model._meta.get_field(field) for field in field_names] columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, fields, condition=condition) self.execute(sql) else: for fields in news.difference(olds): columns = [model._meta.get_field(field).column for field in fields] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, columns, condition=condition) self.execute(sql) def _model_indexes_sql(self, model): """ Return a list of all index SQL statements (field indexes, index_together, Meta.indexes) for the specified model. """ if not model._meta.managed or model._meta.proxy or model._meta.swapped: return [] output = [] for field in model._meta.local_fields: output.extend(self._field_indexes_sql(model, field)) for field_names in model._meta.index_together: fields = [model._meta.get_field(field) for field in field_names] output.append(self._create_index_sql(model, fields, suffix="_idx")) if django_version >= (4, 0): for field_names in model._meta.unique_together: fields = [model._meta.get_field(field) for field in field_names] columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, fields, condition=condition) output.append(sql) else: for field_names in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, columns, condition=condition) output.append(sql) for index in model._meta.indexes: if django_version >= (3, 2) and ( not index.contains_expressions or self.connection.features.supports_expression_indexes ): output.append(index.create_sql(model, self)) else: output.append(index.create_sql(model, self)) return output def _db_table_constraint_names(self, db_table, column_names=None, column_match_any=False, unique=None, primary_key=None, index=None, foreign_key=None, check=None, type_=None, exclude=None, unique_constraint=None): """ Return all constraint names matching the columns and conditions. 
Modified from base `_constraint_names` but with the following new arguments: - `unique_constraint` which explicitly finds unique implemented by CONSTRAINT not by an INDEX - `column_match_any`: False: (default) only return constraints covering exactly `column_names` True : return any constraints which include at least 1 of `column_names` """ if column_names is not None: column_names = [ self.connection.introspection.identifier_converter(name) for name in column_names ] with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, db_table) result = [] for name, infodict in constraints.items(): if column_names is None or column_names == infodict['columns'] or ( column_match_any and any(col in infodict['columns'] for col in column_names) ): if unique is not None and infodict['unique'] != unique: continue if unique_constraint is not None and infodict['unique_constraint'] != unique_constraint: continue if primary_key is not None and infodict['primary_key'] != primary_key: continue if index is not None and infodict['index'] != index: continue if check is not None and infodict['check'] != check: continue if foreign_key is not None and not infodict['foreign_key']: continue if type_ is not None and infodict['type'] != type_: continue if not exclude or name not in exclude: result.append(name) return result def _db_table_delete_constraint_sql(self, template, db_table, name): return Statement( template, table=Table(db_table, self.quote_name), name=self.quote_name(name), include='' ) def _delete_deferred_unique_indexes_for_field(self, field): deferred_statements = self._deferred_unique_indexes.get(str(field), []) for stmt in deferred_statements: if stmt in self.deferred_sql: self.deferred_sql.remove(stmt) def _add_deferred_unique_index_for_field(self, field, statement): self._deferred_unique_indexes[str(field)].append(statement) def _column_generated_sql(self, field): """Return the SQL to use in a GENERATED ALWAYS clause.""" expression_sql, params = field.generated_sql(self.connection) persistency_sql = "PERSISTED" if field.db_persist else "" if self.connection.features.requires_literal_defaults: expression_sql = expression_sql % tuple(self.quote_value(p) for p in params) params = () return f"GENERATED ALWAYS AS ({expression_sql}) {persistency_sql}", params def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): """Actually perform a "physical" (non-ManyToMany) field update.""" # the backend doesn't support altering a column to/from AutoField as # SQL Server cannot alter columns to add and remove IDENTITY properties old_is_auto = False new_is_auto = False for t in (AutoField, BigAutoField): if isinstance(old_field, t): old_is_auto = True if isinstance(new_field, t): new_is_auto = True if (old_is_auto and not new_is_auto) or (not old_is_auto and new_is_auto): raise NotImplementedError("the backend doesn't support altering from %s to %s." 
% (old_field.get_internal_type(), new_field.get_internal_type())) # Drop any FK constraints, we'll remake them later fks_dropped = set() if ( old_field.remote_field and old_field.db_constraint and (django_version < (4,2) or (django_version >= (4, 2) and self._field_should_be_altered( old_field, new_field, ignore={"db_comment"}) ) ) ): # Drop index, SQL Server requires explicit deletion if not hasattr(new_field, 'db_constraint') or not new_field.db_constraint: index_names = self._constraint_names(model, [old_field.column], index=True) for index_name in index_names: self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name)) fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) if strict and len(fk_names) != 1: raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( len(fk_names), model._meta.db_table, old_field.column, )) for fk_name in fk_names: fks_dropped.add((old_field.column,)) self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name)) # Has unique been removed? if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)): self._delete_unique_constraint_for_columns(model, [old_field.column], strict=strict) # Drop incoming FK constraints if the field is a primary key or unique, # which might be a to_field target, and things are going to change. drop_foreign_keys = ( ( (old_field.primary_key and new_field.primary_key) or (old_field.unique and new_field.unique) ) and old_type != new_type ) if drop_foreign_keys: # '_meta.related_field' also contains M2M reverse fields, these # will be filtered out for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): rel_fk_names = self._constraint_names( new_rel.related_model, [new_rel.field.column], foreign_key=True ) for fk_name in rel_fk_names: self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name)) # If working with an AutoField or BigAutoField drop all indexes on the related table # This is needed when doing ALTER column statements on IDENTITY fields # https://stackoverflow.com/questions/33429775/sql-server-alter-table-alter-column-giving-set-option-error for t in (AutoField, BigAutoField): if isinstance(old_field, t) or isinstance(new_field, t): index_names = self._constraint_names(model, index=True) for index_name in index_names: self.execute( self._delete_constraint_sql(self.sql_delete_index, model, index_name) ) break # Removed an index? (no strict check, as multiple indexes are possible) # Remove indexes if db_index switched to False or a unique constraint # will now be used in lieu of an index. The following lines from the # truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # True | False | False | False # True | False | False | True # True | False | True | True if (old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique)) or ( # Drop indexes on nvarchar columns that are changing to a different type # SQL Server requires explicit deletion (old_field.db_index or old_field.unique) and ( (old_type.startswith('nvarchar') and not new_type.startswith('nvarchar')) )): # Find the index for this field meta_index_names = {index.name for index in model._meta.indexes} # Retrieve only BTREE indexes since this is what's created with # db_index=True. 
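            # (Index.suffix is 'idx'; the introspection in mssql/introspection.py above
            # reports rowstore clustered and nonclustered indexes under exactly that
            # type, so type_=Index.suffix below matches only those.)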
            index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix)
            for index_name in index_names:
                if index_name not in meta_index_names:
                    # The only way to check if an index was created with
                    # db_index=True or with Index(['field'], name='foo')
                    # is to look at its name (refs #28053).
                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
        # Change check constraints?
        if (old_db_params['check'] != new_db_params['check'] and old_db_params['check']) or (
            # SQL Server requires explicit deletion before altering column type with the same constraint
            old_db_params['check'] == new_db_params['check'] and old_db_params['check'] and
            old_db_params['type'] != new_db_params['type']
        ):
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            sql_restore_index = ''
            # Drop any unique indexes which include the column to be renamed
            index_names = self._db_table_constraint_names(
                db_table=model._meta.db_table, column_names=[old_field.column], column_match_any=True,
                index=True, unique=True,
            )
            for index_name in index_names:
                # Before dropping figure out how to recreate it afterwards
                with self.connection.cursor() as cursor:
                    cursor.execute(f"""
                        SELECT COL_NAME(ic.object_id,ic.column_id) AS column_name, filter_definition
                        FROM sys.indexes AS i
                        INNER JOIN sys.index_columns AS ic
                            ON i.object_id = ic.object_id AND i.index_id = ic.index_id
                        WHERE i.object_id = OBJECT_ID('{model._meta.db_table}')
                        and i.name = '{index_name}'
                    """)
                    result = cursor.fetchall()
                    columns_to_recreate_index = ', '.join(['%s' % self.quote_name(column[0]) for column in result])
                    filter_definition = result[0][1]
                    sql_restore_index += 'CREATE UNIQUE INDEX %s ON %s (%s) WHERE %s;' % (
                        index_name, model._meta.db_table, columns_to_recreate_index, filter_definition)
                    self.execute(self._db_table_delete_constraint_sql(
                        self.sql_delete_index, model._meta.db_table, index_name))
            self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
            # Restore index(es) now the column has been renamed
            if sql_restore_index:
                self.execute(sql_restore_index.replace(f'[{old_field.column}]', f'[{new_field.column}]'))
            # Rename all references to the renamed column.
            for sql in self.deferred_sql:
                if isinstance(sql, DjStatement):
                    sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column)
        # Next, start accumulating actions to do
        actions = []
        null_actions = []
        post_actions = []
        # Type or comment change?
if old_type != new_type or (django_version >= (4, 2) and self.connection.features.supports_comments and old_field.db_comment != new_field.db_comment ): if django_version >= (4, 2): fragment, other_actions = self._alter_column_type_sql( model, old_field, new_field, new_type, old_collation=None, new_collation=None ) else: fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type) actions.append(fragment) post_actions.extend(other_actions) # Drop unique constraint, SQL Server requires explicit deletion self._delete_unique_constraints(model, old_field, new_field, strict) # Drop indexes, SQL Server requires explicit deletion self._delete_indexes(model, old_field, new_field) # db_default change? if django_version >= (5,0): if new_field.db_default is not NOT_PROVIDED: if ( old_field.db_default is NOT_PROVIDED or new_field.db_default != old_field.db_default ): actions.append( self._alter_column_database_default_sql(model, old_field, new_field) ) elif old_field.db_default is not NOT_PROVIDED: actions.append( self._alter_column_database_default_sql( model, old_field, new_field, drop=True ) ) # When changing a column NULL constraint to NOT NULL with a given # default value, we need to perform 4 steps: # 1. Add a default for new incoming writes # 2. Update existing NULL rows with new default # 3. Replace NULL constraint with NOT NULL # 4. Drop the default again. # Default change? old_default = self.effective_default(old_field) new_default = self.effective_default(new_field) needs_database_default = ( old_field.null and not new_field.null and old_default != new_default and new_default is not None and not self.skip_default(new_field) ) if django_version >= (5,0): needs_database_default = needs_database_default and new_field.db_default is NOT_PROVIDED if needs_database_default: actions.append(self._alter_column_default_sql(model, old_field, new_field)) # Nullability change? if old_field.null != new_field.null: fragment = self._alter_column_null_sql(model, old_field, new_field) if fragment: null_actions.append(fragment) # Drop unique constraint, SQL Server requires explicit deletion self._delete_unique_constraints(model, old_field, new_field, strict) # Drop indexes, SQL Server requires explicit deletion indexes_dropped = self._delete_indexes(model, old_field, new_field) auto_index_names = [] for index_from_meta in model._meta.indexes: auto_index_names.append(self._create_index_name(model._meta.db_table, index_from_meta.fields)) if ( new_field.get_internal_type() not in ("JSONField", "TextField") and (old_field.db_index or not new_field.db_index) and new_field.db_index or ((indexes_dropped and sorted(indexes_dropped) == sorted([index.name for index in model._meta.indexes])) or (indexes_dropped and sorted(indexes_dropped) == sorted(auto_index_names))) ): create_index_sql_statement = self._create_index_sql(model, [new_field]) if create_index_sql_statement.__str__() not in [sql.__str__() for sql in self.deferred_sql]: post_actions.append((create_index_sql_statement, ())) # Only if we have a default and there is a change from NULL to NOT NULL four_way_default_alteration = ( (new_field.has_default() or (django_version >= (5,0) and new_field.db_default is not NOT_PROVIDED)) and (old_field.null and not new_field.null) ) if actions or null_actions: if not four_way_default_alteration: # If we don't have to do a 4-way default alteration we can # directly run a (NOT) NULL alteration actions = actions + null_actions # Combine actions together if we can (e.g. 
postgres)
            if self.connection.features.supports_combined_alters and actions:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), sum(params, []))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
            if four_way_default_alteration:
                if django_version >= (5,0) and new_field.db_default is not NOT_PROVIDED:
                    default_sql, params = self.db_default_sql(new_field)
                else:
                    default_sql = "%s"
                    params = [new_default]
                # Update existing rows with default value
                self.execute(
                    self.sql_update_with_default % {
                        "table": self.quote_name(model._meta.db_table),
                        "column": self.quote_name(new_field.column),
                        "default": default_sql,
                    },
                    params,
                )
                # Since we didn't run a NOT NULL change before we need to do it
                # now
                for sql, params in null_actions:
                    self.execute(
                        self.sql_alter_column % {
                            "table": self.quote_name(model._meta.db_table),
                            "changes": sql,
                        },
                        params,
                    )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # If primary_key changed to False, delete the primary key constraint.
        if old_field.primary_key and not new_field.primary_key:
            self._delete_primary_key(model, strict)
        # Added a unique?
        if self._unique_should_be_added(old_field, new_field):
            if (self.connection.features.supports_nullable_unique_constraints and
                    not new_field.many_to_many and new_field.null):
                self.execute(
                    self._create_index_sql(
                        model, [new_field], sql=self.sql_create_unique_null, suffix="_uniq"
                    )
                )
            else:
                if django_version >= (4, 0):
                    self.execute(self._create_unique_sql(model, [new_field]))
                else:
                    self.execute(self._create_unique_sql(model, [new_field.column]))
            self._delete_deferred_unique_indexes_for_field(new_field)
        # Added an index? Add an index if db_index switched to True or a unique
        # constraint will no longer be used in lieu of an index. The following
        # lines from the truth table show all True cases; the rest are False:
        #
        # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
        # ------------------------------------------------------------------------------
        # False              | False            | True               | False
        # False              | True             | True               | False
        # True               | True             | True               | False
        if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique:
            self.execute(self._create_index_sql(model, [new_field]))
        # Restore indexes & unique constraints deleted above, SQL Server requires explicit restoration
        if (old_type != new_type or (old_field.null != new_field.null)) and (
            old_field.column == new_field.column  # column rename is handled separately above
        ):
            # Restore unique constraints
            # Note: if nullable they are implemented via an explicit filtered UNIQUE INDEX (not CONSTRAINT)
            # in order to get ANSI-compliant NULL behaviour (i.e.
NULL != NULL, multiple are allowed)
            # Note: Don't restore primary keys, we need to re-create those separately
            if old_field.unique and new_field.unique and not new_field.primary_key:
                if new_field.null:
                    self.execute(
                        self._create_index_sql(
                            model, [old_field], sql=self.sql_create_unique_null, suffix="_uniq"
                        )
                    )
                else:
                    if django_version >= (4, 0):
                        self.execute(self._create_unique_sql(model, [old_field]))
                    else:
                        self.execute(self._create_unique_sql(model, columns=[old_field.column]))
                self._delete_deferred_unique_indexes_for_field(old_field)
            else:
                if django_version >= (4, 0):
                    for field_names in model._meta.unique_together:
                        columns = [model._meta.get_field(field).column for field in field_names]
                        fields = [model._meta.get_field(field) for field in field_names]
                        if old_field.column in columns:
                            condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
                            self.execute(self._create_unique_sql(model, fields, condition=condition))
                else:
                    for fields in model._meta.unique_together:
                        columns = [model._meta.get_field(field).column for field in fields]
                        if old_field.column in columns:
                            condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
                            self.execute(self._create_unique_sql(model, columns, condition=condition))

            # Restore primary keys
            if old_field.primary_key and new_field.primary_key:
                self.execute(
                    self.sql_create_pk % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": self.quote_name(
                            self._create_index_name(model._meta.db_table, [new_field.column], suffix="_pk")
                        ),
                        "columns": self.quote_name(new_field.column),
                    }
                )

            # Restore unique_together
            # If we have ALTERed an AutoField or BigAutoField we need to recreate all unique_together clauses
            for t in (AutoField, BigAutoField):
                if isinstance(old_field, t) or isinstance(new_field, t):
                    for field_names in model._meta.unique_together:
                        columns = [model._meta.get_field(field).column for field in field_names]
                        fields = [model._meta.get_field(field) for field in field_names]
                        condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
                        # We need to pass fields instead of columns when using >= Django 4.0 because
                        # of a backwards incompatible change to _create_unique_sql
                        if django_version >= (4, 0):
                            self.execute(
                                self._create_unique_sql(model, fields, condition=condition)
                            )
                        else:
                            self.execute(
                                self._create_unique_sql(model, columns, condition=condition)
                            )
                    break

            # Restore indexes
            # If we have ALTERed an AutoField or BigAutoField we need to recreate all indexes
            for t in (AutoField, BigAutoField):
                if isinstance(old_field, t) or isinstance(new_field, t):
                    for field in model._meta.fields:
                        if field.db_index:
                            self.execute(
                                self._create_index_sql(model, [field])
                            )
                    break

            index_columns = []
            if old_field.db_index and new_field.db_index:
                index_columns.append([old_field])
            else:
                for fields in model._meta.index_together:
                    columns = [model._meta.get_field(field) for field in fields]
                    if old_field.column in [c.column for c in columns]:
                        index_columns.append(columns)
            if index_columns:
                for columns in index_columns:
                    create_index_sql_statement = self._create_index_sql(model, columns)
                    if (create_index_sql_statement.__str__()
                            not in [sql.__str__() for sql in self.deferred_sql] +
                            [statement[0].__str__() for statement in post_actions]
                            ):
                        self.execute(create_index_sql_statement)

        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
        # Changed to become primary key?
if self._field_became_primary_key(old_field, new_field): # Make the new one self.execute( self.sql_create_pk % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name( self._create_index_name(model._meta.db_table, [new_field.column], suffix="_pk") ), "columns": self.quote_name(new_field.column), } ) # Update all referencing columns rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Handle our type alters on the other end of rels from the PK stuff above for old_rel, new_rel in rels_to_update: rel_db_params = new_rel.field.db_parameters(connection=self.connection) rel_type = rel_db_params['type'] if django_version >= (4, 2): fragment, other_actions = self._alter_column_type_sql( new_rel.related_model, old_rel.field, new_rel.field, rel_type, old_collation=None, new_collation=None ) else: fragment, other_actions = self._alter_column_type_sql( new_rel.related_model, old_rel.field, new_rel.field, rel_type ) # Drop related_model indexes, so it can be altered index_names = self._db_table_constraint_names(old_rel.related_model._meta.db_table, index=True) for index_name in index_names: self.execute(self._db_table_delete_constraint_sql( self.sql_delete_index, old_rel.related_model._meta.db_table, index_name)) self.execute( self.sql_alter_column % { "table": self.quote_name(new_rel.related_model._meta.db_table), "changes": fragment[0], }, fragment[1], ) for sql, params in other_actions: self.execute(sql, params) # Restore related_model indexes for field in new_rel.related_model._meta.fields: if field.db_index: self.execute( self._create_index_sql(new_rel.related_model, [field]) ) # Restore unique_together clauses for field_names in new_rel.related_model._meta.unique_together: columns = [new_rel.related_model._meta.get_field(field).column for field in field_names] fields = [new_rel.related_model._meta.get_field(field) for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) # We need to pass fields instead of columns when using >= Django 4.0 because # of a backwards incompatible change to _create_unique_sql if django_version >= (4, 0): self.execute( self._create_unique_sql(new_rel.related_model, fields, condition=condition) ) else: self.execute( self._create_unique_sql(new_rel.related_model, columns, condition=condition) ) # Does it have a foreign key? if (new_field.remote_field and (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and new_field.db_constraint): self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")) # Rebuild FKs that pointed to us if we previously had to drop them if drop_foreign_keys: for rel in new_field.model._meta.related_objects: if _is_relevant_relation(rel, new_field) and rel.field.db_constraint: self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk")) # Does it have check constraints we need to add? 
if (old_db_params['check'] != new_db_params['check'] and new_db_params['check']) or ( # SQL Server requires explicit creation after altering column type with the same constraint old_db_params['check'] == new_db_params['check'] and new_db_params['check'] and old_db_params['type'] != new_db_params['type'] ): self.execute( self.sql_create_check % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name( self._create_index_name(model._meta.db_table, [new_field.column], suffix="_check") ), "column": self.quote_name(new_field.column), "check": new_db_params['check'], } ) # Drop the default if we need to # (Django usually does not use in-database defaults) if needs_database_default: changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def _delete_indexes(self, model, old_field, new_field): index_columns = [] index_names = [] if old_field.db_index and new_field.db_index: index_columns.append([old_field.column]) elif old_field.null != new_field.null: index_columns.append([old_field.column]) for fields in model._meta.index_together: columns = [model._meta.get_field(field).column for field in fields] if old_field.column in columns: index_columns.append(columns) for index in model._meta.indexes: columns = [model._meta.get_field(field).column for field in index.fields] if old_field.column in columns: index_columns.append(columns) for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] if old_field.column in columns: index_columns.append(columns) if index_columns: # remove duplicates first temp = [] for columns in index_columns: if columns not in temp: temp.append(columns) index_columns = temp for columns in index_columns: index_names = self._constraint_names(model, columns, index=True) for index_name in index_names: self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name)) return index_names def _delete_unique_constraints(self, model, old_field, new_field, strict=False): unique_columns = [] # Considering just this column, we only need to drop unique constraints in advance of altering the field # *if* it remains unique - if it wasn't unique before there's nothing to drop; if it won't remain unique # afterwards then that is handled separately in _alter_field if old_field.unique and new_field.unique: unique_columns.append([old_field.column]) # Also consider unique_together because, although this is implemented with a filtered unique INDEX now, we # need to handle the possibility that we're acting on a database previously created by an older version of # this backend, where unique_together used to be implemented with a CONSTRAINT for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] if old_field.column in columns: unique_columns.append(columns) if unique_columns: for columns in unique_columns: self._delete_unique_constraint_for_columns(model, columns, strict=strict) def _delete_unique_constraint_for_columns(self, model, columns, strict=False, **constraint_names_kwargs): constraint_names_unique = self._db_table_constraint_names( model._meta.db_table, columns, unique=True, unique_constraint=True, **constraint_names_kwargs) constraint_names_primary = 
self._db_table_constraint_names( model._meta.db_table, columns, unique=True, primary_key=True, **constraint_names_kwargs) constraint_names_normal = constraint_names_unique + constraint_names_primary constraint_names_index = self._db_table_constraint_names( model._meta.db_table, columns, unique=True, unique_constraint=False, primary_key=False, **constraint_names_kwargs) constraint_names = constraint_names_normal + constraint_names_index if django_version >= (4, 1): if constraint_names and self.connection.features.allows_multiple_constraints_on_same_fields: # Constraint matching the unique_together name. default_name = str( self._unique_constraint_name(model._meta.db_table, columns, quote=False) ) if default_name in constraint_names: constraint_names = [default_name] if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of unique constraints for columns %s" % ( len(constraint_names), repr(columns), )) # Delete constraints which are implemented as a table CONSTRAINT (this may include some created by an # older version of this backend, even if the current version would implement it with an INDEX instead) for constraint_name in constraint_names_normal: self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name)) # Delete constraints which are implemented with an explicit index instead (not a table CONSTRAINT) # These are used for example to enforce ANSI-compliant unique constraints on nullable columns. for index_name in constraint_names_index: self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name)) def _rename_field_sql(self, table, old_field, new_field, new_type): new_type = self._set_field_new_type_null_status(old_field, new_type) return super()._rename_field_sql(table, old_field, new_field, new_type) def _set_field_new_type_null_status(self, field, new_type): """ Keep the null property of the old field. If it has changed, it will be handled separately. """ if field.null: new_type += " NULL" else: new_type += " NOT NULL" return new_type def add_field(self, model, field): """ Create a field on a model. Usually involves adding a column, but may involve adding a table instead (for M2M fields). 
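        Illustrative example (added note, hypothetical names): adding
        `age = models.IntegerField(null=True)` typically results in
        `ALTER TABLE [myapp_mymodel] ADD [age] int NULL`.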
""" # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.create_model(field.remote_field.through) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it if definition is None: return if col_type_suffix := field.db_type_suffix(connection=self.connection): definition += f" {col_type_suffix}" # Remove column type from definition if field is generated if (django_version >= (5,0) and field.generated): definition = definition[definition.find('AS'):] # Nullable columns with default values require 'WITH VALUES' to set existing rows if 'DEFAULT' in definition and field.null: definition = definition.replace('NULL', 'WITH VALUES') if (self.connection.features.supports_nullable_unique_constraints and not field.many_to_many and field.null and field.unique): definition = definition.replace(' UNIQUE', '') statement = self._create_index_sql( model, [field], sql=self.sql_create_unique_null, suffix="_uniq" ) self.deferred_sql.append(statement) self._add_deferred_unique_index_for_field(field, statement) # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += " CHECK (%s)" % db_params['check'] # Build the SQL and run it sql = self.sql_create_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), "definition": definition, } self.execute(sql, params) # Drop the default if we need to # (Django usually does not use in-database defaults) if ( ((django_version >= (5,0) and field.db_default is NOT_PROVIDED) or django_version < (5,0)) and not self.skip_default(field) and self.effective_default(field) is not None ): changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Add field comment, if required. 
if django_version >= (4, 2): if ( field.db_comment and self.connection.features.supports_comments and not self.connection.features.supports_comments_inline ): field_type = db_params["type"] self.execute( *self._alter_column_comment_sql( model, field, field_type, field.db_comment ) ) # Add an index, if required self.deferred_sql.extend(self._field_indexes_sql(model, field)) # Add any FK constraints later if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint: self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s")) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() if django_version >= (4, 0): def _create_unique_sql( self, model, fields, name=None, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, nulls_distinct=None ): if not self._unique_supported( condition=condition, deferrable=deferrable, include=include, expressions=expressions, nulls_distinct=nulls_distinct, ): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) compiler = Query(model, alias_cols=False).get_compiler(connection=self.connection) columns = [field.column for field in fields] table = model._meta.db_table if name is None: name = IndexName(table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) if columns: columns = self._index_columns(table, columns, col_suffixes=(), opclasses=opclasses) else: columns = Expressions(table, expressions, compiler, self.quote_value) statement_args = { "deferrable": self._deferrable_constraint_sql(deferrable) } include = self._index_include_sql(model, include) if condition: return Statement( self.sql_create_unique_index, table=self.quote_name(table), name=name, columns=columns, condition=' WHERE ' + condition, **statement_args, include=include, nulls_distinct='' ) if self.connection.features.supports_partial_indexes else None else: return Statement( self.sql_create_unique, table=self.quote_name(table), name=name, columns=columns, **statement_args, include=include, nulls_distinct='' ) else: def _create_unique_sql(self, model, columns, name=None, condition=None, deferrable=None, include=None, opclasses=None, expressions=None): if (deferrable and not getattr(self.connection.features, 'supports_deferrable_unique_constraints', False) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes)): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) table = Table(model._meta.db_table, self.quote_name) if name is None: name = IndexName(model._meta.db_table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) columns = Columns(table, columns, self.quote_name) statement_args = { "deferrable": self._deferrable_constraint_sql(deferrable) } if django_version >= (3, 1) else {} include = self._index_include_sql(model, include) if django_version >= (3, 2) else '' if condition: return Statement( self.sql_create_unique_index, table=self.quote_name(table) if isinstance(table, str) else table, name=name, columns=columns, condition=' WHERE ' + condition, **statement_args, include=include, ) if self.connection.features.supports_partial_indexes else None else: return Statement( self.sql_create_unique, 
table=self.quote_name(table) if isinstance(table, str) else table, name=name, columns=columns, **statement_args, include=include, ) def _create_index_sql(self, model, fields, *, name=None, suffix='', using='', db_tablespace=None, col_suffixes=(), sql=None, opclasses=(), condition=None, include=None, expressions=None): """ Return the SQL statement to create the index for one or several fields. `sql` can be specified if the syntax differs from the standard (GIS indexes, ...). """ if django_version >= (3, 2): return super()._create_index_sql( model, fields=fields, name=name, suffix=suffix, using=using, db_tablespace=db_tablespace, col_suffixes=col_suffixes, sql=sql, opclasses=opclasses, condition=condition, include=include, expressions=expressions, ) return super()._create_index_sql( model, fields=fields, name=name, suffix=suffix, using=using, db_tablespace=db_tablespace, col_suffixes=col_suffixes, sql=sql, opclasses=opclasses, condition=condition, ) def create_model(self, model): """ Takes a model and creates a table for it in the database. Will also create any accompanying indexes or unique constraints. """ # Create column SQL, add FK deferreds if needed column_sqls = [] params = [] for field in model._meta.local_fields: # SQL definition, extra_params = self.column_sql(model, field) if definition is None: continue # Remove column type from definition if field is generated if (django_version >= (5,0) and field.generated): definition = definition[definition.find('AS'):] if (self.connection.features.supports_nullable_unique_constraints and not field.many_to_many and field.null and field.unique): definition = definition.replace(' UNIQUE', '') statement = self._create_index_sql( model, [field], sql=self.sql_create_unique_null, suffix="_uniq" ) self.deferred_sql.append(statement) self._add_deferred_unique_index_for_field(field, statement) # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: # SQL Server requires a name for the check constraint definition += self._sql_check_constraint % { "name": self._create_index_name(model._meta.db_table, [field.column], suffix="_check"), "check": db_params['check'] } # Autoincrement SQL (for backends with inline variant) col_type_suffix = field.db_type_suffix(connection=self.connection) if col_type_suffix: definition += " %s" % col_type_suffix params.extend(extra_params) # FK if field.remote_field and field.db_constraint: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column if self.sql_create_inline_fk: definition += " " + self.sql_create_inline_fk % { "to_table": self.quote_name(to_table), "to_column": self.quote_name(to_column), } elif self.connection.features.supports_foreign_keys: self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s")) # Add the SQL to our big list column_sqls.append("%s %s" % ( self.quote_name(field.column), definition, )) # Autoincrement SQL (for backends with post table definition variant) if field.get_internal_type() in ("AutoField", "BigAutoField", "SmallAutoField"): autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) if autoinc_sql: self.deferred_sql.extend(autoinc_sql) # Add any unique_togethers (always deferred, as some fields might be # created afterwards, like geometry fields with some backends) for field_names in model._meta.unique_together: fields = [model._meta.get_field(field) for field in 
field_names] columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) if django_version >= (4, 0): self.deferred_sql.append(self._create_unique_sql(model, fields, condition=condition)) else: self.deferred_sql.append(self._create_unique_sql(model, columns, condition=condition)) constraints = [constraint.constraint_sql(model, self) for constraint in model._meta.constraints] # Make the table sql = self.sql_create_table % { "table": self.quote_name(model._meta.db_table), 'definition': ', '.join(constraint for constraint in (*column_sqls, *constraints) if constraint), } if model._meta.db_tablespace: tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace) if tablespace_sql: sql += ' ' + tablespace_sql # Prevent using [] as params, in the case a literal '%' is used in the definition self.execute(sql, params or None) if django_version >= (4, 2) and self.connection.features.supports_comments: # Add table comment. if model._meta.db_table_comment: self.alter_db_table_comment(model, None, model._meta.db_table_comment) # Add column comments. if not self.connection.features.supports_comments_inline: for field in model._meta.local_fields: if field.db_comment: field_db_params = field.db_parameters( connection=self.connection ) field_type = field_db_params["type"] self.execute( *self._alter_column_comment_sql( model, field, field_type, field.db_comment ) ) # Add any field index and index_together's (deferred as SQLite3 _remake_table needs it) self.deferred_sql.extend(self._model_indexes_sql(model)) self.deferred_sql = list(set(self.deferred_sql)) # Make M2M tables for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.create_model(field.remote_field.through) def _delete_unique_sql( self, model, name, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, nulls_distinct=None, ): if not self._unique_supported( condition=condition, deferrable=deferrable, include=include, expressions=expressions, nulls_distinct=nulls_distinct, ): return None if condition or include or opclasses: sql = self.sql_delete_index with self.connection.cursor() as cursor: cursor.execute( "SELECT 1 FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE WHERE CONSTRAINT_NAME = '%s'" % name) row = cursor.fetchone() if row: sql = self.sql_delete_unique else: sql = self.sql_delete_unique return self._delete_constraint_sql(sql, model, name) def delete_model(self, model): super().delete_model(model) def execute(self, sql, params=(), has_result=False): """ Executes the given SQL statement, with optional parameters. """ result = None # Don't perform the transactional DDL check if SQL is being collected # as it's not going to be executed anyway. if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl: raise TransactionManagementError( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) # Account for non-string statement objects. 
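# Illustration (added note, hypothetical values): a deferred index creation can
# arrive here as a Statement object such as
#   Statement("CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)",
#             name=..., table=..., columns=...)
# and str() renders it to the final SQL text before logging and execution.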
sql = str(sql) # Log the command we're running, then run it logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql}) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) else: cursor = self.connection.cursor() cursor.execute(sql, params) if has_result: result = cursor.fetchall() # the cursor can be closed only when the driver supports opening # multiple cursors on a connection because the migration command # has already opened a cursor outside this method if self.connection.supports_mars: cursor.close() return result def prepare_default(self, value): return self.quote_value(value) def quote_value(self, value): """ Returns a quoted version of the value so it's safe to use in an SQL string. This is not safe against injection from user code; it is intended only for use in making SQL scripts or preparing default values for particularly tricky backends (defaults are not user-defined, though, so this is safe). """ if isinstance(value, (datetime.datetime, datetime.date, datetime.time)): return "'%s'" % value elif isinstance(value, str): return "'%s'" % value.replace("'", "''") elif isinstance(value, (bytes, bytearray, memoryview)): return "0x%s" % force_str(binascii.hexlify(value)) elif isinstance(value, bool): return "1" if value else "0" else: return str(value) def remove_field(self, model, field): """ Removes a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. """ # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.delete_model(field.remote_field.through) # It might not actually have a column behind it if field.db_parameters(connection=self.connection)['type'] is None: return # Drop any FK constraints, SQL Server requires explicit deletion with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table) for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['foreign_key']: self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, name)) # Drop any indexes, SQL Server requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['index']: self.execute(self.sql_delete_index % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop primary key constraint, SQL Server requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['primary_key']: self.execute(self.sql_delete_pk % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop check constraints, SQL Server requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['check']: self.execute(self.sql_delete_check % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop unique constraints, SQL Server requires explicit deletion for name, infodict in constraints.items(): if (field.column in infodict['columns'] and infodict['unique'] and not infodict['primary_key'] and not infodict['index']): self.execute(self.sql_delete_unique % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop default constraint, SQL Server 
requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['default']: self.execute(self.sql_delete_default % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Delete the column sql = self.sql_delete_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } self.execute(sql) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() # Remove all deferred statements referencing the deleted column. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column): self.deferred_sql.remove(sql) def add_constraint(self, model, constraint): if isinstance(constraint, UniqueConstraint) and constraint.condition and constraint.condition.connector != AND: raise NotImplementedError("The backend does not support %s conditions on unique constraint %s." % (constraint.condition.connector, constraint.name)) super().add_constraint(model, constraint) if django_version >= (4, 2): def _collate_sql(self, collation, old_collation=None, table_name=None): return ' COLLATE ' + collation if collation else "" else: def _collate_sql(self, collation): return ' COLLATE ' + collation def _create_index_name(self, table_name, column_names, suffix=""): index_name = super()._create_index_name(table_name, column_names, suffix) # Check if the db_table specified a user-defined schema if('].[' in index_name): new_index_name = index_name.replace('[', '').replace(']', '').replace('.', '_') return new_index_name return index_name def _unique_supported( self, condition=None, deferrable=None, include=None, expressions=None, nulls_distinct=None, ): return ( (not condition or self.connection.features.supports_partial_indexes) and ( not deferrable or self.connection.features.supports_deferrable_unique_constraints ) and (not include or self.connection.features.supports_covering_indexes) and ( not expressions or self.connection.features.supports_expression_indexes ) and ( nulls_distinct is None or self.connection.features.supports_nulls_distinct_unique_constraints ) ) mssql-django-1.5/setup.cfg000066400000000000000000000001721461302131500155730ustar00rootroot00000000000000[flake8] exclude = .git,__pycache__,migrations # W504 is mutually exclusive with W503 ignore = W504 max-line-length = 119 mssql-django-1.5/setup.py000066400000000000000000000031351461302131500154660ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
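# Illustrative usage (added note, not part of this file's logic): after
# `pip install mssql-django`, a project enables this backend with
#   DATABASES = {"default": {"ENGINE": "mssql", ...}}
# in its Django settings module.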
from os import path from setuptools import find_packages, setup CLASSIFIERS = [ 'License :: OSI Approved :: BSD License', 'Framework :: Django', "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows", 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', 'Framework :: Django :: 3.2', 'Framework :: Django :: 4.0', 'Framework :: Django :: 4.1', 'Framework :: Django :: 4.2', 'Framework :: Django :: 5.0', ] this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='mssql-django', version='1.5', description='Django backend for Microsoft SQL Server', long_description=long_description, long_description_content_type='text/markdown', author='Microsoft', author_email='opencode@microsoft.com', url='https://github.com/microsoft/mssql-django', project_urls={ 'Release Notes': 'https://github.com/microsoft/mssql-django/releases', }, license='BSD', packages=find_packages(), install_requires=[ 'django>=3.2,<5.1', 'pyodbc>=3.0', 'pytz', ], package_data={'mssql': ['regex_clr.dll']}, classifiers=CLASSIFIERS, keywords='django', ) mssql-django-1.5/test.sh000077500000000000000000000045111461302131500152710ustar00rootroot00000000000000# TODO: # # * m2m_through_regress # * many_to_one_null set -e DJANGO_VERSION="$(python -m django --version)" cd django git fetch --depth=1 origin +refs/tags/*:refs/tags/* git checkout $DJANGO_VERSION pip install -r tests/requirements/py3.txt coverage run tests/runtests.py --settings=testapp.settings --noinput \ aggregation \ aggregation_regress \ annotations \ backends \ basic \ bulk_create \ constraints \ custom_columns \ custom_lookups \ custom_managers \ custom_methods \ custom_migration_operations \ custom_pk \ datatypes \ dates \ datetimes \ db_functions \ db_typecasts \ db_utils \ dbshell \ defer \ defer_regress \ delete \ delete_regress \ distinct_on_fields \ empty \ expressions \ expressions_case \ expressions_window \ extra_regress \ field_deconstruction \ field_defaults \ field_subclassing \ filtered_relation \ fixtures \ fixtures_model_package \ fixtures_regress \ force_insert_update \ foreign_object \ from_db_value \ generic_relations \ generic_relations_regress \ get_earliest_or_latest \ get_object_or_404 \ get_or_create \ indexes \ inspectdb \ introspection \ invalid_models_tests \ known_related_objects \ lookup \ m2m_and_m2o \ m2m_intermediary \ m2m_multiple \ m2m_recursive \ m2m_regress \ m2m_signals \ m2m_through \ m2o_recursive \ managers_regress \ many_to_many \ many_to_one \ max_lengths \ migrate_signals \ migration_test_data_persistence \ migrations \ migrations2 \ model_fields \ model_indexes \ model_options \ mutually_referential \ nested_foreign_keys \ null_fk \ null_fk_ordering \ null_queries \ one_to_one \ or_lookups \ order_with_respect_to \ ordering \ pagination \ prefetch_related \ queries \ queryset_pickle \ raw_query \ reverse_lookup \ save_delete_hooks \ schema \ select_for_update \ select_related \ select_related_onetoone \ select_related_regress \ serializers \ timezones \ transaction_hooks \ transactions \ update \ update_only_fields python -m coverage xml --include '*mssql*' --omit '*virtualenvs*' -o coverage.xml 
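# Example invocation (added note, illustrative): run from the repository root
# with a django/ checkout alongside and a SQL Server instance reachable using
# the credentials in testapp/settings.py:
#   ./test.sh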
mssql-django-1.5/testapp/000077500000000000000000000000001461302131500154325ustar00rootroot00000000000000mssql-django-1.5/testapp/__init__.py000066400000000000000000000000001461302131500175310ustar00rootroot00000000000000mssql-django-1.5/testapp/migrations/000077500000000000000000000000001461302131500176065ustar00rootroot00000000000000mssql-django-1.5/testapp/migrations/0001_initial.py000066400000000000000000000045301461302131500222530ustar00rootroot00000000000000# Generated by Django 2.2.8.dev20191112211527 on 2019-11-15 01:38 import uuid from django.db import migrations, models import django class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Author', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Editor', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Post', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='title')), ], ), migrations.AddField( model_name='post', name='alt_editor', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='testapp.Editor'), ), migrations.AddField( model_name='post', name='author', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.Author'), ), migrations.AlterUniqueTogether( name='post', unique_together={('author', 'title', 'alt_editor')}, ), migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.Post')), ('text', models.TextField(verbose_name='text')), ('created_at', models.DateTimeField(default=django.utils.timezone.now)), ], ), migrations.CreateModel( name='UUIDModel', fields=[ ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), ], ), ] mssql-django-1.5/testapp/migrations/0002_test_unique_nullable_part1.py000066400000000000000000000014041461302131500261520ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0001_initial'), ] operations = [ # Prep test for issue https://github.com/ESSolutions/django-mssql-backend/issues/38 # Create with a field that is unique *and* nullable so it is implemented with a filtered unique index. migrations.CreateModel( name='TestUniqueNullableModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('test_field', models.CharField(max_length=100, null=True, unique=True)), ('y', models.IntegerField(unique=True, null=True)), ], ), ] mssql-django-1.5/testapp/migrations/0003_test_unique_nullable_part2.py000066400000000000000000000014521461302131500261570ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0002_test_unique_nullable_part1'), ] operations = [ # Run test for issue https://github.com/ESSolutions/django-mssql-backend/issues/38 # Now remove the null=True to check this transition is correctly handled. 
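        # Rough sketch of the resulting DDL (added note; the constraint/index
        # names are hypothetical):
        #   DROP INDEX [testapp_testuniquenullablemodel_test_field_uniq] ON [testapp_testuniquenullablemodel];
        #   ALTER TABLE [testapp_testuniquenullablemodel] ALTER COLUMN [test_field] nvarchar(100) NOT NULL;
        #   ALTER TABLE [testapp_testuniquenullablemodel] ADD CONSTRAINT [testapp_testuniquenullablemodel_test_field_uniq] UNIQUE ([test_field]);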
migrations.AlterField( model_name='testuniquenullablemodel', name='test_field', field=models.CharField(default='', max_length=100, unique=True), preserve_default=False, ), # Test for renaming of a unique+nullable column migrations.RenameField( model_name='testuniquenullablemodel', old_name='y', new_name='y_renamed', ), ] mssql-django-1.5/testapp/migrations/0004_test_unique_type_change_part1.py000066400000000000000000000017771461302131500266610ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0003_test_unique_nullable_part2'), ] # Prep test for issue https://github.com/ESSolutions/django-mssql-backend/issues/45 operations = [ # for case 1: migrations.AddField( model_name='testuniquenullablemodel', name='x', field=models.CharField(max_length=10, null=True, unique=True), ), # for case 2: migrations.CreateModel( name='TestNullableUniqueTogetherModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('a', models.CharField(max_length=50, null=True)), ('b', models.CharField(max_length=50)), ('c', models.CharField(max_length=50)), ], options={ 'unique_together': {('a', 'b')}, }, ), ] mssql-django-1.5/testapp/migrations/0005_test_unique_type_change_part2.py000066400000000000000000000025771461302131500266620ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0004_test_unique_type_change_part1'), ] # Run test for issue https://github.com/ESSolutions/django-mssql-backend/issues/45 operations = [ # Case 1: changing max_length changes the column type - the filtered UNIQUE INDEX which implements # the nullable unique constraint, should be correctly reinstated after this change of column type # (see also the specific unit test which checks that multiple rows with NULL are allowed) migrations.AlterField( model_name='testuniquenullablemodel', name='x', field=models.CharField(max_length=11, null=True, unique=True), ), # Case 2: the filtered UNIQUE INDEX implementing the partially nullable `unique_together` constraint # should be correctly reinstated after this column type change migrations.AlterField( model_name='testnullableuniquetogethermodel', name='a', field=models.CharField(max_length=51, null=True), ), # ...similarly adding another field to the `unique_together` should preserve the constraint correctly migrations.AlterUniqueTogether( name='testnullableuniquetogethermodel', unique_together={('a', 'b', 'c')}, ), ] mssql-django-1.5/testapp/migrations/0006_test_remove_onetoone_field_part1.py000066400000000000000000000013471461302131500273460ustar00rootroot00000000000000# Generated by Django 3.0.4 on 2020-04-20 14:59 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('testapp', '0005_test_unique_type_change_part2'), ] operations = [ migrations.CreateModel( name='TestRemoveOneToOneFieldModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('a', models.CharField(max_length=50)), ('b', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='testapp.TestRemoveOneToOneFieldModel')), ], ), ] mssql-django-1.5/testapp/migrations/0007_test_remove_onetoone_field_part2.py000066400000000000000000000005701461302131500273450ustar00rootroot00000000000000# Generated by Django 3.0.4 on 2020-04-20 14:59 from django.db import 
migrations class Migration(migrations.Migration): dependencies = [ ('testapp', '0006_test_remove_onetoone_field_part1'), ] operations = [ migrations.RemoveField( model_name='testremoveonetoonefieldmodel', name='b', ), ] mssql-django-1.5/testapp/migrations/0008_test_drop_table_with_foreign_key_reference_part1.py000066400000000000000000000011101461302131500325360ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0007_test_remove_onetoone_field_part2'), ] operations = [ migrations.CreateModel( name="Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("testapp.Pony", models.CASCADE)), ]), ] mssql-django-1.5/testapp/migrations/0009_test_drop_table_with_foreign_key_reference_part2.py000066400000000000000000000007351461302131500325510ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): ''' SQL Server will generate an error if a table that is referenced by a foreign key constraint is dropped. This test checks that the table can be dropped correctly. ''' dependencies = [ ('testapp', '0008_test_drop_table_with_foreign_key_reference_part1'), ] operations = [ migrations.DeleteModel("Pony"), migrations.DeleteModel("Rider"), ] mssql-django-1.5/testapp/migrations/0010_pizza_topping.py000066400000000000000000000013711461302131500235170ustar00rootroot00000000000000# Generated by Django 3.1.7 on 2021-03-16 17:07 from django.db import migrations, models import uuid class Migration(migrations.Migration): dependencies = [ ('testapp', '0009_test_drop_table_with_foreign_key_reference_part2'), ] operations = [ migrations.CreateModel( name='Topping', fields=[ ('name', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), ], ), migrations.CreateModel( name='Pizza', fields=[ ('name', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), ('toppings', models.ManyToManyField(to='testapp.Topping')), ], ), ] mssql-django-1.5/testapp/migrations/0011_test_unique_constraints.py000066400000000000000000000045141461302131500256210ustar00rootroot00000000000000# Generated by Django 3.1.5 on 2021-01-18 00:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0010_pizza_topping'), ] operations = [ migrations.CreateModel( name='TestUnsupportableUniqueConstraint', fields=[ ( 'id', models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name='ID', ), ), ('_type', models.CharField(max_length=50)), ('status', models.CharField(max_length=50)), ], # Stop Django attempting to automatically create migrations for this table. Instead # migrations are attempted manually in `test_unsupportable_unique_constraint` where # they are expected to fail.
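                # (Background, added note stated as an assumption about T-SQL: a
                # filtered unique index WHERE clause only accepts simple
                # predicates combined with AND or IN, e.g.
                #   CREATE UNIQUE INDEX [in_constraint] ON [t] ([_type])
                #   WHERE [status] IN ('in_progress', 'needs_changes')
                # so the OR-based condition on this model cannot be expressed.)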
options={ 'managed': False, }, ), migrations.CreateModel( name='TestSupportableUniqueConstraint', fields=[ ( 'id', models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name='ID', ), ), ('_type', models.CharField(max_length=50)), ('status', models.CharField(max_length=50)), ], ), migrations.AddConstraint( model_name='testsupportableuniqueconstraint', constraint=models.UniqueConstraint( condition=models.Q( ('status', 'in_progress'), ('status', 'needs_changes'), ('status', 'published'), ), fields=('_type',), name='and_constraint', ), ), migrations.AddConstraint( model_name='testsupportableuniqueconstraint', constraint=models.UniqueConstraint( condition=models.Q(status__in=['in_progress', 'needs_changes']), fields=('_type',), name='in_constraint', ), ), ] mssql-django-1.5/testapp/migrations/0012_test_indexes_retained_part1.py000066400000000000000000000012461461302131500263050ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0011_test_unique_constraints'), ] # Prep test for issue https://github.com/microsoft/mssql-django/issues/14 operations = [ migrations.CreateModel( name='TestIndexesRetained', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('a', models.IntegerField(db_index=True)), ('b', models.IntegerField(db_index=True)), ('c', models.IntegerField(db_index=True)), ], ), ] mssql-django-1.5/testapp/migrations/0013_test_indexes_retained_part2.py000066400000000000000000000014351461302131500263070ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0012_test_indexes_retained_part1'), ] # Run test for issue https://github.com/microsoft/mssql-django/issues/14 # where the following operations should leave indexes intact operations = [ migrations.AlterField( model_name='testindexesretained', name='a', field=models.IntegerField(db_index=True, null=True), ), migrations.RenameField( model_name='testindexesretained', old_name='b', new_name='b_renamed', ), migrations.RenameModel( old_name='TestIndexesRetained', new_name='TestIndexesRetainedRenamed', ), ] mssql-django-1.5/testapp/migrations/0014_test_rename_m2mfield_part1.py000066400000000000000000000015671461302131500260310ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0013_test_indexes_retained_part2'), ] operations = [ # Prep test for issue https://github.com/microsoft/mssql-django/issues/86 migrations.CreateModel( name='M2MOtherModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=10)), ], ), migrations.CreateModel( name='TestRenameManyToManyFieldModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('others', models.ManyToManyField(to='testapp.M2MOtherModel')), ], ), ] mssql-django-1.5/testapp/migrations/0015_test_rename_m2mfield_part2.py000066400000000000000000000011251461302131500260210ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0014_test_rename_m2mfield_part1'), ] operations = [ # Run test for issue https://github.com/microsoft/mssql-django/issues/86 # Must be in a separate migration so that the unique index was created # (deferred after the 
previous migration) before we do the rename. migrations.RenameField( model_name='testrenamemanytomanyfieldmodel', old_name='others', new_name='others_renamed', ), ] mssql-django-1.5/testapp/migrations/0016_jsonmodel.py000066400000000000000000000014061461302131500226210ustar00rootroot00000000000000# Generated by Django 4.0.1 on 2022-02-01 15:58 from django import VERSION from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0015_test_rename_m2mfield_part2'), ] # JSONField added in Django 3.1 if VERSION >= (3, 1): operations = [ migrations.CreateModel( name='JSONModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('value', models.JSONField()), ], options={ 'required_db_features': {'supports_json_field'}, }, ), ] else: pass mssql-django-1.5/testapp/migrations/0017_binarydata_testcheckconstraintwithunicode_and_more.py000066400000000000000000000024671461302131500332270ustar00rootroot00000000000000# Generated by Django 4.0.2 on 2022-02-23 19:06 from django import VERSION from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0016_jsonmodel'), ] operations = [ migrations.CreateModel( name='BinaryData', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('binary', models.BinaryField(max_length='max', null=True)), ], ), ] if VERSION >= (3, 2): operations += [ migrations.CreateModel( name='TestCheckConstraintWithUnicode', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ], options={ 'required_db_features': {'supports_table_check_constraints'}, }, ), migrations.AddConstraint( model_name='testcheckconstraintwithunicode', constraint=models.CheckConstraint(check=models.Q(('name__startswith', 'Ãˇ'), _negated=True), name='name_does_not_starts_with_Ãˇ'), ), ] mssql-django-1.5/testapp/migrations/0018_choice_question.py000066400000000000000000000023441461302131500240140ustar00rootroot00000000000000# Generated by Django 3.2.12 on 2022-03-14 18:36 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('testapp', '0017_binarydata_testcheckconstraintwithunicode_and_more'), ] operations = [ migrations.CreateModel( name='Question', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('question_text', models.CharField(max_length=200)), ('pub_date', models.DateTimeField(verbose_name='date published')), ], ), migrations.CreateModel( name='Choice', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('choice_text', models.CharField(max_length=200)), ('votes', models.IntegerField(default=0)), ('question', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='testapp.question')), ], options={ 'unique_together': {('question', 'choice_text')}, }, ), ] mssql-django-1.5/testapp/migrations/0019_customer_name_customer_address.py000066400000000000000000000022111461302131500271140ustar00rootroot00000000000000# Generated by Django 4.0.3 on 2022-03-24 14:51 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('testapp', '0018_choice_question'), ] operations = [ migrations.CreateModel( name='Customer_name', fields=[ 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('Customer_name', models.CharField(max_length=100)), ], options={ 'ordering': ['Customer_name'], }, ), migrations.CreateModel( name='Customer_address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('Customer_address', models.CharField(max_length=100)), ('Customer_name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.customer_name')), ], options={ 'ordering': ['Customer_address'], }, ), ] mssql-django-1.5/testapp/migrations/0020_autofield_to_bigautofield.py000066400000000000000000000006421461302131500260170ustar00rootroot00000000000000# Generated by Django 3.2.13 on 2022-05-04 01:36 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0019_customer_name_customer_address'), ] operations = [ migrations.AlterField( model_name='author', name='id', field=models.BigAutoField(primary_key=True, serialize=False), ), ] mssql-django-1.5/testapp/migrations/0021_multiple_autofield_to_bigauto.py000066400000000000000000000014261461302131500267300ustar00rootroot00000000000000# Generated by Django 3.2.13 on 2022-05-04 01:37 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0020_autofield_to_bigautofield'), ] operations = [ migrations.AlterField( model_name='author', name='id', field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), ), migrations.AlterField( model_name='editor', name='id', field=models.BigAutoField(primary_key=True, serialize=False), ), migrations.AlterField( model_name='post', name='id', field=models.BigAutoField(primary_key=True, serialize=False), ), ] mssql-django-1.5/testapp/migrations/0022_timezone.py000066400000000000000000000011031461302131500224500ustar00rootroot00000000000000# Generated by Django 4.0.4 on 2022-06-07 15:37 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('testapp', '0021_multiple_autofield_to_bigauto'), ] operations = [ migrations.CreateModel( name='TimeZone', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('date', models.DateTimeField(default=django.utils.timezone.now)), ], ), ] mssql-django-1.5/testapp/migrations/0023_number.py000066400000000000000000000012771461302131500221230ustar00rootroot00000000000000# Generated by Django 4.0.7 on 2022-09-30 12:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0022_timezone'), ] operations = [ migrations.CreateModel( name='Number', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('integer', models.BigIntegerField(db_column='the_integer')), ('float', models.FloatField(db_column='the_float', null=True)), ('decimal_value', models.DecimalField(decimal_places=17, max_digits=20, null=True)), ], ), ] mssql-django-1.5/testapp/migrations/0024_publisher_book.py000066400000000000000000000033201461302131500236320ustar00rootroot00000000000000# Generated by Django 4.2 on 2023-05-03 15:08 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ("testapp", "0023_number"), ] operations = [ migrations.CreateModel( name="Publisher", fields=[ ( "id", models.AutoField( 
auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=100)), ], ), migrations.CreateModel( name="Book", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=100)), ("updated", models.DateTimeField(auto_now=True)), ( "authors", models.ManyToManyField(related_name="books", to="testapp.author"), ), ( "publisher", models.ForeignKey( db_column="publisher_id_column", on_delete=django.db.models.deletion.CASCADE, related_name="books", to="testapp.publisher", ), ), ], ), ] mssql-django-1.5/testapp/migrations/0025_modelwithnullablefieldsofdifferenttypes.py000066400000000000000000000012441461302131500310320ustar00rootroot00000000000000# Generated by Django 5.0.1 on 2024-01-29 14:18 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0024_publisher_book'), ] operations = [ migrations.CreateModel( name='ModelWithNullableFieldsOfDifferentTypes', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('int_value', models.IntegerField(null=True)), ('name', models.CharField(max_length=100, null=True)), ('date', models.DateTimeField(null=True)), ], ), ] mssql-django-1.5/testapp/migrations/__init__.py000066400000000000000000000000001461302131500217050ustar00rootroot00000000000000mssql-django-1.5/testapp/models.py000066400000000000000000000202471461302131500172740ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import datetime import uuid from django import VERSION from django.db import models from django.db.models import Q from django.utils import timezone # We are using this Mixin to test casting of BigAuto and Auto fields class BigAutoFieldMixin(models.Model): id = models.BigAutoField(primary_key=True) class Meta: abstract = True class Author(models.Model): name = models.CharField(max_length=100) class Editor(BigAutoFieldMixin, models.Model): name = models.CharField(max_length=100) class Post(BigAutoFieldMixin, models.Model): title = models.CharField('title', max_length=255) author = models.ForeignKey(Author, models.CASCADE) # Optional secondary author alt_editor = models.ForeignKey(Editor, models.SET_NULL, blank=True, null=True) class Meta: unique_together = ( ('author', 'title', 'alt_editor'), ) def __str__(self): return self.title class Comment(models.Model): post = models.ForeignKey(Post, on_delete=models.CASCADE) text = models.TextField('text') created_at = models.DateTimeField(default=timezone.now) def __str__(self): return self.text class UUIDModel(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) def __str__(self): return self.pk class ModelWithNullableFieldsOfDifferentTypes(models.Model): # Issue https://github.com/microsoft/mssql-django/issues/340 # Ensures the integrity of bulk updates with different types int_value = models.IntegerField(null=True) name = models.CharField(max_length=100, null=True) date = models.DateTimeField(null=True) class TestUniqueNullableModel(models.Model): # Issue https://github.com/ESSolutions/django-mssql-backend/issues/38: # This field started off as unique=True *and* null=True so it is implemented with a filtered unique index # Then it is made non-nullable by a subsequent migration, to check this is correctly handled (the index # should be dropped, then a normal unique constraint should be added, now that the 
column is not nullable) test_field = models.CharField(max_length=100, unique=True) # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 1) # Field used for testing changing the 'type' of a field that's both unique & nullable x = models.CharField(max_length=11, null=True, unique=True) # A variant of Issue https://github.com/microsoft/mssql-django/issues/14 case (b) # but for a unique index (not db_index) y_renamed = models.IntegerField(null=True, unique=True) class TestNullableUniqueTogetherModel(models.Model): class Meta: unique_together = (('a', 'b', 'c'),) # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 2) # Fields used for testing changing the type of a field that is in a `unique_together` a = models.CharField(max_length=51, null=True) b = models.CharField(max_length=50) c = models.CharField(max_length=50) class TestRemoveOneToOneFieldModel(models.Model): # Issue https://github.com/ESSolutions/django-mssql-backend/pull/51 # Fields used for testing removing OneToOne field. Verifies that delete_unique # does not try to remove indexes that have already been removed # b = models.OneToOneField('self', on_delete=models.SET_NULL, null=True) a = models.CharField(max_length=50) class TestIndexesRetainedRenamed(models.Model): # Issue https://github.com/microsoft/mssql-django/issues/14 # In all these cases the column index should still exist afterwards # case (a) `a` starts out not nullable, but then is changed to be nullable a = models.IntegerField(db_index=True, null=True) # case (b) column originally called `b` is renamed b_renamed = models.IntegerField(db_index=True) # case (c) this entire model is renamed - this is just a column whose index can be checked afterwards c = models.IntegerField(db_index=True) class M2MOtherModel(models.Model): name = models.CharField(max_length=10) class TestRenameManyToManyFieldModel(models.Model): # Issue https://github.com/microsoft/mssql-django/issues/86 others_renamed = models.ManyToManyField(M2MOtherModel) class Topping(models.Model): name = models.UUIDField(primary_key=True, default=uuid.uuid4) class Pizza(models.Model): name = models.UUIDField(primary_key=True, default=uuid.uuid4) toppings = models.ManyToManyField(Topping) def __str__(self): return "%s (%s)" % ( self.name, ", ".join(topping.name for topping in self.toppings.all()), ) class TestUnsupportableUniqueConstraint(models.Model): class Meta: managed = False constraints = [ models.UniqueConstraint( name='or_constraint', fields=['_type'], condition=(Q(status='in_progress') | Q(status='needs_changes')), ), ] _type = models.CharField(max_length=50) status = models.CharField(max_length=50) class TestSupportableUniqueConstraint(models.Model): class Meta: constraints = [ models.UniqueConstraint( name='and_constraint', fields=['_type'], condition=( Q(status='in_progress') & Q(status='needs_changes') & Q(status='published') ), ), models.UniqueConstraint( name='in_constraint', fields=['_type'], condition=(Q(status__in=['in_progress', 'needs_changes'])), ), ] _type = models.CharField(max_length=50) status = models.CharField(max_length=50) class BinaryData(models.Model): binary = models.BinaryField(null=True) if VERSION >= (3, 1): class JSONModel(models.Model): value = models.JSONField() class Meta: required_db_features = {'supports_json_field'} if VERSION >= (3, 2): class TestCheckConstraintWithUnicode(models.Model): name = models.CharField(max_length=100) class Meta: required_db_features = { 'supports_table_check_constraints', } constraints = [ 
models.CheckConstraint( check=~models.Q(name__startswith='\u00f7'), name='name_does_not_starts_with_\u00f7', ) ] class Question(models.Model): question_text = models.CharField(max_length=200) pub_date = models.DateTimeField('date published') def __str__(self): return self.question_text def was_published_recently(self): return self.pub_date >= timezone.now() - datetime.timedelta(days=1) class Choice(models.Model): question = models.ForeignKey(Question, on_delete=models.CASCADE, null=True) choice_text = models.CharField(max_length=200) votes = models.IntegerField(default=0) class Meta: unique_together = (('question', 'choice_text')) class Customer_name(models.Model): Customer_name = models.CharField(max_length=100) class Meta: ordering = ['Customer_name'] class Customer_address(models.Model): Customer_name = models.ForeignKey(Customer_name, on_delete=models.CASCADE) Customer_address = models.CharField(max_length=100) class Meta: ordering = ['Customer_address'] class TimeZone(models.Model): date = models.DateTimeField(default=timezone.now) class Number(models.Model): integer = models.BigIntegerField(db_column="the_integer") float = models.FloatField(null=True, db_column="the_float") decimal_value = models.DecimalField(max_digits=20, decimal_places=17, null=True) def __str__(self): return "%i, %.3f, %.17f" % (self.integer, self.float, self.decimal_value) class Publisher(models.Model): name = models.CharField(max_length=100) class Book(models.Model): name = models.CharField(max_length=100) authors = models.ManyToManyField(Author, related_name="books") publisher = models.ForeignKey( Publisher, models.CASCADE, related_name="books", db_column="publisher_id_column", ) updated = models.DateTimeField(auto_now=True) mssql-django-1.5/testapp/runners.py000066400000000000000000000025561461302131500175100ustar00rootroot00000000000000from django.test.runner import DiscoverRunner from django.conf import settings import xmlrunner EXCLUDED_TESTS = getattr(settings, 'EXCLUDED_TESTS', []) REGEX_TESTS = getattr(settings, 'REGEX_TESTS', []) ENABLE_REGEX_TESTS = getattr(settings, 'ENABLE_REGEX_TESTS', False) def MarkexpectedFailure(): def decorator(test_item): def wrapper(): raise "Expected Failure" wrapper.__unittest_expecting_failure__ = True return wrapper return decorator class ExcludedTestSuiteRunner(DiscoverRunner): def build_suite(self, *args, **kwargs): suite = super().build_suite(*args, **kwargs) tests = [] for case in suite: test_name = case._testMethodName if ( ENABLE_REGEX_TESTS and case.id() in EXCLUDED_TESTS or not ENABLE_REGEX_TESTS and case.id() in EXCLUDED_TESTS + REGEX_TESTS ): test_method = getattr(case, test_name) setattr(case, test_name, MarkexpectedFailure()(test_method)) tests.append(case) suite._tests = tests return suite def run_suite(self, suite): kwargs = dict(verbosity=1, descriptions=False) with open('./result.xml', 'wb') as xml: return xmlrunner.XMLTestRunner( output=xml, **kwargs).run(suite) mssql-django-1.5/testapp/settings.py000066400000000000000000000410411461302131500176440ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
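# Illustrative note (added, assumption): the DATABASES entries below expect a
# local SQL Server listening on port 1433, e.g. one started with Docker:
#   docker run -d -p 1433:1433 -e ACCEPT_EULA=Y -e "SA_PASSWORD=MyPassword42" mcr.microsoft.com/mssql/server:2019-latest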
import os from pathlib import Path from django import VERSION BASE_DIR = Path(__file__).resolve().parent.parent DATABASES = { "default": { "ENGINE": "mssql", "NAME": "default", "USER": "sa", "PASSWORD": "MyPassword42", "HOST": "localhost", "PORT": "1433", "OPTIONS": {"driver": "ODBC Driver 17 for SQL Server", "return_rows_bulk_insert": True}, }, 'other': { "ENGINE": "mssql", "NAME": "other", "USER": "sa", "PASSWORD": "MyPassword42", "HOST": "localhost", "PORT": "1433", "OPTIONS": {"driver": "ODBC Driver 17 for SQL Server", "return_rows_bulk_insert": True}, }, } # Django 3.0 and below unit test doesn't handle more than 2 databases in DATABASES correctly if VERSION >= (3, 1): DATABASES['sqlite'] = { "ENGINE": "django.db.backends.sqlite3", "NAME": str(BASE_DIR / "db.sqlitetest"), } # Set to `True` locally if you want SQL queries logged to django_sql.log DEBUG = False # Logging LOG_DIR = os.path.join(os.path.dirname(__file__), '..', 'logs') os.makedirs(LOG_DIR, exist_ok=True) LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'myformatter': { 'format': '%(asctime)s P%(process)05dT%(thread)05d [%(levelname)s] %(name)s: %(message)s', }, }, 'handlers': { 'db_output': { 'level': 'DEBUG', 'class': 'logging.handlers.RotatingFileHandler', 'filename': os.path.join(LOG_DIR, 'django_sql.log'), 'formatter': 'myformatter', }, 'default': { 'level': 'DEBUG', 'class': 'logging.handlers.RotatingFileHandler', 'filename': os.path.join(LOG_DIR, 'default.log'), 'formatter': 'myformatter', } }, 'loggers': { '': { 'handlers': ['default'], 'level': 'DEBUG', 'propagate': False, }, 'django.db': { 'handlers': ['db_output'], 'level': 'DEBUG', 'propagate': False, }, }, } INSTALLED_APPS = ( 'django.contrib.contenttypes', 'django.contrib.staticfiles', 'django.contrib.auth', 'mssql', 'testapp', ) SECRET_KEY = "django_tests_secret_key" PASSWORD_HASHERS = [ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', ] DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' ENABLE_REGEX_TESTS = False USE_TZ = False TEST_RUNNER = "testapp.runners.ExcludedTestSuiteRunner" EXCLUDED_TESTS = [ 'aggregation_regress.tests.AggregationTests.test_annotation_with_value', 'aggregation.tests.AggregateTestCase.test_distinct_on_aggregate', 'annotations.tests.NonAggregateAnnotationTestCase.test_annotate_exists', 'custom_lookups.tests.BilateralTransformTests.test_transform_order_by', 'expressions.tests.BasicExpressionsTests.test_filtering_on_annotate_that_uses_q', 'expressions.tests.BasicExpressionsTests.test_order_by_exists', 'expressions.tests.ExpressionOperatorTests.test_righthand_power', 'expressions.tests.FTimeDeltaTests.test_datetime_subtraction_microseconds', 'expressions.tests.FTimeDeltaTests.test_duration_with_datetime_microseconds', 'expressions.tests.IterableLookupInnerExpressionsTests.test_expressions_in_lookups_join_choice', 'expressions_case.tests.CaseExpressionTests.test_annotate_with_in_clause', 'expressions_window.tests.WindowFunctionTests.test_nth_returns_null', 'expressions_window.tests.WindowFunctionTests.test_nthvalue', 'expressions_window.tests.WindowFunctionTests.test_range_n_preceding_and_following', 'field_deconstruction.tests.FieldDeconstructionTests.test_binary_field', 'ordering.tests.OrderingTests.test_orders_nulls_first_on_filtered_subquery', 'get_or_create.tests.UpdateOrCreateTransactionTests.test_creation_in_transaction', 'indexes.tests.PartialIndexTests.test_multiple_conditions', 'migrations.test_executor.ExecutorTests.test_alter_id_type_with_fk', 
'migrations.test_operations.OperationTests.test_add_constraint_percent_escaping', 'migrations.test_operations.OperationTests.test_alter_field_pk', 'migrations.test_operations.OperationTests.test_alter_field_reloads_state_on_fk_with_to_field_target_changes', 'schema.tests.SchemaTests.test_alter_auto_field_to_char_field', 'schema.tests.SchemaTests.test_alter_auto_field_to_integer_field', 'schema.tests.SchemaTests.test_alter_implicit_id_to_explicit', 'schema.tests.SchemaTests.test_alter_int_pk_to_autofield_pk', 'schema.tests.SchemaTests.test_alter_int_pk_to_bigautofield_pk', 'schema.tests.SchemaTests.test_alter_pk_with_self_referential_field', 'schema.tests.SchemaTests.test_remove_field_check_does_not_remove_meta_constraints', 'schema.tests.SchemaTests.test_remove_field_unique_does_not_remove_meta_constraints', 'schema.tests.SchemaTests.test_text_field_with_db_index', 'schema.tests.SchemaTests.test_unique_together_with_fk', 'schema.tests.SchemaTests.test_unique_together_with_fk_with_existing_index', 'aggregation.tests.AggregateTestCase.test_count_star', 'aggregation_regress.tests.AggregationTests.test_values_list_annotation_args_ordering', 'expressions.tests.FTimeDeltaTests.test_invalid_operator', 'fixtures_regress.tests.TestFixtures.test_loaddata_raises_error_when_fixture_has_invalid_foreign_key', 'invalid_models_tests.test_ordinary_fields.TextFieldTests.test_max_length_warning', 'model_indexes.tests.IndexesTests.test_db_tablespace', 'ordering.tests.OrderingTests.test_deprecated_values_annotate', 'queries.test_qs_combinators.QuerySetSetOperationTests.test_limits', 'backends.tests.BackendTestCase.test_unicode_password', 'migrations.test_commands.MigrateTests.test_migrate_syncdb_app_label', 'migrations.test_commands.MigrateTests.test_migrate_syncdb_deferred_sql_executed_with_schemaeditor', 'migrations.test_operations.OperationTests.test_alter_field_pk_fk', 'schema.tests.SchemaTests.test_add_foreign_key_quoted_db_table', 'schema.tests.SchemaTests.test_unique_and_reverse_m2m', 'schema.tests.SchemaTests.test_unique_no_unnecessary_fk_drops', 'select_for_update.tests.SelectForUpdateTests.test_for_update_after_from', 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_exact_lookup', 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_greaterthan_lookup', 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_lessthan_lookup', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_exact_lookup', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_greaterthan_lookup', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_lessthan_lookup', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_ambiguous_and_invalid_times', 'delete.tests.DeletionTests.test_only_referenced_fields_selected', 'queries.test_db_returning.ReturningValuesTests.test_insert_returning', 'queries.test_db_returning.ReturningValuesTests.test_insert_returning_non_integer', 'backends.tests.BackendTestCase.test_queries', 'schema.tests.SchemaTests.test_inline_fk', 'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_exists', 'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_values_collision', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_func_with_timezone', 'expressions.tests.FTimeDeltaTests.test_date_subquery_subtraction', 
'expressions.tests.FTimeDeltaTests.test_datetime_subquery_subtraction', 'expressions.tests.FTimeDeltaTests.test_time_subquery_subtraction', 'migrations.test_operations.OperationTests.test_alter_field_reloads_state_on_fk_with_to_field_target_type_change', 'schema.tests.SchemaTests.test_alter_smallint_pk_to_smallautofield_pk', 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_func', 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_iso_weekday_func', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_func', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_iso_weekday_func', 'datetimes.tests.DateTimesTests.test_datetimes_ambiguous_and_invalid_times', 'inspectdb.tests.InspectDBTestCase.test_number_field_types', 'inspectdb.tests.InspectDBTestCase.test_json_field', 'ordering.tests.OrderingTests.test_default_ordering_by_f_expression', 'ordering.tests.OrderingTests.test_order_by_nulls_first', 'ordering.tests.OrderingTests.test_order_by_nulls_last', 'queries.test_qs_combinators.QuerySetSetOperationTests.test_ordering_by_f_expression_and_alias', 'queries.test_db_returning.ReturningValuesTests.test_insert_returning_multiple', 'dbshell.tests.DbshellCommandTestCase.test_command_missing', 'schema.tests.SchemaTests.test_char_field_pk_to_auto_field', 'datetimes.tests.DateTimesTests.test_21432', # JSONFields 'model_fields.test_jsonfield.TestQuerying.test_key_quoted_string', 'model_fields.test_jsonfield.TestQuerying.test_isnull_key', 'model_fields.test_jsonfield.TestQuerying.test_none_key', 'model_fields.test_jsonfield.TestQuerying.test_none_key_and_exact_lookup', 'model_fields.test_jsonfield.TestQuerying.test_key_escape', 'model_fields.test_jsonfield.TestQuerying.test_ordering_by_transform', 'expressions_window.tests.WindowFunctionTests.test_key_transform', # Django 3.2 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_func_with_timezone', 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_timezone_applied_before_truncation', 'expressions.tests.ExistsTests.test_optimizations', 'expressions.tests.FTimeDeltaTests.test_delta_add', 'expressions.tests.FTimeDeltaTests.test_delta_subtract', 'expressions.tests.FTimeDeltaTests.test_delta_update', 'expressions.tests.FTimeDeltaTests.test_exclude', 'expressions.tests.FTimeDeltaTests.test_mixed_comparisons1', 'expressions.tests.FTimeDeltaTests.test_negative_timedelta_update', 'inspectdb.tests.InspectDBTestCase.test_field_types', 'lookup.tests.LookupTests.test_in_ignore_none', 'lookup.tests.LookupTests.test_in_ignore_none_with_unhashable_items', 'queries.test_qs_combinators.QuerySetSetOperationTests.test_exists_union', 'schema.tests.SchemaTests.test_ci_cs_db_collation', 'select_for_update.tests.SelectForUpdateTests.test_unsuported_no_key_raises_error', # Django 4.0 'aggregation.tests.AggregateTestCase.test_aggregation_default_using_date_from_database', 'aggregation.tests.AggregateTestCase.test_aggregation_default_using_datetime_from_database', 'aggregation.tests.AggregateTestCase.test_aggregation_default_using_time_from_database', 'expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide', 'lookup.tests.LookupQueryingTests.test_alias', 'lookup.tests.LookupQueryingTests.test_filter_exists_lhs', 'lookup.tests.LookupQueryingTests.test_filter_lookup_lhs', 'lookup.tests.LookupQueryingTests.test_filter_subquery_lhs', 'lookup.tests.LookupQueryingTests.test_filter_wrapped_lookup_lhs', 
    'lookup.tests.LookupQueryingTests.test_lookup_in_order_by',
    'lookup.tests.LookupTests.test_lookup_rhs',
    'order_with_respect_to.tests.OrderWithRespectToBaseTests.test_previous_and_next_in_order',
    'ordering.tests.OrderingTests.test_default_ordering_does_not_affect_group_by',
    'queries.test_explain.ExplainUnsupportedTests.test_message',
    'aggregation.tests.AggregateTestCase.test_coalesced_empty_result_set',
    'aggregation.tests.AggregateTestCase.test_empty_result_optimization',
    'queries.tests.Queries6Tests.test_col_alias_quoted',
    'backends.tests.BackendTestCase.test_queries_logger',
    'migrations.test_operations.OperationTests.test_alter_field_pk_mti_fk',
    'migrations.test_operations.OperationTests.test_run_sql_add_missing_semicolon_on_collect_sql',
    'migrations.test_operations.OperationTests.test_alter_field_pk_mti_and_fk_to_base',

    # Hashing
    # UTF-8 support was added in SQL Server 2019
    'db_functions.text.test_md5.MD5Tests.test_basic',
    'db_functions.text.test_md5.MD5Tests.test_transform',
    'db_functions.text.test_sha1.SHA1Tests.test_basic',
    'db_functions.text.test_sha1.SHA1Tests.test_transform',
    'db_functions.text.test_sha256.SHA256Tests.test_basic',
    'db_functions.text.test_sha256.SHA256Tests.test_transform',
    'db_functions.text.test_sha512.SHA512Tests.test_basic',
    'db_functions.text.test_sha512.SHA512Tests.test_transform',
    # SQL Server doesn't support SHA224 or SHA384
    'db_functions.text.test_sha224.SHA224Tests.test_basic',
    'db_functions.text.test_sha224.SHA224Tests.test_transform',
    'db_functions.text.test_sha384.SHA384Tests.test_basic',
    'db_functions.text.test_sha384.SHA384Tests.test_transform',

    # Timezone
    'timezones.tests.NewDatabaseTests.test_cursor_explicit_time_zone',
    # The following tests are skipped because pyodbc drops timezone info: https://github.com/mkleehammer/pyodbc/issues/810
    'timezones.tests.LegacyDatabaseTests.test_cursor_execute_accepts_naive_datetime',
    'timezones.tests.LegacyDatabaseTests.test_cursor_execute_returns_naive_datetime',
    'timezones.tests.NewDatabaseTests.test_cursor_execute_accepts_naive_datetime',
    'timezones.tests.NewDatabaseTests.test_cursor_execute_returns_naive_datetime',
    'timezones.tests.NewDatabaseTests.test_cursor_execute_accepts_aware_datetime',
    'timezones.tests.NewDatabaseTests.test_cursor_execute_returns_aware_datetime',

    # Django 4.1
    'aggregation.test_filter_argument.FilteredAggregateTests.test_filtered_aggregate_on_exists',
    'aggregation.tests.AggregateTestCase.test_aggregation_exists_multivalued_outeref',
    'annotations.tests.NonAggregateAnnotationTestCase.test_full_expression_annotation_with_aggregation',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_lookup_name_sql_injection',
    'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_lookup_name_sql_injection',
    'schema.tests.SchemaTests.test_autofield_to_o2o',
    'prefetch_related.tests.PrefetchRelatedTests.test_m2m_prefetching_iterator_with_chunks',
    'migrations.test_operations.OperationTests.test_create_model_with_boolean_expression_in_check_constraint',
    'queries.test_qs_combinators.QuerySetSetOperationTests.test_union_in_subquery_related_outerref',
    # These tests pass on SQL Server 2022 or newer
    'model_fields.test_jsonfield.TestQuerying.test_has_key_list',
    'model_fields.test_jsonfield.TestQuerying.test_has_key_null_value',
    'model_fields.test_jsonfield.TestQuerying.test_lookups_with_key_transform',
    'model_fields.test_jsonfield.TestQuerying.test_ordering_grouping_by_count',
    'model_fields.test_jsonfield.TestQuerying.test_has_key_number',

    # Django 4.2
'get_or_create.tests.UpdateOrCreateTests.test_update_only_defaults_and_pre_save_fields_when_local_fields', 'aggregation.test_filter_argument.FilteredAggregateTests.test_filtered_aggregate_empty_condition', 'aggregation.test_filter_argument.FilteredAggregateTests.test_filtered_aggregate_ref_multiple_subquery_annotation', 'aggregation.test_filter_argument.FilteredAggregateTests.test_filtered_aggregate_ref_subquery_annotation', 'aggregation.tests.AggregateAnnotationPruningTests.test_referenced_group_by_annotation_kept', 'aggregation.tests.AggregateAnnotationPruningTests.test_referenced_window_requires_wrapping', 'aggregation.tests.AggregateTestCase.test_group_by_nested_expression_with_params', 'expressions.tests.BasicExpressionsTests.test_aggregate_subquery_annotation', 'queries.test_qs_combinators.QuerySetSetOperationTests.test_union_order_with_null_first_last', 'queries.test_qs_combinators.QuerySetSetOperationTests.test_union_with_select_related_and_order', 'expressions_window.tests.WindowFunctionTests.test_limited_filter', 'schema.tests.SchemaTests.test_remove_ignored_unique_constraint_not_create_fk_index', ] REGEX_TESTS = [ 'lookup.tests.LookupTests.test_regex', 'lookup.tests.LookupTests.test_regex_backreferencing', 'lookup.tests.LookupTests.test_regex_non_ascii', 'lookup.tests.LookupTests.test_regex_non_string', 'lookup.tests.LookupTests.test_regex_null', 'model_fields.test_jsonfield.TestQuerying.test_key_iregex', 'model_fields.test_jsonfield.TestQuerying.test_key_regex', ] mssql-django-1.5/testapp/tests/000077500000000000000000000000001461302131500165745ustar00rootroot00000000000000mssql-django-1.5/testapp/tests/__init__.py000066400000000000000000000007151461302131500207100ustar00rootroot00000000000000import django.db def get_constraints(table_name): connection = django.db.connections[django.db.DEFAULT_DB_ALIAS] return connection.introspection.get_constraints( connection.cursor(), table_name=table_name, ) def get_constraint_names_where(table_name, **kwargs): return [ name for name, details in get_constraints(table_name=table_name).items() if all(details[k] == v for k, v in kwargs.items()) ] mssql-django-1.5/testapp/tests/test_bitshift.py000066400000000000000000000015441461302131500220250ustar00rootroot00000000000000from django.test import TestCase from django.db.models import F from ..models import Number class BitShiftTest(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_bitwise_left_shift_operator_check(self): Number.objects.update(integer=F("integer").bitleftshift(3)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 336) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -336) def test_lefthand_bitwise_right_shift_operator_check(self): Number.objects.update(integer=F("integer").bitrightshift(3)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 5) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -6)mssql-django-1.5/testapp/tests/test_constraints.py000066400000000000000000000260051461302131500225570ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import logging import django.db.utils from django.db import connections, migrations, models from django.db.migrations.state import ProjectState from django.db.utils import IntegrityError from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature from mssql.base import DatabaseWrapper from . 
import get_constraint_names_where from ..models import ( Author, Editor, M2MOtherModel, Post, TestUniqueNullableModel, TestNullableUniqueTogetherModel, TestRenameManyToManyFieldModel, ) logger = logging.getLogger('mssql.tests') @skipUnlessDBFeature('supports_nullable_unique_constraints') class TestNullableUniqueColumn(TestCase): def test_type_change(self): # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 1) # After field `x` has had its type changed, the filtered UNIQUE INDEX which is # implementing the nullable unique constraint should still be correctly in place # i.e. allowing multiple NULLs but still enforcing uniqueness of non-NULLs # Allowed (NULL != NULL) TestUniqueNullableModel.objects.create(x=None, test_field='randomness') TestUniqueNullableModel.objects.create(x=None, test_field='doesntmatter') # Disallowed TestUniqueNullableModel.objects.create(x="foo", test_field='irrelevant') with self.assertRaises(IntegrityError): TestUniqueNullableModel.objects.create(x="foo", test_field='nonsense') def test_rename(self): # Rename of a column which is both nullable & unique. Test that # the constraint-enforcing unique index survived this migration # Related to both: # Issue https://github.com/microsoft/mssql-django/issues/67 # Issue https://github.com/microsoft/mssql-django/issues/14 # Allowed (NULL != NULL) TestUniqueNullableModel.objects.create(y_renamed=None, test_field='something') TestUniqueNullableModel.objects.create(y_renamed=None, test_field='anything') # Disallowed TestUniqueNullableModel.objects.create(y_renamed=42, test_field='nonimportant') with self.assertRaises(IntegrityError): TestUniqueNullableModel.objects.create(y_renamed=42, test_field='whocares') @skipUnlessDBFeature('supports_partially_nullable_unique_constraints') class TestPartiallyNullableUniqueTogether(TestCase): def test_partially_nullable(self): # Check basic behaviour of `unique_together` where at least 1 of the columns is nullable # It should be possible to have 2 rows both with NULL `alt_editor` author = Author.objects.create(name="author") Post.objects.create(title="foo", author=author) Post.objects.create(title="foo", author=author) # But `unique_together` is still enforced for non-NULL values editor = Editor.objects.create(name="editor") Post.objects.create(title="foo", author=author, alt_editor=editor) with self.assertRaises(IntegrityError): Post.objects.create(title="foo", author=author, alt_editor=editor) def test_after_type_change(self): # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 2) # After one of the fields in the `unique_together` has had its type changed # in a migration, the constraint should still be correctly enforced # Multiple rows with a=NULL are considered different TestNullableUniqueTogetherModel.objects.create(a=None, b='bbb', c='ccc') TestNullableUniqueTogetherModel.objects.create(a=None, b='bbb', c='ccc') # Uniqueness still enforced for non-NULL values TestNullableUniqueTogetherModel.objects.create(a='aaa', b='bbb', c='ccc') with self.assertRaises(IntegrityError): TestNullableUniqueTogetherModel.objects.create(a='aaa', b='bbb', c='ccc') class TestHandleOldStyleUniqueTogether(TransactionTestCase): """ Regression test for https://github.com/microsoft/mssql-django/issues/137 Start with a unique_together which was created by an older version of this backend code, which implemented it with a table CONSTRAINT instead of a filtered UNIQUE INDEX like the current code does. e.g. 
    django-mssql-backend < v2.6.0 or (before that) all versions of django-pyodbc-azure

    Then alter the type of a column (e.g. max_length of CharField) which is part of that
    unique_together and check that the (old-style) CONSTRAINT is dropped beforehand
    (& a new-style UNIQUE INDEX created afterwards).
    """
    def test_drop_old_unique_together_constraint(self):
        class TestMigrationA(migrations.Migration):
            initial = True

            operations = [
                migrations.CreateModel(
                    name='TestHandleOldStyleUniqueTogether',
                    fields=[
                        ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                        ('foo', models.CharField(max_length=50)),
                        ('bar', models.CharField(max_length=50)),
                    ],
                ),
                # Create the unique_together so that Django knows it exists; however, we will deliberately drop
                # it (the filtered unique INDEX) below & manually replace it with the old implementation (a CONSTRAINT)
                migrations.AlterUniqueTogether(
                    name='testhandleoldstyleuniquetogether',
                    unique_together={('foo', 'bar')}
                ),
            ]

        class TestMigrationB(migrations.Migration):
            operations = [
                # Alter the type of the field to trigger the _alter_field code which drops/recreates indexes/constraints
                migrations.AlterField(
                    model_name='testhandleoldstyleuniquetogether',
                    name='foo',
                    field=models.CharField(max_length=99),
                )
            ]

        migration_a = TestMigrationA(name='test_drop_old_unique_together_constraint_a', app_label='testapp')
        migration_b = TestMigrationB(name='test_drop_old_unique_together_constraint_b', app_label='testapp')
        connection = connections['default']

        # Setup
        with connection.schema_editor(atomic=True) as editor:
            project_state = migration_a.apply(ProjectState(), editor)

        # Manually replace the unique_together-enforcing INDEX with the old implementation using a CONSTRAINT instead,
        # to simulate the state of a database which had been migrated using an older version of this backend
        table_name = 'testapp_testhandleoldstyleuniquetogether'
        unique_index_names = get_constraint_names_where(table_name=table_name, index=True, unique=True)
        assert len(unique_index_names) == 1
        unique_together_name = unique_index_names[0]

        logger.debug('Replacing UNIQUE INDEX %s with a CONSTRAINT of the same name', unique_together_name)
        with connection.schema_editor(atomic=True) as editor:
            # Out with the new
            editor.execute('DROP INDEX [%s] ON [%s]' % (unique_together_name, table_name))
            # In with the old, so that we end up in the state that an old database might be in
            editor.execute('ALTER TABLE [%s] ADD CONSTRAINT [%s] UNIQUE ([foo], [bar])' % (table_name, unique_together_name))

        # Test by running AlterField
        with connection.schema_editor(atomic=True) as editor:
            # If this doesn't explode then all is well. Without the bugfix, the CONSTRAINT wasn't dropped before,
            # so then re-instating the unique_together using an INDEX of the same name (after altering the field)
            # would fail due to the presence of a CONSTRAINT (really still an index under the hood) with that name.
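            # For reference: the new-style implementation of unique_together is a unique
            # INDEX rather than a table CONSTRAINT; when any covered column is nullable
            # the index is filtered to get ANSI NULL behaviour. A sketch of that DDL
            # (index/table names assumed for illustration):
            #   CREATE UNIQUE INDEX [ix_name] ON [table_name] ([foo], [bar])
            #   WHERE [foo] IS NOT NULL AND [bar] IS NOT NULL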
try: migration_b.apply(project_state, editor) except django.db.utils.DatabaseError as e: logger.exception('Failed to AlterField:') self.fail('Check for regression of issue #137, AlterField failed with exception: %s' % e) class TestRenameManyToManyField(TestCase): def test_uniqueness_still_enforced_afterwards(self): # Issue https://github.com/microsoft/mssql-django/issues/86 # Prep thing1 = TestRenameManyToManyFieldModel.objects.create() other1 = M2MOtherModel.objects.create(name='1') other2 = M2MOtherModel.objects.create(name='2') thing1.others_renamed.set([other1, other2]) # Check that the unique_together on the through table is still enforced # (created by create_many_to_many_intermediary_model) ThroughModel = TestRenameManyToManyFieldModel.others_renamed.through with self.assertRaises(IntegrityError, msg='Through model fails to enforce uniqueness after m2m rename'): # This should fail due to the unique_together because (thing1, other1) is already in the through table ThroughModel.objects.create(testrenamemanytomanyfieldmodel=thing1, m2mothermodel=other1) class TestUniqueConstraints(TransactionTestCase): def test_unsupportable_unique_constraint(self): # Only execute tests when running against SQL Server connection = connections['default'] if isinstance(connection, DatabaseWrapper): class TestMigration(migrations.Migration): initial = True operations = [ migrations.CreateModel( name='TestUnsupportableUniqueConstraint', fields=[ ( 'id', models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name='ID', ), ), ('_type', models.CharField(max_length=50)), ('status', models.CharField(max_length=50)), ], ), migrations.AddConstraint( model_name='testunsupportableuniqueconstraint', constraint=models.UniqueConstraint( condition=models.Q( ('status', 'in_progress'), ('status', 'needs_changes'), _connector='OR', ), fields=('_type',), name='or_constraint', ), ), ] migration = TestMigration(name='test_unsupportable_unique_constraint', app_label='testapp') with connection.schema_editor(atomic=True) as editor: with self.assertRaisesRegex( NotImplementedError, "does not support OR conditions" ): return migration.apply(ProjectState(), editor) mssql-django-1.5/testapp/tests/test_expressions.py000066400000000000000000000132741461302131500225760ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
import datetime from unittest import skipUnless from django import VERSION from django.db.models import CharField, IntegerField, F from django.db.models.expressions import Case, Exists, OuterRef, Subquery, Value, When, ExpressionWrapper from django.test import TestCase, skipUnlessDBFeature from django.db.models.aggregates import Count, Sum from ..models import Author, Book, Comment, Post, Editor, ModelWithNullableFieldsOfDifferentTypes DJANGO3 = VERSION[0] >= 3 class TestSubquery(TestCase): def setUp(self): self.author = Author.objects.create(name="author") self.post = Post.objects.create(title="foo", author=self.author) def test_with_count(self): newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created_at') Post.objects.annotate( post_exists=Subquery(newest.values('text')[:1]) ).filter(post_exists=True).count() class TestExists(TestCase): def setUp(self): self.author = Author.objects.create(name="author") self.post = Post.objects.create(title="foo", author=self.author) def test_with_count(self): Post.objects.annotate( post_exists=Exists(Post.objects.all()) ).filter(post_exists=True).count() @skipUnless(DJANGO3, "Django 3 specific tests") def test_with_case_when(self): author = Author.objects.annotate( has_post=Case( When(Exists(Post.objects.filter(author=OuterRef('pk')).values('pk')), then=Value(1)), default=Value(0), output_field=IntegerField(), ) ).get() self.assertEqual(author.has_post, 1) def test_unnecessary_exists_group_by(self): author = Author.objects.annotate( has_post=Case( When(Exists(Post.objects.filter(author=OuterRef('pk')).values('pk')), then=Value(1)), default=Value(0), output_field=IntegerField(), )).annotate( amount=Count("post") ).get() self.assertEqual(author.amount, 1) self.assertEqual(author.has_post, 1) def test_combined_expression_annotation_with_aggregation(self): book = Author.objects.annotate( combined=ExpressionWrapper( Value(2) * Value(5), output_field=IntegerField() ), null_value=ExpressionWrapper( Value(None), output_field=IntegerField() ), rating_count=Count("post"), ).first() self.assertEqual(book.combined, 10) self.assertEqual(book.null_value, None) @skipUnless(DJANGO3, "Django 3 specific tests") def test_order_by_exists(self): author_without_posts = Author.objects.create(name="other author") authors_by_posts = Author.objects.order_by(Exists(Post.objects.filter(author=OuterRef('pk'))).desc()) self.assertSequenceEqual(authors_by_posts, [self.author, author_without_posts]) authors_by_posts = Author.objects.order_by(Exists(Post.objects.filter(author=OuterRef('pk'))).asc()) self.assertSequenceEqual(authors_by_posts, [author_without_posts, self.author]) class TestGroupBy(TestCase): def test_group_by_case(self): annotated_queryset = Book.objects.annotate(age=Case( When(id__gt=1000, then=Value("new")), default=Value("old"), output_field=CharField())).values('age').annotate(sum=Sum('id')) self.assertEqual(list(annotated_queryset.all()), []) @skipUnless(DJANGO3, "Django 3 specific tests") @skipUnlessDBFeature("order_by_nulls_first") class TestOrderBy(TestCase): def setUp(self): self.author = Author.objects.create(name="author") self.post = Post.objects.create(title="foo", author=self.author) self.editor = Editor.objects.create(name="editor") self.post_alt = Post.objects.create(title="Post with editor", author=self.author, alt_editor=self.editor) def test_order_by_nulls_last(self): results = Post.objects.order_by(F("alt_editor").asc(nulls_last=True)).all() self.assertEqual(len(results), 2) self.assertIsNotNone(results[0].alt_editor) 
self.assertIsNone(results[1].alt_editor) def test_order_by_nulls_first(self): results = Post.objects.order_by(F("alt_editor").desc(nulls_first=True)).all() self.assertEqual(len(results), 2) self.assertIsNone(results[0].alt_editor) self.assertIsNotNone(results[1].alt_editor) class TestBulkUpdate(TestCase): def test_bulk_update_different_column_types(self): data = ( (1, 'a', datetime.datetime(year=2024, month=1, day=1)), (2, 'b', datetime.datetime(year=2023, month=12, day=31)) ) objs = ModelWithNullableFieldsOfDifferentTypes.objects.bulk_create(ModelWithNullableFieldsOfDifferentTypes(int_value=row_data[0], name=row_data[1], date=row_data[2]) for row_data in data) for obj in objs: obj.int_value = None obj.name = None obj.date = None ModelWithNullableFieldsOfDifferentTypes.objects.bulk_update(objs, ["int_value", "name", "date"]) self.assertCountEqual(ModelWithNullableFieldsOfDifferentTypes.objects.filter(int_value__isnull=True), objs) self.assertCountEqual(ModelWithNullableFieldsOfDifferentTypes.objects.filter(name__isnull=True), objs) self.assertCountEqual(ModelWithNullableFieldsOfDifferentTypes.objects.filter(date__isnull=True), objs) mssql-django-1.5/testapp/tests/test_fields.py000066400000000000000000000024541461302131500214600ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. from django.test import TestCase from ..models import UUIDModel, Customer_name, Customer_address class TestUUIDField(TestCase): def test_create(self): UUIDModel.objects.create() class TestOrderBy(TestCase): def test_order_by(self): # Issue 109 # Sample: https://github.com/jwaschkau/django-mssql-issue109 john = Customer_name.objects.create(Customer_name='John') Customer_address.objects.create(Customer_address='123 Main St', Customer_name=john) names = Customer_name.objects.select_for_update().all() addresses = Customer_address.objects.filter(Customer_address='123 Main St', Customer_name__in=names) self.assertEqual(len(addresses), 1) def test_random_order_by(self): # https://code.djangoproject.com/ticket/33531 Customer_name.objects.bulk_create([ Customer_name(Customer_name='Jack'), Customer_name(Customer_name='Jane'), Customer_name(Customer_name='John'), ]) names = [] # iterate 20 times to make sure we don't get the same result for _ in range(20): names.append(list(Customer_name.objects.order_by('?'))) self.assertNotEqual(names.count(names[0]), 20) mssql-django-1.5/testapp/tests/test_getorcreate.py000066400000000000000000000033131461302131500225110ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
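# update_or_create() should emit an UPDATE touching only the columns named in
# defaults plus pre_save-driven columns (here the auto_now 'updated' field). A
# minimal sketch of the call whose SQL is captured below:
#
#   book, created = Book.objects.update_or_create(pk=book.pk, defaults={'publisher': publisher})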
from unittest import skipUnless from django import VERSION from django.test import TestCase from django.db import connection from django.test.utils import CaptureQueriesContext from ..models import Book, Publisher DJANGO42 = VERSION >= (4, 2) # Copied from Django test suite but modified to test our code @skipUnless(DJANGO42, "Django 4.2 specific tests") class UpdateOrCreateTests(TestCase): def test_update_only_defaults_and_pre_save_fields_when_local_fields(self): publisher = Publisher.objects.create(name="Acme Publishing") book = Book.objects.create(publisher=publisher, name="The Book of Ed & Fred") for defaults in [{"publisher": publisher}, {"publisher_id": publisher}]: with self.subTest(defaults=defaults): with CaptureQueriesContext(connection) as captured_queries: book, created = Book.objects.update_or_create( pk=book.pk, defaults=defaults, ) self.assertIs(created, False) update_sqls = [ q["sql"] for q in captured_queries if "UPDATE" in q["sql"] ] self.assertEqual(len(update_sqls), 1) update_sql = update_sqls[0] self.assertIsNotNone(update_sql) self.assertIn( connection.ops.quote_name("publisher_id_column"), update_sql ) self.assertIn(connection.ops.quote_name("updated"), update_sql) # Name should not be updated. self.assertNotIn(connection.ops.quote_name("name"), update_sql) mssql-django-1.5/testapp/tests/test_indexes.py000066400000000000000000000230421461302131500216450ustar00rootroot00000000000000import logging import django.db from django import VERSION from django.apps import apps from django.db import models, migrations from django.db.migrations.migration import Migration from django.db.migrations.state import ProjectState from django.db.models import UniqueConstraint from django.db.utils import DEFAULT_DB_ALIAS, ConnectionHandler, ProgrammingError from django.test import TestCase from . 
import get_constraints from ..models import ( TestIndexesRetainedRenamed, Choice, Question, ) connections = ConnectionHandler() if (VERSION >= (3, 2)): from django.utils.connection import ConnectionProxy connection = ConnectionProxy(connections, DEFAULT_DB_ALIAS) else: from django.db import DefaultConnectionProxy connection = DefaultConnectionProxy() logger = logging.getLogger('mssql.tests') class TestIndexesRetained(TestCase): """ Issue https://github.com/microsoft/mssql-django/issues/14 Indexes dropped during a migration should be re-created afterwards assuming the field still has `db_index=True` """ @classmethod def setUpClass(cls): super().setUpClass() # Pre-fetch which indexes exist for the relevant test model # now that all the test migrations have run cls.constraints = get_constraints(table_name=TestIndexesRetainedRenamed._meta.db_table) cls.indexes = {k: v for k, v in cls.constraints.items() if v['index'] is True} def _assert_index_exists(self, columns): matching = {k: v for k, v in self.indexes.items() if set(v['columns']) == columns} assert len(matching) == 1, ( "Expected 1 index for columns %s but found %d %s" % ( columns, len(matching), ', '.join(matching.keys()) ) ) def test_field_made_nullable(self): # case (a) of https://github.com/microsoft/mssql-django/issues/14 self._assert_index_exists({'a'}) def test_field_renamed(self): # case (b) of https://github.com/microsoft/mssql-django/issues/14 self._assert_index_exists({'b_renamed'}) def test_table_renamed(self): # case (c) of https://github.com/microsoft/mssql-django/issues/14 self._assert_index_exists({'c'}) def _get_all_models(): for app in apps.get_app_configs(): app_label = app.label for model_name, model_class in app.models.items(): yield model_class, model_name, app_label class TestCorrectIndexes(TestCase): def test_correct_indexes_exist(self): """ Check there are the correct number of indexes for each field after all migrations by comparing what the model says (e.g. `db_index=True` / `index_together` etc.) with the actual constraints found in the database. This acts as a general regression test for issues such as: - duplicate index created (e.g. 
https://github.com/microsoft/mssql-django/issues/77) - index dropped but accidentally not recreated - index incorrectly 'recreated' when it was never actually dropped or required at all Note of course that it only covers cases which exist in testapp/models.py and associated migrations """ connection = django.db.connections[django.db.DEFAULT_DB_ALIAS] for model_cls, model_name, app_label in _get_all_models(): logger.debug('Checking model: %s.%s', app_label, model_name) if not model_cls._meta.managed: # Models where the table is not managed by Django migrations are irrelevant continue model_constraints = get_constraints(table_name=model_cls._meta.db_table) # Check correct indexes are in place for all fields in model for field in model_cls._meta.get_fields(): if not hasattr(field, 'column'): # ignore things like reverse fields which don't have a column on this table continue col_name = connection.introspection.identifier_converter(field.column) field_str = f'{app_label}.{model_name}.{field.name} ({col_name})' logger.debug(' > Checking field: %s', field_str) # Find constraints which include this column col_constraints = [ dict(name=name, **infodict) for name, infodict in model_constraints.items() if col_name in infodict['columns'] ] col_indexes = [c for c in col_constraints if c['index']] for c in col_constraints: logger.debug(' > Column <%s> is involved in constraint: %s', col_name, c) # There should be an explicit index for each of the following cases expected_index_causes = [] if field.db_index: expected_index_causes.append('db_index=True') for field_names in model_cls._meta.index_together: if field.name in field_names: expected_index_causes.append(f'index_together[{field_names}]') if field._unique and field.null: # This is implemented using a (filtered) unique index (not a constraint) to get ANSI NULL behaviour expected_index_causes.append('unique=True & null=True') for field_names in model_cls._meta.unique_together: if field.name in field_names: # unique_together results in an index because this backend implements it using a # (filtered) unique index rather than a constraint, to get ANSI NULL behaviour expected_index_causes.append(f'unique_together[{field_names}]') for uniq_constraint in filter(lambda c: isinstance(c, UniqueConstraint), model_cls._meta.constraints): if field.name in uniq_constraint.fields and uniq_constraint.condition is not None: # Meta:constraints > UniqueConstraint with condition are implemented with filtered unique index expected_index_causes.append(f'UniqueConstraint (with condition) in Meta: constraints') # Other cases like `unique=True, null=False` or `field.primary_key` do have index-like constraints # but in those cases the introspection returns `"index": False` so they are not in the list of # explicit indexes which we are checking here (`col_indexes`) assert len(col_indexes) == len(expected_index_causes), \ 'Expected %s index(es) on %s but found %s.\n' \ 'Check for behaviour changes around index drop/recreate in methods like _alter_field.\n' \ 'Expected due to: %s\n' \ 'Found: %s' % ( len(expected_index_causes), field_str, len(col_indexes), expected_index_causes, '\n'.join(str(i) for i in col_indexes), ) logger.debug(' Found %s index(es) as expected', len(col_indexes)) class TestIndexesBeingDropped(TestCase): def test_unique_index_dropped(self): """ Issues https://github.com/microsoft/mssql-django/issues/110 and https://github.com/microsoft/mssql-django/issues/90 Unique indexes not being dropped when changing non-nullable foreign key with unique_together to 
    nullable, which caused a "dependent on column" error.
        """
        old_field = Choice._meta.get_field('question')
        new_field = models.ForeignKey(
            Question,
            null=False,
            on_delete=models.deletion.CASCADE
        )
        new_field.set_attributes_from_name("question")
        with connection.schema_editor() as editor:
            editor.alter_field(Choice, old_field, new_field, strict=True)
        old_field = new_field
        new_field = models.ForeignKey(
            Question,
            null=True,
            on_delete=models.deletion.CASCADE
        )
        new_field.set_attributes_from_name("question")
        try:
            with connection.schema_editor() as editor:
                editor.alter_field(Choice, old_field, new_field, strict=True)
        except ProgrammingError:
            self.fail("Unique indexes not being dropped")


class TestAddAndAlterUniqueIndex(TestCase):
    def test_alter_unique_nullable_to_non_nullable(self):
        """
        Test a single migration that creates a field with unique=True and null=True
        and then alters the field to set null=False.
        See https://github.com/microsoft/mssql-django/issues/22
        """
        operations = [
            migrations.CreateModel(
                "TestAlterNullableInUniqueField",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("a", models.CharField(max_length=4, unique=True, null=True)),
                ]
            ),
            migrations.AlterField(
                "testalternullableinuniquefield",
                "a",
                models.CharField(max_length=4, unique=True)
            )
        ]
        project_state = ProjectState()
        new_state = project_state.clone()
        migration = Migration("name", "testapp")
        migration.operations = operations
        try:
            with connection.schema_editor(atomic=True) as editor:
                migration.apply(new_state, editor)
        except django.db.utils.ProgrammingError as e:
            self.fail('Check that a field can be altered from unique & nullable to unique & non-nullable (issue #23); AlterField failed with exception: %s' % e)
mssql-django-1.5/testapp/tests/test_jsonfield.py000066400000000000000000000037301461302131500221650ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.
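# A key-transform filter such as JSONModel.objects.filter(value__a='b') is what
# issue #82 exercises below; on SQL Server this backend compiles the key lookup
# to a JSON_VALUE(value, '$.a') comparison (a sketch, not the exact generated
# SQL), while the SQLite connection uses Django's JSON1-based lookup instead.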
from unittest import skipUnless

from django import VERSION
from django.test import TestCase

if VERSION >= (3, 1):
    from ..models import JSONModel


def _check_jsonfield_supported_sqlite():
    # Info about JSONField support in SQLite: https://code.djangoproject.com/wiki/JSON1Extension
    import sqlite3
    supports_jsonfield = True
    try:
        conn = sqlite3.connect(':memory:')
        cursor = conn.cursor()
        cursor.execute('SELECT JSON(\'{"a": "b"}\')')
    except sqlite3.OperationalError:
        supports_jsonfield = False
    return supports_jsonfield


class TestJSONField(TestCase):
    databases = ['default']
    # Django 3.0 and below unit test doesn't handle more than 2 databases in DATABASES correctly
    if VERSION >= (3, 1):
        databases.append('sqlite')
    json = {
        'a': 'b',
        'b': 1,
        'c': '1',
        'd': [],
        'e': [1, 2],
        'f': ['a', 'b'],
        'g': [1, 'a'],
        'h': {},
        'i': {'j': 1},
        'j': False,
        'k': True,
        'l': {
            'foo': 'bar',
            'baz': {'a': 'b', 'c': 'd'},
            'bar': ['foo', 'bar'],
            'bax': {'foo': 'bar'},
        },
    }

    @skipUnless(VERSION >= (3, 1), "JSONField not supported in Django versions < 3.1")
    @skipUnless(
        _check_jsonfield_supported_sqlite(),
        "JSONField not supported by SQLite on this platform and Python version",
    )
    def test_keytransformexact_not_overriding(self):
        # Issue https://github.com/microsoft/mssql-django/issues/82
        json_obj = JSONModel(value=self.json)
        json_obj.save()
        self.assertSequenceEqual(
            JSONModel.objects.filter(value__a='b'),
            [json_obj],
        )

        json_obj.save(using='sqlite')
        self.assertSequenceEqual(
            JSONModel.objects.using('sqlite').filter(value__a='b'),
            [json_obj],
        )
mssql-django-1.5/testapp/tests/test_lookups.py000066400000000000000000000007441461302131500217060ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

from django.test import TestCase

from ..models import Pizza, Topping


class TestLookups(TestCase):
    def test_large_number_of_params_UUID(self):
        iterations = 3000
        for _ in range(iterations):
            Pizza.objects.create()
            Topping.objects.create()
        prefetch_result = Pizza.objects.prefetch_related('toppings')

        self.assertEqual(len(prefetch_result), iterations)
mssql-django-1.5/testapp/tests/test_multiple_databases.py000066400000000000000000000071371461302131500240570ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

from unittest import skipUnless

from django import VERSION
from django.core.exceptions import ValidationError
from django.db import OperationalError
from django.db.backends.sqlite3.operations import DatabaseOperations
from django.test import TestCase, skipUnlessDBFeature

from ..models import BinaryData, Pizza, Topping

if VERSION >= (3, 2):
    from ..models import TestCheckConstraintWithUnicode


@skipUnless(
    VERSION >= (3, 1),
    "Django 3.0 and below doesn't support different databases in unit tests",
)
class TestMultipleDatabases(TestCase):
    databases = ['default', 'sqlite']

    def test_in_split_parameter_list_as_sql(self):
        # Issue: https://github.com/microsoft/mssql-django/issues/92
        # Mimic databases that have a limit on parameters (e.g. 
Oracle DB) old_max_in_list_size = DatabaseOperations.max_in_list_size DatabaseOperations.max_in_list_size = lambda self: 100 mssql_iterations = 3000 Pizza.objects.bulk_create([Pizza() for _ in range(mssql_iterations)]) Topping.objects.bulk_create([Topping() for _ in range(mssql_iterations)]) prefetch_result = Pizza.objects.prefetch_related('toppings') self.assertEqual(len(prefetch_result), mssql_iterations) # Different iterations since SQLite has max host parameters of 999 for versions prior to 3.32.0 # Info about limit: https://www.sqlite.org/limits.html sqlite_iterations = 999 Pizza.objects.using('sqlite').bulk_create([Pizza() for _ in range(sqlite_iterations)]) Topping.objects.using('sqlite').bulk_create([Topping() for _ in range(sqlite_iterations)]) prefetch_result_sqlite = Pizza.objects.using('sqlite').prefetch_related('toppings') self.assertEqual(len(prefetch_result_sqlite), sqlite_iterations) DatabaseOperations.max_in_list_size = old_max_in_list_size def test_binaryfield_init(self): binary_data = b'\x00\x46\xFE' binary = BinaryData(binary=binary_data) binary.save() binary.save(using='sqlite') try: binary.full_clean() except ValidationError: self.fail() b1 = BinaryData.objects.filter(binary=binary_data) self.assertSequenceEqual( b1, [binary], ) b2 = BinaryData.objects.using('sqlite').filter(binary=binary_data) self.assertSequenceEqual( b2, [binary], ) @skipUnlessDBFeature('supports_table_check_constraints') @skipUnless( VERSION >= (3, 2), "Django 3.1 and below has errors from running migrations for this test", ) def test_checkconstraint_get_check_sql(self): TestCheckConstraintWithUnicode.objects.create(name='abc') try: TestCheckConstraintWithUnicode.objects.using('sqlite').create(name='abc') except OperationalError: self.fail() def test_queryset_bulk_update(self): objs = [ BinaryData.objects.create(binary=b'\x00') for _ in range(5) ] for obj in objs: obj.binary = None BinaryData.objects.bulk_update(objs, ["binary"]) self.assertCountEqual(BinaryData.objects.filter(binary__isnull=True), objs) objs = [ BinaryData.objects.using('sqlite').create(binary=b'\x00') for _ in range(5) ] for obj in objs: obj.binary = None BinaryData.objects.using('sqlite').bulk_update(objs, ["binary"]) self.assertCountEqual(BinaryData.objects.using('sqlite').filter(binary__isnull=True), objs) mssql-django-1.5/testapp/tests/test_queries.py000066400000000000000000000024231461302131500216630ustar00rootroot00000000000000import django.db.utils from django.db import connections from django.test import TransactionTestCase from ..models import Author class TestTableWithTrigger(TransactionTestCase): def test_insert_into_table_with_trigger(self): connection = connections['default'] with connection.schema_editor() as cursor: cursor.execute(""" CREATE TRIGGER TestTrigger ON [testapp_author] FOR INSERT AS INSERT INTO [testapp_editor]([name]) VALUES ('Bar') """) try: # Change can_return_rows_from_bulk_insert to be the same as when # has_trigger = True old_return_rows_flag = connection.features_class.can_return_rows_from_bulk_insert connection.features_class.can_return_rows_from_bulk_insert = False Author.objects.create(name='Foo') except django.db.utils.ProgrammingError as e: self.fail('Check for regression of issue #130. 
Insert with trigger failed with exception: %s' % e) finally: with connection.schema_editor() as cursor: cursor.execute("DROP TRIGGER TestTrigger") connection.features_class.can_return_rows_from_bulk_insert = old_return_rows_flagmssql-django-1.5/testapp/tests/test_timezones.py000066400000000000000000000101141461302131500222170ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import datetime from django.db import connection from django.test import TestCase from django.test.utils import override_settings from ..models import TimeZone class TestDateTimeField(TestCase): def test_iso_week_day(self): days = { 1: TimeZone.objects.create(date=datetime.datetime(2022, 5, 16)), 2: TimeZone.objects.create(date=datetime.datetime(2022, 5, 17)), 3: TimeZone.objects.create(date=datetime.datetime(2022, 5, 18)), 4: TimeZone.objects.create(date=datetime.datetime(2022, 5, 19)), 5: TimeZone.objects.create(date=datetime.datetime(2022, 5, 20)), 6: TimeZone.objects.create(date=datetime.datetime(2022, 5, 21)), 7: TimeZone.objects.create(date=datetime.datetime(2022, 5, 22)), } for k, v in days.items(): self.assertSequenceEqual(TimeZone.objects.filter(date__iso_week_day=k), [v]) class TestDateTimeToDateTimeOffsetMigration(TestCase): def setUp(self): # Want this to be a naive datetime so don't want # to override settings before TimeZone creation self.time = TimeZone.objects.create() def tearDown(self): TimeZone.objects.all().delete() @override_settings(USE_TZ=True) def test_datetime_to_datetimeoffset_utc(self): dt = self.time.date # Do manual migration from DATETIME2 to DATETIMEOFFSET # and local time to UTC with connection.schema_editor() as cursor: cursor.execute(""" ALTER TABLE [testapp_timezone] ALTER COLUMN [date] DATETIMEOFFSET; UPDATE [testapp_timezone] SET [date] = TODATETIMEOFFSET([date], 0) AT TIME ZONE 'UTC' """) dto = TimeZone.objects.get(id=self.time.id).date try: self.assertEqual(dt, dto.replace(tzinfo=None)) finally: # Migrate back to DATETIME2 for other unit tests with connection.schema_editor() as cursor: cursor.execute("ALTER TABLE [testapp_timezone] ALTER column [date] datetime2") @override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi") def test_datetime_to_datetimeoffset_local_timezone(self): dt = self.time.date # Do manual migration from DATETIME2 to DATETIMEOFFSET # and local time to UTC with connection.schema_editor() as cursor: cursor.execute(""" ALTER TABLE [testapp_timezone] ALTER COLUMN [date] DATETIMEOFFSET; UPDATE [testapp_timezone] SET [date] = TODATETIMEOFFSET([date], 180) AT TIME ZONE 'UTC' """) dto = TimeZone.objects.get(id=self.time.id).date try: # Africa/Nairobi (EAT) offset is +03:00 self.assertEqual(dt - datetime.timedelta(hours=3), dto.replace(tzinfo=None)) finally: # Migrate back to DATETIME2 for other unit tests with connection.schema_editor() as cursor: cursor.execute("ALTER TABLE [testapp_timezone] ALTER column [date] datetime2") @override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi") def test_datetime_to_datetimeoffset_other_timezone(self): dt = self.time.date # Do manual migration from DATETIME2 to DATETIMEOFFSET # and local time to UTC with connection.schema_editor() as cursor: cursor.execute(""" ALTER TABLE [testapp_timezone] ALTER COLUMN [date] DATETIMEOFFSET; UPDATE [testapp_timezone] SET [date] = TODATETIMEOFFSET([date], 420) AT TIME ZONE 'UTC' """) dto = TimeZone.objects.get(id=self.time.id).date try: self.assertEqual(dt - datetime.timedelta(hours=7), dto.replace(tzinfo=None)) finally: # Migrate back to 
DATETIME2 for other unit tests with connection.schema_editor() as cursor: cursor.execute("ALTER TABLE [testapp_timezone] ALTER column [date] datetime2") mssql-django-1.5/tox.ini000066400000000000000000000010111461302131500152560ustar00rootroot00000000000000[tox] envlist = {py36,py37,py38,py39}-django32, {py38, py39, py310}-django40, {py38, py39, py310}-django41, {py38, py39, py310}-django42, {py310, py311, py312}-django50 [testenv] allowlist_externals = bash commands = python manage.py test --noinput bash test.sh deps = coverage==5.5 unittest-xml-reporting django32: django==3.2.* django40: django>=4.0a1,<4.1 django41: django>=4.1a1,<4.2 django42: django>=4.2,<4.3 django50: django>=5.0,<5.1