==== mssql-django-1.1.2/.editorconfig ====
# https://editorconfig.org/

root = true

[*]
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
charset = utf-8
max_line_length = 119

[*.{yml,yaml}]
indent_size = 2

==== mssql-django-1.1.2/.github/workflows/codeql-analysis.yml ====
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ dev ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ dev ]
  schedule:
    - cron: '40 13 * * 3'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1

==== mssql-django-1.1.2/.gitignore ====
*.py[co]
*.sw[a-z]
*.orig
*~
.DS_Store
Thumbs.db
*.egg-info
*.dll
tests/local_settings.py

# Virtual Env
/venv/

.idea/

==== mssql-django-1.1.2/CODE_OF_CONDUCT.md ====
# Microsoft Open Source Code of Conduct

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).

Resources:

- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns

==== mssql-django-1.1.2/CONTRIBUTING.md ====
# Contributing

## How to contribute

### Run unit tests

After making changes to the project, it's a good idea to run the unit tests before making a pull request.

1. **Run SQL Server**

   Make sure you have SQL Server running on your local machine. Download and install SQL Server [here](https://www.microsoft.com/en-us/sql-server/sql-server-downloads), or you could use Docker. Change `testapp/settings.py` to match your SQL Server login username and password.

   ```
   docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2019-latest
   ```

2. **Clone Django**

   In the `mssql-django` folder:

   ```
   # Install your local mssql-django
   pip install -e .

   # The unit test suite is in the `Django` folder, so we need to clone it
   git clone https://github.com/django/django.git --depth 1
   ```

3. **Install Tox**

   ```
   # we use `tox` to run tests and install dependencies
   pip install tox
   ```

4. **Run Tox**

   ```
   # e.g. run Django 3.1 tests with Python 3.7
   tox -e py37-django31
   ```

---

This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.

When you submit a pull request, a CLA bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.

==== mssql-django-1.1.2/LICENSE.txt ====
Project Name: mssql-django

BSD 3-Clause License

Copyright (c) 2021, Microsoft Corporation
              2019, ES Solutions AB
              2018, Michiya Takahashi
              2008, 2009 django-pyodbc developers
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

==== mssql-django-1.1.2/MANIFEST.in ====
include LICENSE.txt
include MANIFEST.in
include README.md
recursive-include mssql *.py
recursive-exclude docker *

==== mssql-django-1.1.2/NOTICE.md ====
# Notices

This repository incorporates material as listed below or described in the code.

## django-mssql-backend

Please see below for the associated license for the incorporated material from
django-mssql-backend (https://github.com/ESSolutions/django-mssql-backend).

### BSD 3-Clause License

Copyright (c) 2019, ES Solutions AB
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
==== mssql-django-1.1.2/README.md ====
# SQL Server backend for Django

Welcome to the MSSQL-Django 3rd party backend project!

*mssql-django* is a fork of [django-mssql-backend](https://pypi.org/project/django-mssql-backend/). This project provides an enterprise database connectivity option for the Django Web Framework, with support for Microsoft SQL Server and Azure SQL Database.

We'd like to give thanks to the community that made this project possible, with particular recognition of the contributors: OskarPersson, michiya, dlo and the original Google Code django-pyodbc team. Moving forward we encourage participation in this project from both old and new contributors!

We hope you enjoy using the MSSQL-Django 3rd party backend.

## Features

- Supports Django 2.2, 3.0, 3.1, 3.2 and 4.0
- Tested on Microsoft SQL Server 2016, 2017, 2019
- Passes most of the tests of the Django test suite
- Compatible with [Microsoft ODBC Driver for SQL Server](https://docs.microsoft.com/en-us/sql/connect/odbc/microsoft-odbc-driver-for-sql-server), [SQL Server Native Client](https://msdn.microsoft.com/en-us/library/ms131321(v=sql.120).aspx), and [FreeTDS](https://www.freetds.org/) ODBC drivers

## Dependencies

- pyodbc 3.0 or newer

## Installation

1. Install pyodbc 3.0 (or newer) and Django

2. Install mssql-django:

       pip install mssql-django

3. Set the `ENGINE` setting in the `settings.py` file used by your Django application or project to `'mssql'`:

       'ENGINE': 'mssql'

## Configuration

### Standard Django settings

The following entries in a database-level settings dictionary in DATABASES control the behavior of the backend:

- ENGINE

  String. It must be `"mssql"`.

- NAME

  String. Database name. Required.

- HOST

  String. SQL Server instance in `"server\instance"` format.

- PORT

  String. Server instance port. An empty string means the default port.

- USER

  String. Database user name in `"user"` format. If not given then MS Integrated Security will be used.

- PASSWORD

  String. Database user password.

- AUTOCOMMIT

  Boolean. Set this to `False` if you want to disable Django's transaction management and implement your own.

- Trusted_Connection

  String. Default is `"yes"`. Can be set to `"no"` if required.

and the following entries are also available in the `TEST` dictionary for any given database-level settings dictionary:

- NAME

  String. The name of the database to use when running the test suite. If the default value (`None`) is used, the test database will use the name `"test_" + NAME`.

- COLLATION

  String. The collation order to use when creating the test database. If the default value (`None`) is used, the test database is assigned the default collation of the instance of SQL Server.

- DEPENDENCIES

  String. The creation-order dependencies of the database. See the official Django documentation for more details.

- MIRROR

  String. The alias of the database that this database should mirror during testing. Default value is `None`. See the official Django documentation for more details.
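A minimal sketch of the `TEST` keys above (the database and collation names here are illustrative, not defaults):

```python
DATABASES = {
    'default': {
        'ENGINE': 'mssql',
        'NAME': 'mydb',
        # ... credentials and OPTIONS as described below ...
        'TEST': {
            'NAME': 'mytestdb',  # omit to default to 'test_' + NAME
            'COLLATION': 'Latin1_General_CI_AS',  # omit to use the server default
        },
    },
}
```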
### OPTIONS

Dictionary. Current available keys are:

- driver

  String. ODBC Driver to use (`"ODBC Driver 17 for SQL Server"`, `"SQL Server Native Client 11.0"`, `"FreeTDS"` etc). Default is `"ODBC Driver 17 for SQL Server"`.

- isolation_level

  String. Sets the [transaction isolation level](https://docs.microsoft.com/en-us/sql/t-sql/statements/set-transaction-isolation-level-transact-sql) for each database session. Valid values for this entry are `READ UNCOMMITTED`, `READ COMMITTED`, `REPEATABLE READ`, `SNAPSHOT`, and `SERIALIZABLE`. Default is `None`, which means no isolation level is set on a database session and the SQL Server default will be used.

- dsn

  String. A named DSN can be used instead of `HOST`.

- host_is_server

  Boolean. Only relevant if using the FreeTDS ODBC driver under Unix/Linux.

  By default, when using the FreeTDS ODBC driver the value specified in the ``HOST`` setting is used in a ``SERVERNAME`` ODBC connection string component instead of being used in a ``SERVER`` component; this means that this value should be the name of a *dataserver* definition present in the ``freetds.conf`` FreeTDS configuration file instead of a hostname or an IP address.

  But if this option is present and its value is ``True``, this special behavior is turned off. Instead, connections to the database server will be established using ``HOST`` and ``PORT`` options, without requiring ``freetds.conf`` to be configured.

  See https://www.freetds.org/userguide/dsnless.html for more information.

- unicode_results

  Boolean. If it is set to ``True``, pyodbc's *unicode_results* feature is activated and strings returned from pyodbc are always Unicode. Default value is ``False``.

- extra_params

  String. Additional parameters for the ODBC connection. The format is ``"param=value;param=value"``. [Azure AD Authentication](https://github.com/microsoft/mssql-django/wiki/Azure-AD-Authentication) (Service Principal, Interactive, Msi) can be added to this field.

- collation

  String. Name of the collation to use when performing text field lookups against the database. Default is ``None``; this means no collation specifier is added to your lookup SQL (the default collation of your database will be used). For Chinese language you can set it to ``"Chinese_PRC_CI_AS"``.

- connection_timeout

  Integer. Sets the timeout in seconds for the database connection process. Default value is ``0`` which disables the timeout.

- connection_retries

  Integer. Sets the number of times to retry the database connection process. Default value is ``5``.

- connection_retry_backoff_time

  Integer. Sets the backoff time in seconds for retries of the database connection process. Default value is ``5``.

- query_timeout

  Integer. Sets the timeout in seconds for the database query. Default value is ``0`` which disables the timeout.

- [setencoding](https://github.com/mkleehammer/pyodbc/wiki/Connection#setencoding) and [setdecoding](https://github.com/mkleehammer/pyodbc/wiki/Connection#setdecoding)

  ```python
  # Example
  "OPTIONS": {
      "setdecoding": [
          {"sqltype": pyodbc.SQL_CHAR, "encoding": 'utf-8'},
          {"sqltype": pyodbc.SQL_WCHAR, "encoding": 'utf-8'}],
      "setencoding": [
          {"encoding": "utf-8"}],
      ...
  },
  ```

### Backend-specific settings

The following project-level settings also control the behavior of the backend:

- DATABASE_CONNECTION_POOLING

  Boolean. If it is set to ``False``, pyodbc's connection pooling feature won't be activated.
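As an illustration of the connection-resiliency keys documented above, an `OPTIONS` dictionary might look like this (the timeout values are arbitrary examples, not recommendations):

```python
'OPTIONS': {
    'driver': 'ODBC Driver 17 for SQL Server',
    'connection_timeout': 30,            # seconds; 0 (the default) disables the timeout
    'connection_retries': 5,             # the documented default
    'connection_retry_backoff_time': 5,  # seconds to wait between retries
    'query_timeout': 120,                # seconds; 0 (the default) disables the timeout
},
```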
### Example

Here is an example of the database settings:

```python
DATABASES = {
    'default': {
        'ENGINE': 'mssql',
        'NAME': 'mydb',
        'USER': 'user@myserver',
        'PASSWORD': 'password',
        'HOST': 'myserver.database.windows.net',
        'PORT': '',
        'OPTIONS': {
            'driver': 'ODBC Driver 17 for SQL Server',
        },
    },
}

# set this to False if you want to turn off pyodbc's connection pooling
DATABASE_CONNECTION_POOLING = False
```

## Limitations

The following features are currently not fully supported:

- Altering a model field from or to AutoField at migration
- Django annotate functions have floating point arithmetic problems in some cases
- Annotate function with exists
- Exists function in order_by
- Right-hand power and arithmetic with datetimes
- Timezones, timedeltas not fully supported
- Rename field/model with foreign key constraint
- Database-level constraints
- Math degrees, power or radians
- Bit-shift operators
- Filtered index
- Date extract function
- Hashing functions

JSONField lookups have limitations, more details [here](https://github.com/microsoft/mssql-django/wiki/JSONField).

## Contributing

More details on contributing can be found [here](CONTRIBUTING.md).

This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.

When you submit a pull request, a CLA bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.

## Security Reporting Instructions

For security reporting instructions please refer to the [`SECURITY.md`](SECURITY.md) file in this repository.

## Trademarks

This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos is subject to those third parties' policies.

==== mssql-django-1.1.2/SECURITY.md ====
# Security

Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.

## Reporting Security Issues

**Please do not report security vulnerabilities through public GitHub issues.**

Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).

If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).

You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).

Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:

* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue

This information will help us triage your report more quickly. If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.

## Preferred Languages

We prefer all communications to be in English.

## Policy

Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).

==== mssql-django-1.1.2/SUPPORT.md ====
# Support

## How to file issues and get help

This project uses GitHub Issues to track bugs and feature requests. Please search the existing issues before filing new issues to avoid duplicates. For new issues, file your bug or feature request as a new Issue.

For help and questions about using this project, please use the Django Developers forum at https://groups.google.com/g/django-developers. Please search for an existing discussion on your topic before adding a new conversation. For new conversations, include "MSSQL" in a descriptive subject.

## Microsoft Support Policy

Support for this project is limited to the resources listed above.
==== mssql-django-1.1.2/azure-pipelines.yml ====
trigger:
- master
- dev
- 1ES

schedules:
- cron: "0 9 * * *"
  displayName: Daily midnight build
  branches:
    include:
    - dev
  always: true

jobs:
- job: Windows
  pool:
    name: Django-1ES-pool
    demands:
      - imageOverride -equals JDBC-MMS2019-SQL2019
  timeoutInMinutes: 120
  strategy:
    matrix:
      Python3.10 - Django 4.0:
        python.version: '3.10'
        tox.env: 'py310-django40'
      Python 3.9 - Django 4.0:
        python.version: '3.9'
        tox.env: 'py39-django40'
      Python 3.8 - Django 4.0:
        python.version: '3.8'
        tox.env: 'py38-django40'
      Python 3.9 - Django 3.2:
        python.version: '3.9'
        tox.env: 'py39-django32'
      Python 3.8 - Django 3.2:
        python.version: '3.8'
        tox.env: 'py38-django32'
      Python 3.7 - Django 3.2:
        python.version: '3.7'
        tox.env: 'py37-django32'
      Python 3.6 - Django 3.2:
        python.version: '3.6'
        tox.env: 'py36-django32'
      Python 3.9 - Django 3.1:
        python.version: '3.9'
        tox.env: 'py39-django31'
      Python 3.8 - Django 3.1:
        python.version: '3.8'
        tox.env: 'py38-django31'
      Python 3.7 - Django 3.1:
        python.version: '3.7'
        tox.env: 'py37-django31'
      Python 3.6 - Django 3.1:
        python.version: '3.6'
        tox.env: 'py36-django31'
      Python 3.9 - Django 3.0:
        python.version: '3.9'
        tox.env: 'py39-django30'
      Python 3.8 - Django 3.0:
        python.version: '3.8'
        tox.env: 'py38-django30'
      Python 3.7 - Django 3.0:
        python.version: '3.7'
        tox.env: 'py37-django30'
      Python 3.6 - Django 3.0:
        python.version: '3.6'
        tox.env: 'py36-django30'
      Python 3.7 - Django 2.2:
        python.version: '3.7'
        tox.env: 'py37-django22'
      Python 3.6 - Django 2.2:
        python.version: '3.6'
        tox.env: 'py36-django22'

  steps:
  - task: CredScan@2
    inputs:
      toolMajorVersion: 'V2'

  - task: UsePythonVersion@0
    inputs:
      versionSpec: "$(python.version)"
    displayName: Use Python $(python.version)

  - powershell: |
      $IP=Get-NetIPAddress -AddressFamily IPv4 -InterfaceIndex $(Get-NetConnectionProfile -IPv4Connectivity Internet | Select-Object -ExpandProperty InterfaceIndex) | Select-Object -ExpandProperty IPAddress
      (Get-Content $pwd/testapp/settings.py).replace('localhost', $IP) | Set-Content $pwd/testapp/settings.py
      Invoke-WebRequest https://download.microsoft.com/download/E/6/B/E6BFDC7A-5BCD-4C51-9912-635646DA801E/en-US/17.5.2.1/x64/msodbcsql.msi -OutFile msodbcsql.msi
      msiexec /quiet /passive /qn /i msodbcsql.msi IACCEPTMSODBCSQLLICENSETERMS=YES
      Get-OdbcDriver
    displayName: Install ODBC

  - powershell: |
      Import-Module "sqlps"
      Invoke-Sqlcmd @"
      EXEC xp_instance_regwrite N'HKEY_LOCAL_MACHINE', N'Software\Microsoft\MSSQLServer\MSSQLServer', N'LoginMode', REG_DWORD, 2
      ALTER LOGIN [sa] ENABLE;
      ALTER LOGIN [sa] WITH PASSWORD = 'MyPassword42', CHECK_POLICY=OFF;
      "@
    displayName: Set up SQL Server

  - powershell: |
      Restart-Service -Name MSSQLSERVER -Force
    displayName: Restart SQL Server

  - powershell: |
      python -m pip install --upgrade pip wheel setuptools
      python -m pip install tox
      git clone https://github.com/django/django.git
      python -m tox -e $(tox.env)
    displayName: Run tox

- job: Linux
  pool:
    name: Django-1ES-pool
    demands:
      - imageOverride -equals MMSUbuntu20.04
  timeoutInMinutes: 120
  strategy:
    matrix:
      Python3.10 - Django 4.0:
        python.version: '3.10'
        tox.env: 'py310-django40'
      Python 3.9 - Django 4.0:
        python.version: '3.9'
        tox.env: 'py39-django40'
      Python 3.8 - Django 4.0:
        python.version: '3.8'
        tox.env: 'py38-django40'
      Python 3.9 - Django 3.2:
        python.version: '3.9'
        tox.env: 'py39-django32'
      Python 3.8 - Django 3.2:
        python.version: '3.8'
        tox.env: 'py38-django32'
      Python 3.7 - Django 3.2:
        python.version: '3.7'
        tox.env: 'py37-django32'
      Python 3.6 - Django 3.2:
        python.version: '3.6'
        tox.env: 'py36-django32'
      Python 3.9 - Django 3.1:
        python.version: '3.9'
        tox.env: 'py39-django31'
      Python 3.8 - Django 3.1:
        python.version: '3.8'
        tox.env: 'py38-django31'
      Python 3.7 - Django 3.1:
        python.version: '3.7'
        tox.env: 'py37-django31'
      Python 3.6 - Django 3.1:
        python.version: '3.6'
        tox.env: 'py36-django31'
      Python 3.9 - Django 3.0:
        python.version: '3.9'
        tox.env: 'py39-django30'
      Python 3.8 - Django 3.0:
        python.version: '3.8'
        tox.env: 'py38-django30'
      Python 3.7 - Django 3.0:
        python.version: '3.7'
        tox.env: 'py37-django30'
      Python 3.6 - Django 3.0:
        python.version: '3.6'
        tox.env: 'py36-django30'
      Python 3.7 - Django 2.2:
        python.version: '3.7'
        tox.env: 'py37-django22'
      Python 3.6 - Django 2.2:
        python.version: '3.6'
        tox.env: 'py36-django22'

  steps:
  - task: UsePythonVersion@0
    inputs:
      versionSpec: "$(python.version)"
    displayName: Use Python $(python.version)

  - script: |
      docker pull mcr.microsoft.com/mssql/server:2019-latest
      docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2019-latest
      curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
      curl https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list
      sudo apt-get update
      sudo ACCEPT_EULA=Y apt-get install -y msodbcsql17 g++ unixodbc-dev libmemcached-dev
    displayName: Install SQL Server

  - script: |
      python -m pip install --upgrade pip wheel setuptools
      pip install tox
      git clone https://github.com/django/django.git
    displayName: Install requirements

  - script: tox -e $(tox.env)
    displayName: Run tox

  - task: PublishCodeCoverageResults@1
    inputs:
      codeCoverageTool: 'Cobertura'
      summaryFileLocation: 'django/coverage.xml'

  - task: PublishTestResults@2
    displayName: Publish test results via jUnit
    inputs:
      testResultsFormat: 'JUnit'
      testResultsFiles: 'django/result.xml'
      testRunTitle: 'junit-$(Agent.OS)-$(Agent.OSArchitecture)-$(tox.env)'

==== mssql-django-1.1.2/manage.py ====
#!/usr/bin/env python
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testapp.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)

==== mssql-django-1.1.2/mssql/__init__.py ====
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

import mssql.functions  # noqa

==== mssql-django-1.1.2/mssql/base.py ====
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

"""
MS SQL Server database backend for Django.
"""
""" import os import re import time from django.core.exceptions import ImproperlyConfigured try: import pyodbc as Database except ImportError as e: raise ImproperlyConfigured("Error loading pyodbc module: %s" % e) from django.utils.version import get_version_tuple # noqa pyodbc_ver = get_version_tuple(Database.version) if pyodbc_ver < (3, 0): raise ImproperlyConfigured("pyodbc 3.0 or newer is required; you have %s" % Database.version) from django.conf import settings # noqa from django.db import NotSupportedError # noqa from django.db.backends.base.base import BaseDatabaseWrapper # noqa from django.utils.encoding import smart_str # noqa from django.utils.functional import cached_property # noqa if hasattr(settings, 'DATABASE_CONNECTION_POOLING'): if not settings.DATABASE_CONNECTION_POOLING: Database.pooling = False from .client import DatabaseClient # noqa from .creation import DatabaseCreation # noqa from .features import DatabaseFeatures # noqa from .introspection import DatabaseIntrospection # noqa from .operations import DatabaseOperations # noqa from .schema import DatabaseSchemaEditor # noqa EDITION_AZURE_SQL_DB = 5 def encode_connection_string(fields): """Encode dictionary of keys and values as an ODBC connection String. See [MS-ODBCSTR] document: https://msdn.microsoft.com/en-us/library/ee208909%28v=sql.105%29.aspx """ # As the keys are all provided by us, don't need to encode them as we know # they are ok. return ';'.join( '%s=%s' % (k, encode_value(v)) for k, v in fields.items() ) def encode_value(v): """If the value contains a semicolon, or starts with a left curly brace, then enclose it in curly braces and escape all right curly braces. """ if ';' in v or v.strip(' ').startswith('{'): return '{%s}' % (v.replace('}', '}}'),) return v class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'microsoft' display_name = 'SQL Server' # This dictionary maps Field objects to their associated MS SQL column # types, as strings. Column-type strings can contain format strings; they'll # be interpolated against the values of Field.__dict__ before being output. # If a column type is set to None, it won't be included in the output. 
    data_types = {
        'AutoField': 'int',
        'BigAutoField': 'bigint',
        'BigIntegerField': 'bigint',
        'BinaryField': 'varbinary(%(max_length)s)',
        'BooleanField': 'bit',
        'CharField': 'nvarchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime2',
        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
        'DurationField': 'bigint',
        'FileField': 'nvarchar(%(max_length)s)',
        'FilePathField': 'nvarchar(%(max_length)s)',
        'FloatField': 'double precision',
        'IntegerField': 'int',
        'IPAddressField': 'nvarchar(15)',
        'GenericIPAddressField': 'nvarchar(39)',
        'JSONField': 'nvarchar(max)',
        'NullBooleanField': 'bit',
        'OneToOneField': 'int',
        'PositiveIntegerField': 'int',
        'PositiveSmallIntegerField': 'smallint',
        'PositiveBigIntegerField': 'bigint',
        'SlugField': 'nvarchar(%(max_length)s)',
        'SmallAutoField': 'smallint',
        'SmallIntegerField': 'smallint',
        'TextField': 'nvarchar(max)',
        'TimeField': 'time',
        'UUIDField': 'char(32)',
    }
    data_types_suffix = {
        'AutoField': 'IDENTITY (1, 1)',
        'BigAutoField': 'IDENTITY (1, 1)',
        'SmallAutoField': 'IDENTITY (1, 1)',
    }
    data_type_check_constraints = {
        'JSONField': '(ISJSON ("%(column)s") = 1)',
        'PositiveIntegerField': '[%(column)s] >= 0',
        'PositiveSmallIntegerField': '[%(column)s] >= 0',
        'PositiveBigIntegerField': '[%(column)s] >= 0',
    }
    operators = {
        # Since '=' is used not only for string comparison, there is no way
        # to make it case (in)sensitive.
        'exact': '= %s',
        'iexact': "= UPPER(%s)",
        'contains': "LIKE %s ESCAPE '\\'",
        'icontains': "LIKE UPPER(%s) ESCAPE '\\'",
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': "LIKE %s ESCAPE '\\'",
        'endswith': "LIKE %s ESCAPE '\\'",
        'istartswith': "LIKE UPPER(%s) ESCAPE '\\'",
        'iendswith': "LIKE UPPER(%s) ESCAPE '\\'",
    }

    # The patterns below are used to generate SQL pattern lookup clauses when
    # the right-hand side of the lookup isn't a raw string (it might be an expression
    # or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
    # escaped on database side.
    #
    # Note: we use str.format() here for readability as '%' is used as a wildcard for
    # the LIKE operator.
    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '[\]'), '%%', '[%%]'), '_', '[_]')"

    pattern_ops = {
        'contains': "LIKE '%%' + {} + '%%'",
        'icontains': "LIKE '%%' + UPPER({}) + '%%'",
        'startswith': "LIKE {} + '%%'",
        'istartswith': "LIKE UPPER({}) + '%%'",
        'endswith': "LIKE '%%' + {}",
        'iendswith': "LIKE '%%' + UPPER({})",
    }

    Database = Database
    SchemaEditorClass = DatabaseSchemaEditor
    # Classes instantiated in __init__().
    client_class = DatabaseClient
    creation_class = DatabaseCreation
    features_class = DatabaseFeatures
    introspection_class = DatabaseIntrospection
    ops_class = DatabaseOperations

    _codes_for_networkerror = (
        '08S01',
        '08S02',
    )

    _sql_server_versions = {
        9: 2005,
        10: 2008,
        11: 2012,
        12: 2014,
        13: 2016,
        14: 2017,
        15: 2019,
    }

    # https://azure.microsoft.com/en-us/documentation/articles/sql-database-develop-csharp-retry-windows/
    _transient_error_numbers = (
        '4060',
        '10928',
        '10929',
        '40197',
        '40501',
        '40613',
        '49918',
        '49919',
        '49920',
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        opts = self.settings_dict["OPTIONS"]

        # capability for multiple result sets or cursors
        self.supports_mars = False

        # Some drivers need unicode encoded as UTF8. If this is left as
        # None, it will be determined based on the driver, namely it'll be
        # False if the driver is a Windows driver and True otherwise.
        #
        # However, recent versions of FreeTDS and pyodbc (0.91 and 3.0.6 as
        # of writing) are perfectly okay being fed unicode, which is why
        # this option is configurable.
        if 'driver_needs_utf8' in opts:
            self.driver_charset = 'utf-8'
        else:
            self.driver_charset = opts.get('driver_charset', None)

        # interval to wait for recovery from network error
        interval = opts.get('connection_recovery_interval_msec', 0.0)
        self.connection_recovery_interval_msec = float(interval) / 1000

        # make lookup operators to be collation-sensitive if needed
        collation = opts.get('collation', None)
        if collation:
            self.operators = dict(self.__class__.operators)
            ops = {}
            for op in self.operators:
                sql = self.operators[op]
                if sql.startswith('LIKE '):
                    ops[op] = '%s COLLATE %s' % (sql, collation)
            self.operators.update(ops)

    def create_cursor(self, name=None):
        return CursorWrapper(self.connection.cursor(), self)

    def _cursor(self):
        new_conn = False
        if self.connection is None:
            new_conn = True

        conn = super()._cursor()

        if new_conn:
            if self.sql_server_version <= 2005:
                self.data_types['DateField'] = 'datetime'
                self.data_types['DateTimeField'] = 'datetime'
                self.data_types['TimeField'] = 'datetime'

        return conn

    def get_connection_params(self):
        settings_dict = self.settings_dict
        if settings_dict['NAME'] == '':
            raise ImproperlyConfigured(
                "settings.DATABASES is improperly configured. "
                "Please supply the NAME value.")
        conn_params = settings_dict.copy()
        if conn_params['NAME'] is None:
            conn_params['NAME'] = 'master'
        return conn_params

    def get_new_connection(self, conn_params):
        database = conn_params['NAME']
        host = conn_params.get('HOST', 'localhost')
        user = conn_params.get('USER', None)
        password = conn_params.get('PASSWORD', None)
        port = conn_params.get('PORT', None)
        trusted_connection = conn_params.get('Trusted_Connection', 'yes')

        options = conn_params.get('OPTIONS', {})
        driver = options.get('driver', 'ODBC Driver 17 for SQL Server')
        dsn = options.get('dsn', None)
        options_extra_params = options.get('extra_params', '')

        # Microsoft driver names assumed here are:
        # * SQL Server Native Client 10.0/11.0
        # * ODBC Driver 11/13 for SQL Server
        ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client')

        # available ODBC connection string keywords:
        # (Microsoft drivers for Windows)
        # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/applications/using-connection-string-keywords-with-sql-server-native-client
        # (Microsoft drivers for Linux/Mac)
        # https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/connection-string-keywords-and-data-source-names-dsns
        # (FreeTDS)
        # http://www.freetds.org/userguide/odbcconnattr.htm
        cstr_parts = {}
        if dsn:
            cstr_parts['DSN'] = dsn
        else:
            # Only append DRIVER if DATABASE_ODBC_DSN hasn't been set
            cstr_parts['DRIVER'] = driver
            if ms_drivers.match(driver):
                if port:
                    host = ','.join((host, str(port)))
                cstr_parts['SERVER'] = host
            elif options.get('host_is_server', False):
                if port:
                    cstr_parts['PORT'] = str(port)
                cstr_parts['SERVER'] = host
            else:
                cstr_parts['SERVERNAME'] = host

        if user:
            cstr_parts['UID'] = user
            if 'Authentication=ActiveDirectoryInteractive' not in options_extra_params:
                cstr_parts['PWD'] = password
        else:
            if ms_drivers.match(driver) and 'Authentication=ActiveDirectoryMsi' not in options_extra_params:
                cstr_parts['Trusted_Connection'] = trusted_connection
            else:
                cstr_parts['Integrated Security'] = 'SSPI'

        cstr_parts['DATABASE'] = database

        if ms_drivers.match(driver) and os.name == 'nt':
            cstr_parts['MARS_Connection'] = 'yes'

        connstr = encode_connection_string(cstr_parts)
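        # Illustration (not part of the original source): for
        #   {'DRIVER': 'ODBC Driver 17 for SQL Server', 'SERVER': 'localhost,1433'}
        # encode_connection_string() returns
        #   'DRIVER=ODBC Driver 17 for SQL Server;SERVER=localhost,1433'
        # (values containing ';' or starting with '{' would be wrapped in braces).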
        # extra_params are glued on the end of the string without encoding,
        # so it's up to the settings writer to make sure they're appropriate -
        # use encode_connection_string if constructing from external input.
        if options.get('extra_params', None):
            connstr += ';' + options['extra_params']

        unicode_results = options.get('unicode_results', False)
        timeout = options.get('connection_timeout', 0)
        retries = options.get('connection_retries', 5)
        backoff_time = options.get('connection_retry_backoff_time', 5)
        query_timeout = options.get('query_timeout', 0)
        setencoding = options.get('setencoding', None)
        setdecoding = options.get('setdecoding', None)

        conn = None
        retry_count = 0
        need_to_retry = False
        while conn is None:
            try:
                conn = Database.connect(connstr,
                                        unicode_results=unicode_results,
                                        timeout=timeout)
            except Exception as e:
                for error_number in self._transient_error_numbers:
                    if error_number in e.args[1]:
                        if error_number in e.args[1] and retry_count < retries:
                            time.sleep(backoff_time)
                            need_to_retry = True
                            retry_count = retry_count + 1
                        else:
                            need_to_retry = False
                        break
                if not need_to_retry:
                    raise

        conn.timeout = query_timeout
        if setencoding:
            for entry in setencoding:
                conn.setencoding(**entry)
        if setdecoding:
            for entry in setdecoding:
                conn.setdecoding(**entry)
        return conn

    def init_connection_state(self):
        drv_name = self.connection.getinfo(Database.SQL_DRIVER_NAME).upper()

        if drv_name.startswith('LIBTDSODBC'):
            try:
                drv_ver = self.connection.getinfo(Database.SQL_DRIVER_VER)
                ver = get_version_tuple(drv_ver)[:2]
                if ver < (0, 95):
                    raise ImproperlyConfigured(
                        "FreeTDS 0.95 or newer is required.")
            except Exception:
                # unknown driver version
                pass

        ms_drv_names = re.compile('^(LIB)?(SQLNCLI|MSODBCSQL)')

        if ms_drv_names.match(drv_name):
            self.driver_charset = None
            # http://msdn.microsoft.com/en-us/library/ms131686.aspx
            self.supports_mars = True
            self.features.can_use_chunked_reads = True

        settings_dict = self.settings_dict
        cursor = self.create_cursor()
        options = settings_dict.get('OPTIONS', {})
        isolation_level = options.get('isolation_level', None)
        if isolation_level:
            cursor.execute('SET TRANSACTION ISOLATION LEVEL %s' % isolation_level)

        # Set date format for the connection. Also, make sure Sunday is
        # considered the first day of the week (to be consistent with the
        # Django convention for the 'week_day' Django lookup) if the user
        # hasn't told us otherwise
        datefirst = options.get('datefirst', 7)
        cursor.execute('SET DATEFORMAT ymd; SET DATEFIRST %s' % datefirst)

        val = self.get_system_datetime()
        if isinstance(val, str):
            raise ImproperlyConfigured(
                "The database driver doesn't support modern datetime types.")

    def is_usable(self):
        try:
            self.create_cursor().execute("SELECT 1")
        except Database.Error:
            return False
        else:
            return True

    def get_system_datetime(self):
        # http://blogs.msdn.com/b/sqlnativeclient/archive/2008/02/27/microsoft-sql-server-native-client-and-microsoft-sql-server-2008-native-client.aspx
        with self.temporary_connection() as cursor:
            if self.sql_server_version <= 2005:
                return cursor.execute('SELECT GETDATE()').fetchone()[0]
            else:
                return cursor.execute('SELECT SYSDATETIME()').fetchone()[0]

    @cached_property
    def sql_server_version(self, _known_versions={}):
        """
        Get the SQL Server version.

        The _known_versions default dictionary is created on the class. This is
        intentional - it allows us to cache this property's value across instances.
        Therefore, when Django creates a new database connection using the same
        alias, we won't need to query the server again.
        """
""" if self.alias not in _known_versions: with self.temporary_connection() as cursor: cursor.execute("SELECT CAST(SERVERPROPERTY('ProductVersion') AS varchar)") ver = cursor.fetchone()[0] ver = int(ver.split('.')[0]) if ver not in self._sql_server_versions: raise NotSupportedError('SQL Server v%d is not supported.' % ver) _known_versions[self.alias] = self._sql_server_versions[ver] return _known_versions[self.alias] @cached_property def to_azure_sql_db(self, _known_azures={}): """ Whether this connection is to a Microsoft Azure database server The _known_azures default dictionary is created on the class. This is intentional - it allows us to cache this property's value across instances. Therefore, when Django creates a new database connection using the same alias, we won't need query the server again. """ if self.alias not in _known_azures: with self.temporary_connection() as cursor: cursor.execute("SELECT CAST(SERVERPROPERTY('EngineEdition') AS integer)") _known_azures[self.alias] = cursor.fetchone()[0] == EDITION_AZURE_SQL_DB return _known_azures[self.alias] def _execute_foreach(self, sql, table_names=None): cursor = self.cursor() if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: cursor.execute(sql % self.ops.quote_name(table_name)) def _get_trancount(self): with self.connection.cursor() as cursor: return cursor.execute('SELECT @@TRANCOUNT').fetchone()[0] def _on_error(self, e): if e.args[0] in self._codes_for_networkerror: try: # close the stale connection self.close() # wait a moment for recovery from network error time.sleep(self.connection_recovery_interval_msec) except Exception: pass self.connection = None def _savepoint(self, sid): with self.cursor() as cursor: cursor.execute('SELECT @@TRANCOUNT') trancount = cursor.fetchone()[0] if trancount == 0: cursor.execute(self.ops.start_transaction_sql()) cursor.execute(self.ops.savepoint_create_sql(sid)) def _savepoint_commit(self, sid): # SQL Server has no support for partial commit in a transaction pass def _savepoint_rollback(self, sid): with self.cursor() as cursor: # FreeTDS requires TRANCOUNT that is greater than 0 cursor.execute('SELECT @@TRANCOUNT') trancount = cursor.fetchone()[0] if trancount > 0: cursor.execute(self.ops.savepoint_rollback_sql(sid)) def _set_autocommit(self, autocommit): with self.wrap_database_errors: allowed = not autocommit if not allowed: # FreeTDS requires TRANCOUNT that is greater than 0 allowed = self._get_trancount() > 0 if allowed: self.connection.autocommit = autocommit def check_constraints(self, table_names=None): self._execute_foreach('ALTER TABLE %s WITH CHECK CHECK CONSTRAINT ALL', table_names) def disable_constraint_checking(self): if not self.needs_rollback: self._execute_foreach('ALTER TABLE %s NOCHECK CONSTRAINT ALL') return not self.needs_rollback def enable_constraint_checking(self): if not self.needs_rollback: self._execute_foreach('ALTER TABLE %s WITH NOCHECK CHECK CONSTRAINT ALL') class CursorWrapper(object): """ A wrapper around the pyodbc's cursor that takes in account a) some pyodbc DB-API 2.0 implementation and b) some common ODBC driver particularities. 
""" def __init__(self, cursor, connection): self.active = True self.cursor = cursor self.connection = connection self.driver_charset = connection.driver_charset self.last_sql = '' self.last_params = () def close(self): if self.active: self.active = False self.cursor.close() def format_sql(self, sql, params): if self.driver_charset and isinstance(sql, str): # FreeTDS (and other ODBC drivers?) doesn't support Unicode # yet, so we need to encode the SQL clause itself in utf-8 sql = smart_str(sql, self.driver_charset) # pyodbc uses '?' instead of '%s' as parameter placeholder. if params is not None: sql = sql % tuple('?' * len(params)) return sql def format_params(self, params): fp = [] if params is not None: for p in params: if isinstance(p, str): if self.driver_charset: # FreeTDS (and other ODBC drivers?) doesn't support Unicode # yet, so we need to encode parameters in utf-8 fp.append(smart_str(p, self.driver_charset)) else: fp.append(p) elif isinstance(p, bytes): fp.append(p) elif isinstance(p, type(True)): if p: fp.append(1) else: fp.append(0) else: fp.append(p) return tuple(fp) def execute(self, sql, params=None): self.last_sql = sql sql = self.format_sql(sql, params) params = self.format_params(params) self.last_params = params try: return self.cursor.execute(sql, params) except Database.Error as e: self.connection._on_error(e) raise def executemany(self, sql, params_list=()): if not params_list: return None raw_pll = [p for p in params_list] sql = self.format_sql(sql, raw_pll[0]) params_list = [self.format_params(p) for p in raw_pll] try: return self.cursor.executemany(sql, params_list) except Database.Error as e: self.connection._on_error(e) raise def format_rows(self, rows): return list(map(self.format_row, rows)) def format_row(self, row): """ Decode data coming from the database if needed and convert rows to tuples (pyodbc Rows are not hashable). """ if self.driver_charset: for i in range(len(row)): f = row[i] # FreeTDS (and other ODBC drivers?) doesn't support Unicode # yet, so we need to decode utf-8 data coming from the DB if isinstance(f, bytes): row[i] = f.decode(self.driver_charset) return tuple(row) def fetchone(self): row = self.cursor.fetchone() if row is not None: row = self.format_row(row) # Any remaining rows in the current set must be discarded # before changing autocommit mode when you use FreeTDS if not self.connection.supports_mars: self.cursor.nextset() return row def fetchmany(self, chunk): return self.format_rows(self.cursor.fetchmany(chunk)) def fetchall(self): return self.format_rows(self.cursor.fetchall()) def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) mssql-django-1.1.2/mssql/client.py000066400000000000000000000036061417607077400171230ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
import re
import subprocess

from django.db.backends.base.client import BaseDatabaseClient


class DatabaseClient(BaseDatabaseClient):
    executable_name = 'sqlcmd'

    @classmethod
    def settings_to_cmd_args(cls, settings_dict, parameters):
        options = settings_dict['OPTIONS']
        user = options.get('user', settings_dict['USER'])
        password = options.get('passwd', settings_dict['PASSWORD'])

        driver = options.get('driver', 'ODBC Driver 13 for SQL Server')
        ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client')

        if not ms_drivers.match(driver):
            cls.executable_name = 'isql'

        if cls.executable_name == 'sqlcmd':
            db = options.get('db', settings_dict['NAME'])
            server = options.get('host', settings_dict['HOST'])
            port = options.get('port', settings_dict['PORT'])
            defaults_file = options.get('read_default_file')

            args = [cls.executable_name]
            if server:
                if port:
                    server = ','.join((server, str(port)))
                args += ["-S", server]
            if user:
                args += ["-U", user]
                if password:
                    args += ["-P", password]
            else:
                args += ["-E"]  # Try trusted connection instead
            if db:
                args += ["-d", db]
            if defaults_file:
                args += ["-i", defaults_file]
        else:
            dsn = options.get('dsn', '')
            args = ['%s -v %s %s %s' % (cls.executable_name, dsn, user, password)]

        args.extend(parameters)
        return args

    def runshell(self, parameters=[]):
        args = DatabaseClient.settings_to_cmd_args(self.connection.settings_dict, parameters)
        subprocess.run(args, check=True)

==== mssql-django-1.1.2/mssql/compiler.py ====
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

import types
from itertools import chain

import django
from django.db.models.aggregates import Avg, Count, StdDev, Variance
from django.db.models.expressions import Ref, Subquery, Value
from django.db.models.functions import (
    Chr, ConcatPair, Greatest, Least, Length, LPad, Repeat, RPad, StrIndex,
    Substr, Trim,
)
from django.db.models.sql import compiler
from django.db.transaction import TransactionManagementError
from django.db.utils import NotSupportedError

if django.VERSION >= (3, 1):
    from django.db.models.fields.json import compile_json_path, KeyTransform as json_KeyTransform


def _as_sql_agv(self, compiler, connection):
    return self.as_sql(compiler, connection, template='%(function)s(CONVERT(float, %(field)s))')


def _as_sql_chr(self, compiler, connection):
    return self.as_sql(compiler, connection, function='NCHAR')


def _as_sql_concatpair(self, compiler, connection):
    if connection.sql_server_version < 2012:
        node = self.coalesce()
        return node.as_sql(compiler, connection, arg_joiner=' + ', template='%(expressions)s')
    else:
        return self.as_sql(compiler, connection)


def _as_sql_count(self, compiler, connection):
    return self.as_sql(compiler, connection, function='COUNT_BIG')


def _as_sql_greatest(self, compiler, connection):
    # SQL Server does not provide GREATEST function,
    # so we emulate it with a table value constructor
    # https://msdn.microsoft.com/en-us/library/dd776382.aspx
    template = '(SELECT MAX(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))'
    return self.as_sql(compiler, connection, arg_joiner='), (', template=template)


def _as_sql_json_keytransform(self, compiler, connection):
    lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
    json_path = compile_json_path(key_transforms)
    return (
        "COALESCE(JSON_QUERY(%s, '%s'), JSON_VALUE(%s, '%s'))" %
        ((lhs, json_path) * 2)
    ), tuple(params) * 2
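# Illustration (not part of the original source): with the table value
# constructor template used below, Least('a', 'b') compiles to SQL roughly like
#   (SELECT MIN(value) FROM (VALUES ([a]), ([b])) AS _Least(value))
# and the GREATEST emulation above works the same way with MAX.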
def _as_sql_least(self, compiler, connection):
    # SQL Server does not provide LEAST function,
    # so we emulate it with a table value constructor
    # https://msdn.microsoft.com/en-us/library/dd776382.aspx
    template = '(SELECT MIN(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))'
    return self.as_sql(compiler, connection, arg_joiner='), (', template=template)


def _as_sql_length(self, compiler, connection):
    return self.as_sql(compiler, connection, function='LEN')


def _as_sql_lpad(self, compiler, connection):
    i = iter(self.get_source_expressions())
    expression, expression_arg = compiler.compile(next(i))
    length, length_arg = compiler.compile(next(i))
    fill_text, fill_text_arg = compiler.compile(next(i))
    params = []
    params.extend(fill_text_arg)
    params.extend(length_arg)
    params.extend(length_arg)
    params.extend(expression_arg)
    params.extend(length_arg)
    params.extend(expression_arg)
    params.extend(expression_arg)
    template = ('LEFT(REPLICATE(%(fill_text)s, %(length)s), CASE WHEN %(length)s > LEN(%(expression)s) '
                'THEN %(length)s - LEN(%(expression)s) ELSE 0 END) + %(expression)s')
    return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params


def _as_sql_repeat(self, compiler, connection):
    return self.as_sql(compiler, connection, function='REPLICATE')


def _as_sql_rpad(self, compiler, connection):
    i = iter(self.get_source_expressions())
    expression, expression_arg = compiler.compile(next(i))
    length, length_arg = compiler.compile(next(i))
    fill_text, fill_text_arg = compiler.compile(next(i))
    params = []
    params.extend(expression_arg)
    params.extend(fill_text_arg)
    params.extend(length_arg)
    params.extend(length_arg)
    template = 'LEFT(%(expression)s + REPLICATE(%(fill_text)s, %(length)s), %(length)s)'
    return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params


def _as_sql_stddev(self, compiler, connection):
    function = 'STDEV'
    if self.function == 'STDDEV_POP':
        function = '%sP' % function
    return self.as_sql(compiler, connection, function=function)


def _as_sql_strindex(self, compiler, connection):
    self.source_expressions.reverse()
    sql = self.as_sql(compiler, connection, function='CHARINDEX')
    self.source_expressions.reverse()
    return sql


def _as_sql_substr(self, compiler, connection):
    if len(self.get_source_expressions()) < 3:
        self.get_source_expressions().append(Value(2**31 - 1))
    return self.as_sql(compiler, connection)


def _as_sql_trim(self, compiler, connection):
    return self.as_sql(compiler, connection, template='LTRIM(RTRIM(%(expressions)s))')


def _as_sql_variance(self, compiler, connection):
    function = 'VAR'
    if self.function == 'VAR_POP':
        function = '%sP' % function
    return self.as_sql(compiler, connection, function=function)


def _cursor_iter(cursor, sentinel, col_count, itersize):
    """
    Yields blocks of rows from a cursor and ensures the cursor is closed
    when done.
    """
""" if not hasattr(cursor.db, 'supports_mars') or cursor.db.supports_mars: # same as the original Django implementation try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): yield rows if col_count is None else [r[:col_count] for r in rows] finally: cursor.close() else: # retrieve all chunks from the cursor and close it before yielding # so that we can open an another cursor over an iteration # (for drivers such as FreeTDS) chunks = [] try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): chunks.append(rows if col_count is None else [r[:col_count] for r in rows]) finally: cursor.close() for rows in chunks: yield rows compiler.cursor_iter = _cursor_iter class SQLCompiler(compiler.SQLCompiler): def as_sql(self, with_limits=True, with_col_aliases=False): """ Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. """ refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features # The do_offset flag indicates whether we need to construct # the SQL needed to use limit/offset w/SQL Server. high_mark = self.query.high_mark low_mark = self.query.low_mark do_limit = with_limits and high_mark is not None do_offset = with_limits and low_mark != 0 # SQL Server 2012 or newer supports OFFSET/FETCH clause supports_offset_clause = self.connection.sql_server_version >= 2012 do_offset_emulation = do_offset and not supports_offset_clause if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' -- see # docstring of get_from_clause() for details. from_, f_params = self.get_from_clause() where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) params = [] result = ['SELECT'] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params # SQL Server requires the keword for limitting at the begenning if do_limit and not do_offset: result.append('TOP %d' % high_mark) out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases or do_offset_emulation: s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) # SQL Server requires an order-by clause for offsetting if do_offset: meta = self.query.get_meta() qn = self.quote_name_unless_alias offsetting_order_by = '%s.%s' % (qn(meta.db_table), qn(meta.pk.db_column or meta.pk.column)) if do_offset_emulation: if order_by: ordering = [] for expr, (o_sql, o_params, _) in order_by: # value_expression in OVER clause cannot refer to # expressions or aliases in the select list. 
See: # http://msdn.microsoft.com/en-us/library/ms189461.aspx src = next(iter(expr.get_source_expressions())) if isinstance(src, Ref): src = next(iter(src.get_source_expressions())) o_sql, _ = src.as_sql(self, self.connection) odir = 'DESC' if expr.descending else 'ASC' o_sql = '%s %s' % (o_sql, odir) ordering.append(o_sql) params.extend(o_params) offsetting_order_by = ', '.join(ordering) order_by = [] out_cols.append('ROW_NUMBER() OVER (ORDER BY %s) AS [rn]' % offsetting_order_by) elif not order_by: order_by.append(((None, ('%s ASC' % offsetting_order_by, [], None)))) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' ) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of # If it's a NOWAIT/SKIP LOCKED/OF query but the backend # doesn't support it, raise NotSupportedError to prevent a # possible deadlock. if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), ) if for_update_part and self.connection.features.for_update_after_from: from_.insert(1, for_update_part) result += [', '.join(out_cols), 'FROM', *from_] params.extend(f_params) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if having: result.append('HAVING %s' % having) params.extend(h_params) explain = self.query.explain_info if django.VERSION >= (4, 0) else self.query.explain_query if explain: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for _, (o_sql, o_params, _) in order_by: ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) # For subqueries with an ORDER BY clause, SQL Server also # requires a TOP or OFFSET clause which is not generated for # Django 2.x. See https://github.com/microsoft/mssql-django/issues/12 if django.VERSION < (3, 0, 0) and not (do_offset or do_limit): result.append("OFFSET 0 ROWS") # SQL Server requires the backend-specific emulation (2008 or earlier) # or an offset clause (2012 or newer) for offsetting if do_offset: if do_offset_emulation: # Construct the final SQL clause, using the initial select SQL # obtained above. result = ['SELECT * FROM (%s) AS X WHERE X.rn' % ' '.join(result)] # Place WHERE condition on `rn` for the desired range.
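# Illustrative sketch of the emulation, assuming a hypothetical table
# [app_post] with primary key [id] and the slice qs[20:30] on a pre-2012
# server; the statement assembled here comes out shaped like:
#     SELECT * FROM (SELECT [app_post].[name] AS Col1,
#         ROW_NUMBER() OVER (ORDER BY [app_post].[id] ASC) AS [rn]
#         FROM [app_post]) AS X
#     WHERE X.rn BETWEEN 21 AND 30 ORDER BY X.rn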
if do_limit: result.append('BETWEEN %d AND %d' % (low_mark + 1, high_mark)) else: result.append('>= %d' % (low_mark + 1)) if not self.query.subquery: result.append('ORDER BY X.rn') else: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def compile(self, node, *args, **kwargs): node = self._as_microsoft(node) return super().compile(node, *args, **kwargs) def collapse_group_by(self, expressions, having): expressions = super().collapse_group_by(expressions, having) return [e for e in expressions if not isinstance(e, Subquery)] def _as_microsoft(self, node): as_microsoft = None if isinstance(node, Avg): as_microsoft = _as_sql_agv elif isinstance(node, Chr): as_microsoft = _as_sql_chr elif isinstance(node, ConcatPair): as_microsoft = _as_sql_concatpair elif isinstance(node, Count): as_microsoft = _as_sql_count elif isinstance(node, Greatest): as_microsoft = _as_sql_greatest elif isinstance(node, Least): as_microsoft = _as_sql_least elif isinstance(node, Length): as_microsoft = _as_sql_length elif isinstance(node, RPad): as_microsoft = _as_sql_rpad elif isinstance(node, LPad): as_microsoft = _as_sql_lpad elif isinstance(node, Repeat): as_microsoft = _as_sql_repeat elif isinstance(node, StdDev): as_microsoft = _as_sql_stddev elif isinstance(node, StrIndex): as_microsoft = _as_sql_strindex elif isinstance(node, Substr): as_microsoft = _as_sql_substr elif isinstance(node, Trim): as_microsoft = _as_sql_trim elif isinstance(node, Variance): as_microsoft = _as_sql_variance if django.VERSION >= (3, 1): if isinstance(node, json_KeyTransform): as_microsoft = _as_sql_json_keytransform if as_microsoft: node = node.copy() node.as_microsoft = types.MethodType(as_microsoft, node) return node class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler): def get_returned_fields(self): if django.VERSION >= (3, 0, 0): return self.returning_fields return self.return_id def fix_auto(self, sql, opts, fields, qn): if opts.auto_field is not None: # db_column is None if not explicitly specified by model field auto_field_column = opts.auto_field.db_column or opts.auto_field.column columns = [f.column for f in fields] if auto_field_column in columns: id_insert_sql = [] table = qn(opts.db_table) sql_format = 'SET IDENTITY_INSERT %s ON; %s; SET IDENTITY_INSERT %s OFF' for q, p in sql: id_insert_sql.append((sql_format % (table, q, table), p)) sql = 
id_insert_sql return sql def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). qn = self.connection.ops.quote_name opts = self.query.get_meta() result = ['INSERT INTO %s' % qn(opts.db_table)] fields = self.query.fields or [opts.pk] if self.query.fields: result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) values_format = 'VALUES (%s)' value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: values_format = '%s VALUES' # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.get_returned_fields() and self.connection.features.has_bulk_insert) and self.query.fields placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) if self.get_returned_fields() and self.connection.features.can_return_id_from_insert: result.insert(0, 'SET NOCOUNT ON') result.append((values_format + ';') % ', '.join(placeholder_rows[0])) params = [param_rows[0]] result.append('SELECT CAST(SCOPE_IDENTITY() AS bigint)') sql = [(" ".join(result), tuple(chain.from_iterable(params)))] else: if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) sql = [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: sql = [ (" ".join(result + [values_format % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] if self.query.fields: sql = self.fix_auto(sql, opts, fields, qn) return sql class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler): def as_sql(self): sql, params = super().as_sql() if sql: sql = '; '.join(['SET NOCOUNT OFF', sql]) return sql, params class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler): def as_sql(self): sql, params = super().as_sql() if sql: sql = '; '.join(['SET NOCOUNT OFF', sql]) return sql, params class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler): pass mssql-django-1.1.2/mssql/creation.py000066400000000000000000000107121417607077400174450ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import binascii import os from django.db.utils import InterfaceError from django.db.backends.base.creation import BaseDatabaseCreation from django import VERSION as django_version class DatabaseCreation(BaseDatabaseCreation): def cursor(self): if django_version >= (3, 1): return self.connection._nodb_cursor() return self.connection._nodb_connection.cursor() def _create_test_db(self, verbosity, autoclobber, keepdb=False): """ Internal implementation - create the test db tables. """ # Try to create the test DB, but if we fail due to 28000 (Login failed for user), # it's probably because the user doesn't have permission to [dbo].[master], # so we can proceed if we're keeping the DB anyway. 
# https://github.com/microsoft/mssql-django/issues/61 try: return super()._create_test_db(verbosity, autoclobber, keepdb) except InterfaceError as err: if err.args[0] == '28000' and keepdb: self.log('Received error %s, proceeding because keepdb=True' % ( err.args[1], )) else: raise err def _destroy_test_db(self, test_database_name, verbosity): """ Internal implementation - remove the test db tables. """ # Remove the test database to clean up after # ourselves. Connect to the previous database (not the test database) # to do so, because it's not allowed to delete a database while being # connected to it. with self.cursor() as cursor: to_azure_sql_db = self.connection.to_azure_sql_db if not to_azure_sql_db: cursor.execute("ALTER DATABASE %s SET SINGLE_USER WITH ROLLBACK IMMEDIATE" % self.connection.ops.quote_name(test_database_name)) cursor.execute("DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name)) def sql_table_creation_suffix(self): suffix = [] collation = self.connection.settings_dict['TEST'].get('COLLATION', None) if collation: suffix.append('COLLATE %s' % collation) return ' '.join(suffix) # The following code to add regex support in SQLServer is taken from django-mssql # see https://bitbucket.org/Manfre/django-mssql def enable_clr(self): """ Enables clr for server if not already enabled This function will not fail if current user doesn't have permissions to enable clr, and clr is already enabled """ with self.cursor() as cursor: # check whether clr is enabled cursor.execute(''' SELECT value FROM sys.configurations WHERE name = 'clr enabled' ''') res = None try: res = cursor.fetchone() except Exception: pass if not res or not res[0]: # if not enabled enable clr cursor.execute("sp_configure 'clr enabled', 1") cursor.execute("RECONFIGURE") cursor.execute("sp_configure 'show advanced options', 1") cursor.execute("RECONFIGURE") cursor.execute("sp_configure 'clr strict security', 0") cursor.execute("RECONFIGURE") def install_regex_clr(self, database_name): sql = ''' USE {database_name}; -- Drop and recreate the function if it already exists IF OBJECT_ID('REGEXP_LIKE') IS NOT NULL DROP FUNCTION [dbo].[REGEXP_LIKE] IF EXISTS(select * from sys.assemblies where name like 'regex_clr') DROP ASSEMBLY regex_clr ; CREATE ASSEMBLY regex_clr FROM 0x{assembly_hex} WITH PERMISSION_SET = SAFE; create function [dbo].[REGEXP_LIKE] ( @input nvarchar(max), @pattern nvarchar(max), @caseSensitive int ) RETURNS INT AS EXTERNAL NAME regex_clr.UserDefinedFunctions.REGEXP_LIKE '''.format( database_name=self.connection.ops.quote_name(database_name), assembly_hex=self.get_regex_clr_assembly_hex(), ).split(';') self.enable_clr() with self.cursor() as cursor: for s in sql: cursor.execute(s) def get_regex_clr_assembly_hex(self): with open(os.path.join(os.path.dirname(__file__), 'regex_clr.dll'), 'rb') as f: return binascii.hexlify(f.read()).decode('ascii') mssql-django-1.1.2/mssql/features.py000066400000000000000000000046021417607077400174600ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): allow_sliced_subqueries_with_in = False can_introspect_autofield = True can_introspect_json_field = False can_introspect_small_integer_field = True can_return_columns_from_insert = True can_return_id_from_insert = True can_rollback_ddl = True can_use_chunked_reads = False for_update_after_from = True greatest_least_ignores_nulls = True has_json_object_function = False has_json_operators = False has_native_json_field = False has_native_uuid_field = False has_real_datatype = True has_select_for_update = True has_select_for_update_nowait = True has_select_for_update_skip_locked = True ignores_quoted_identifier_case = True ignores_table_name_case = True order_by_nulls_first = True requires_literal_defaults = True requires_sqlparse_for_splitting = False supports_boolean_expr_in_select_clause = False supports_covering_indexes = True supports_deferrable_unique_constraints = False supports_expression_indexes = False supports_ignore_conflicts = False supports_index_on_text_field = False supports_json_field_contains = False supports_order_by_nulls_modifier = False supports_over_clause = True supports_paramstyle_pyformat = False supports_primitives_in_json_field = False supports_regex_backreferencing = True supports_sequence_reset = False supports_subqueries_in_group_by = False supports_tablespaces = True supports_temporal_subtraction = True supports_timezones = False supports_transactions = True uses_savepoints = True has_bulk_insert = True supports_nullable_unique_constraints = True supports_partially_nullable_unique_constraints = True supports_partial_indexes = True supports_functions_in_partial_indexes = True @cached_property def has_zoneinfo_database(self): with self.connection.cursor() as cursor: cursor.execute("SELECT TOP 1 1 FROM sys.time_zone_info") return cursor.fetchone() is not None @cached_property def supports_json_field(self): return self.connection.sql_server_version >= 2016 or self.connection.to_azure_sql_db mssql-django-1.1.2/mssql/functions.py000066400000000000000000000301111417607077400176440ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
import json from django import VERSION from django.db import NotSupportedError, connections, transaction from django.db.models import BooleanField, Value from django.db.models.functions import Cast, NthValue from django.db.models.functions.math import ATan2, Log, Ln, Mod, Round from django.db.models.expressions import Case, Exists, OrderBy, When, Window, Expression from django.db.models.lookups import Lookup, In from django.db.models import lookups, CheckConstraint from django.db.models.fields import BinaryField, Field from django.db.models.sql.query import Query from django.db.models.query import QuerySet from django.core import validators if VERSION >= (3, 1): from django.db.models.fields.json import ( KeyTransform, KeyTransformIn, KeyTransformExact, HasKeyLookup, compile_json_path) if VERSION >= (3, 2): from django.db.models.functions.math import Random DJANGO3 = VERSION[0] >= 3 class TryCast(Cast): function = 'TRY_CAST' def sqlserver_atan2(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, function='ATN2', **extra_context) def sqlserver_log(self, compiler, connection, **extra_context): clone = self.copy() clone.set_source_expressions(self.get_source_expressions()[::-1]) return clone.as_sql(compiler, connection, **extra_context) def sqlserver_ln(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, function='LOG', **extra_context) def sqlserver_mod(self, compiler, connection): # MSSQL doesn't have keyword MOD expr = self.get_source_expressions() number_a = compiler.compile(expr[0]) number_b = compiler.compile(expr[1]) return self.as_sql( compiler, connection, function="", template='(ABS({a}) - FLOOR(ABS({a}) / ABS({b})) * ABS({b})) * SIGN({a}) * SIGN({b})'.format( a=number_a[0], b=number_b[0]), arg_joiner="" ) def sqlserver_nth_value(self, compiler, connection, **extra_content): raise NotSupportedError('This backend does not support the NthValue function') def sqlserver_round(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, template='%(function)s(%(expressions)s, 0)', **extra_context) def sqlserver_random(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, function='RAND', **extra_context) def sqlserver_window(self, compiler, connection, template=None): # MSSQL window functions require an OVER clause with ORDER BY if self.order_by is None: self.order_by = Value('SELECT NULL') return self.as_sql(compiler, connection, template) def sqlserver_exists(self, compiler, connection, template=None, **extra_context): # MS SQL doesn't allow EXISTS() in the SELECT list, so wrap it with a # CASE WHEN expression. Change the template since the When expression # requires a left hand side (column) to compare against. sql, params = self.as_sql(compiler, connection, template, **extra_context) sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params def sqlserver_lookup(self, compiler, connection): # MSSQL doesn't allow EXISTS() to be compared to another expression # unless it's wrapped in a CASE WHEN. 
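# Illustrative sketch (hypothetical queryset): filtering against an
# Exists() expression such as
#     Post.objects.filter(published=Exists(recent_comments))
# first folds each wrapped operand to a scalar, e.g.
#     WHERE [published] = (CASE WHEN EXISTS (SELECT ...) THEN 1 ELSE 0 END)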
wrapped = False exprs = [] for expr in (self.lhs, self.rhs): if isinstance(expr, Exists): expr = Case(When(expr, then=True), default=False, output_field=BooleanField()) wrapped = True exprs.append(expr) lookup = type(self)(*exprs) if wrapped else self return lookup.as_sql(compiler, connection) def sqlserver_orderby(self, compiler, connection): template = None if self.nulls_last: template = 'CASE WHEN %(expression)s IS NULL THEN 1 ELSE 0 END, %(expression)s %(ordering)s' if self.nulls_first: template = 'CASE WHEN %(expression)s IS NULL THEN 0 ELSE 1 END, %(expression)s %(ordering)s' copy = self.copy() # Prevent OrderBy.as_sql() from modifying supplied templates copy.nulls_first = False copy.nulls_last = False # MSSQL doesn't allow ORDER BY EXISTS() unless it's wrapped in a CASE WHEN. if isinstance(self.expression, Exists): copy.expression = Case( When(self.expression, then=True), default=False, output_field=BooleanField(), ) return copy.as_sql(compiler, connection, template=template) def split_parameter_list_as_sql(self, compiler, connection): # Insert In clause parameters 1000 at a time into a temp table. lhs, _ = self.process_lhs(compiler, connection) _, rhs_params = self.batch_process_rhs(compiler, connection) with connection.cursor() as cursor: cursor.execute("IF OBJECT_ID('tempdb.dbo.#Temp_params', 'U') IS NOT NULL DROP TABLE #Temp_params; ") parameter_data_type = self.lhs.field.db_type(connection) Temp_table_collation = 'COLLATE DATABASE_DEFAULT' if 'char' in parameter_data_type else '' cursor.execute(f"CREATE TABLE #Temp_params (params {parameter_data_type} {Temp_table_collation})") for offset in range(0, len(rhs_params), 1000): sqls_params = rhs_params[offset: offset + 1000] sqls_params = ", ".join("('{}')".format(item) for item in sqls_params) cursor.execute("INSERT INTO #Temp_params VALUES %s" % sqls_params) in_clause = lhs + ' IN ' + '(SELECT params from #Temp_params)' return in_clause, () def unquote_json_rhs(rhs_params): for value in rhs_params: value = json.loads(value) if not isinstance(value, (list, dict)): rhs_params = [param.replace('"', '') for param in rhs_params] return rhs_params def json_KeyTransformExact_process_rhs(self, compiler, connection): if isinstance(self.rhs, KeyTransform): return super(lookups.Exact, self).process_rhs(compiler, connection) rhs, rhs_params = super(KeyTransformExact, self).process_rhs(compiler, connection) return rhs, unquote_json_rhs(rhs_params) def json_KeyTransformIn(self, compiler, connection): lhs, _ = super(KeyTransformIn, self).process_lhs(compiler, connection) rhs, rhs_params = super(KeyTransformIn, self).process_rhs(compiler, connection) return (lhs + ' IN ' + rhs, unquote_json_rhs(rhs_params)) def json_HasKeyLookup(self, compiler, connection): # Process JSON path from the left-hand side. if isinstance(self.lhs, KeyTransform): lhs, _, lhs_key_transforms = self.lhs.preprocess_lhs(compiler, connection) lhs_json_path = compile_json_path(lhs_key_transforms) else: lhs, _ = self.process_lhs(compiler, connection) lhs_json_path = '$' sql = lhs + ' IN (SELECT ' + lhs + ' FROM ' + self.lhs.output_field.model._meta.db_table + \ ' CROSS APPLY OPENJSON(' + lhs + ') WITH ( [json_path_value] char(1) \'%s\') WHERE [json_path_value] IS NOT NULL)' # Process JSON path from the right-hand side. 
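# Illustrative sketch: for a JSONField named value, value__has_key='a'
# compiles the key via compile_json_path() to the path '$."a"', and a nested
# transform such as value__a__has_key='b' yields '$."a"."b"', which feeds the
# OPENJSON(...) WITH ([json_path_value] char(1) '<path>') probe built above.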
rhs = self.rhs rhs_params = [] if not isinstance(rhs, (list, tuple)): rhs = [rhs] for key in rhs: if isinstance(key, KeyTransform): *_, rhs_key_transforms = key.preprocess_lhs(compiler, connection) else: rhs_key_transforms = [key] rhs_params.append('%s%s' % ( lhs_json_path, compile_json_path(rhs_key_transforms, include_root=False), )) # Add condition for each key. if self.logical_operator: sql = '(%s)' % self.logical_operator.join([sql] * len(rhs_params)) return sql % tuple(rhs_params), [] def BinaryField_init(self, *args, **kwargs): # Add max_length option for BinaryField, default to max kwargs.setdefault('editable', False) Field.__init__(self, *args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) else: self.max_length = 'max' def _get_check_sql(self, model, schema_editor): if VERSION >= (3, 1): query = Query(model=model, alias_cols=False) else: query = Query(model=model) where = query.build_where(self.check) compiler = query.get_compiler(connection=schema_editor.connection) sql, params = where.as_sql(compiler, schema_editor.connection) try: for p in params: str(p).encode('ascii') except UnicodeEncodeError: sql = sql.replace('%s', 'N%s') return sql % tuple(schema_editor.quote_value(p) for p in params) def bulk_update_with_default(self, objs, fields, batch_size=None, default=0): """ Update the given fields in each of the given objects in the database. When bulk_update() sets all fields to null, SQL Server requires that at least one of the result expressions in a CASE specification be an expression other than the NULL constant. Patched with a default value of 0. The user can also pass a custom default value for the CASE statement. """ if batch_size is not None and batch_size < 0: raise ValueError('Batch size must be a positive integer.') if not fields: raise ValueError('Field names must be given to bulk_update().') objs = tuple(objs) if any(obj.pk is None for obj in objs): raise ValueError('All bulk_update() objects must have a primary key set.') fields = [self.model._meta.get_field(name) for name in fields] if any(not f.concrete or f.many_to_many for f in fields): raise ValueError('bulk_update() can only be used with concrete fields.') if any(f.primary_key for f in fields): raise ValueError('bulk_update() cannot be used with primary key fields.') if not objs: return 0 # PK is used twice in the resulting update query, once in the filter # and once in the WHEN. Each field will also have one CAST.
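# Worked example (illustrative): updating 3 fields makes bulk_batch_size()
# below see len(['pk', 'pk'] + fields) == 5 names; with the backend's 2050
# usable parameters and 2 parameters per CASE entry, that gives
# min(1000, 2050 // 5 // 2) == 205 objects per batch.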
max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs) batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size requires_casting = connections[self.db].features.requires_casted_case_in_updates batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size)) updates = [] for batch_objs in batches: update_kwargs = {} for field in fields: value_none_counter = 0 when_statements = [] for obj in batch_objs: attr = getattr(obj, field.attname) if not isinstance(attr, Expression): if attr is None: value_none_counter+=1 attr = Value(attr, output_field=field) when_statements.append(When(pk=obj.pk, then=attr)) if(value_none_counter == len(when_statements)): case_statement = Case(*when_statements, output_field=field, default=Value(default)) else: case_statement = Case(*when_statements, output_field=field) if requires_casting: case_statement = Cast(case_statement, output_field=field) update_kwargs[field.attname] = case_statement updates.append(([obj.pk for obj in batch_objs], update_kwargs)) rows_updated = 0 with transaction.atomic(using=self.db, savepoint=False): for pks, update_kwargs in updates: rows_updated += self.filter(pk__in=pks).update(**update_kwargs) return rows_updated ATan2.as_microsoft = sqlserver_atan2 In.split_parameter_list_as_sql = split_parameter_list_as_sql if VERSION >= (3, 1): KeyTransformIn.as_microsoft = json_KeyTransformIn KeyTransformExact.process_rhs = json_KeyTransformExact_process_rhs HasKeyLookup.as_microsoft = json_HasKeyLookup Ln.as_microsoft = sqlserver_ln Log.as_microsoft = sqlserver_log Mod.as_microsoft = sqlserver_mod NthValue.as_microsoft = sqlserver_nth_value Round.as_microsoft = sqlserver_round Window.as_microsoft = sqlserver_window BinaryField.__init__ = BinaryField_init CheckConstraint._get_check_sql = _get_check_sql if VERSION >= (3, 2): Random.as_microsoft = sqlserver_random if DJANGO3: Lookup.as_microsoft = sqlserver_lookup else: Exists.as_microsoft = sqlserver_exists OrderBy.as_microsoft = sqlserver_orderby QuerySet.bulk_update = bulk_update_with_default mssql-django-1.1.2/mssql/introspection.py000066400000000000000000000365551417607077400205560ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import pyodbc as Database from django import VERSION from django.db.backends.base.introspection import ( BaseDatabaseIntrospection, FieldInfo, TableInfo, ) from django.db.models.indexes import Index from django.conf import settings SQL_AUTOFIELD = -777555 SQL_BIGAUTOFIELD = -777444 def get_schema_name(): return getattr(settings, 'SCHEMA_TO_INSPECT', 'SCHEMA_NAME()') class DatabaseIntrospection(BaseDatabaseIntrospection): # Map type codes to Django Field types. 
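# For example (illustrative): pyodbc reports an 'int IDENTITY(1,1)' primary
# key as SQL_INTEGER; the identity check in get_table_description() rewrites
# it to the SQL_AUTOFIELD sentinel so inspectdb emits an AutoField, and a
# 'bigint IDENTITY' column becomes SQL_BIGAUTOFIELD -> 'BigAutoField'.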
data_types_reverse = { SQL_AUTOFIELD: 'AutoField', SQL_BIGAUTOFIELD: 'BigAutoField', Database.SQL_BIGINT: 'BigIntegerField', # Database.SQL_BINARY: , Database.SQL_BIT: 'BooleanField', Database.SQL_CHAR: 'CharField', Database.SQL_DECIMAL: 'DecimalField', Database.SQL_DOUBLE: 'FloatField', Database.SQL_FLOAT: 'FloatField', Database.SQL_GUID: 'TextField', Database.SQL_INTEGER: 'IntegerField', Database.SQL_LONGVARBINARY: 'BinaryField', # Database.SQL_LONGVARCHAR: , Database.SQL_NUMERIC: 'DecimalField', Database.SQL_REAL: 'FloatField', Database.SQL_SMALLINT: 'SmallIntegerField', Database.SQL_SS_TIME2: 'TimeField', Database.SQL_TINYINT: 'SmallIntegerField', Database.SQL_TYPE_DATE: 'DateField', Database.SQL_TYPE_TIME: 'TimeField', Database.SQL_TYPE_TIMESTAMP: 'DateTimeField', Database.SQL_VARBINARY: 'BinaryField', Database.SQL_VARCHAR: 'TextField', Database.SQL_WCHAR: 'CharField', Database.SQL_WLONGVARCHAR: 'TextField', Database.SQL_WVARCHAR: 'TextField', } ignored_tables = [] def get_field_type(self, data_type, description): field_type = super().get_field_type(data_type, description) # the max nvarchar length is described as 0 or 2**30-1 # (it depends on the driver) size = description.internal_size if field_type == 'CharField': if size == 0 or size >= 2**30 - 1: field_type = "TextField" elif field_type == 'TextField': if size > 0 and size < 2**30 - 1: field_type = 'CharField' return field_type def get_table_list(self, cursor): """ Returns a list of table and view names in the current database. """ sql = f'SELECT TABLE_NAME, TABLE_TYPE FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = {get_schema_name()}' cursor.execute(sql) types = {'BASE TABLE': 't', 'VIEW': 'v'} return [TableInfo(row[0], types.get(row[1])) for row in cursor.fetchall() if row[0] not in self.ignored_tables] def _is_auto_field(self, cursor, table_name, column_name): """ Checks whether the column is an Identity column """ # COLUMNPROPERTY: http://msdn2.microsoft.com/en-us/library/ms174968.aspx # from django.db import connection # cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", # (connection.ops.quote_name(table_name), column_name)) cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", (self.connection.ops.quote_name(table_name), column_name)) return cursor.fetchall()[0][0] def get_table_description(self, cursor, table_name, identity_check=True): """Returns a description of the table, with DB-API cursor.description interface. The 'identity_check' parameter has been added to the function argspec. If set to True, the function will check each of the table's fields for the IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField). When an integer field is found with an IDENTITY property, it is given a custom field number of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict. When a bigint field is found with an IDENTITY property, it is given a custom field number of SQL_BIGAUTOFIELD, which maps to the 'BigAutoField' value in the DATA_TYPES_REVERSE dict.
""" # map pyodbc's cursor.columns to db-api cursor description columns = [[c[3], c[4], None, c[6], c[6], c[8], c[10], c[12]] for c in cursor.columns(table=table_name)] items = [] for column in columns: if VERSION >= (3, 2): if self.connection.sql_server_version >= 2019: sql = """SELECT collation_name FROM sys.columns c inner join sys.tables t on c.object_id = t.object_id WHERE t.name = '%s' and c.name = '%s' """ % (table_name, column[0]) cursor.execute(sql) collation_name = cursor.fetchone() column.append(collation_name[0] if collation_name else '') else: column.append('') if identity_check and self._is_auto_field(cursor, table_name, column[0]): if column[1] == Database.SQL_BIGINT: column[1] = SQL_BIGAUTOFIELD else: column[1] = SQL_AUTOFIELD if column[1] == Database.SQL_WVARCHAR and column[3] < 4000: column[1] = Database.SQL_WCHAR items.append(FieldInfo(*column)) return items def get_sequences(self, cursor, table_name, table_fields=()): cursor.execute(f""" SELECT c.name FROM sys.columns c INNER JOIN sys.tables t ON c.object_id = t.object_id WHERE t.schema_id = SCHEMA_ID({get_schema_name()}) AND t.name = %s AND c.is_identity = 1""", [table_name]) # SQL Server allows only one identity column per table # https://docs.microsoft.com/en-us/sql/t-sql/statements/create-table-transact-sql-identity-property row = cursor.fetchone() return [{'table': table_name, 'column': row[0]}] if row else [] def get_relations(self, cursor, table_name): """ Returns a dictionary of {field_name: (field_name_other_table, other_table)} representing all relationships to the given table. """ # CONSTRAINT_COLUMN_USAGE: http://msdn2.microsoft.com/en-us/library/ms174431.aspx # CONSTRAINT_TABLE_USAGE: http://msdn2.microsoft.com/en-us/library/ms179883.aspx # REFERENTIAL_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms179987.aspx # TABLE_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms181757.aspx sql = f""" SELECT e.COLUMN_NAME AS column_name, c.TABLE_NAME AS referenced_table_name, d.COLUMN_NAME AS referenced_column_name FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS a INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS b ON a.CONSTRAINT_NAME = b.CONSTRAINT_NAME AND a.TABLE_SCHEMA = b.CONSTRAINT_SCHEMA INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_TABLE_USAGE AS c ON b.UNIQUE_CONSTRAINT_NAME = c.CONSTRAINT_NAME AND b.CONSTRAINT_SCHEMA = c.CONSTRAINT_SCHEMA INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS d ON c.CONSTRAINT_NAME = d.CONSTRAINT_NAME AND c.CONSTRAINT_SCHEMA = d.CONSTRAINT_SCHEMA INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS e ON a.CONSTRAINT_NAME = e.CONSTRAINT_NAME AND a.TABLE_SCHEMA = e.TABLE_SCHEMA WHERE a.TABLE_SCHEMA = {get_schema_name()} AND a.TABLE_NAME = %s AND a.CONSTRAINT_TYPE = 'FOREIGN KEY'""" cursor.execute(sql, (table_name,)) return dict([[item[0], (item[2], item[1])] for item in cursor.fetchall()]) def get_key_columns(self, cursor, table_name): """ Returns a list of (column_name, referenced_table_name, referenced_column_name) for all key columns in given table. 
""" key_columns = [] cursor.execute(f""" SELECT c.name AS column_name, rt.name AS referenced_table_name, rc.name AS referenced_column_name FROM sys.foreign_key_columns fk INNER JOIN sys.tables t ON t.object_id = fk.parent_object_id INNER JOIN sys.columns c ON c.object_id = t.object_id AND c.column_id = fk.parent_column_id INNER JOIN sys.tables rt ON rt.object_id = fk.referenced_object_id INNER JOIN sys.columns rc ON rc.object_id = rt.object_id AND rc.column_id = fk.referenced_column_id WHERE t.schema_id = SCHEMA_ID({get_schema_name()}) AND t.name = %s""", [table_name]) key_columns.extend([tuple(row) for row in cursor.fetchall()]) return key_columns def get_constraints(self, cursor, table_name): """ Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. Returns a dict mapping constraint names to their attributes, where attributes is a dict with keys: * columns: List of columns this covers * primary_key: True if primary key, False otherwise * unique: True if this is a unique constraint, False otherwise * foreign_key: (table, column) of target, or None * check: True if check constraint, False otherwise * index: True if index, False otherwise. * orders: The order (ASC/DESC) defined for the columns of indexes * type: The type of the index (btree, hash, etc.) """ constraints = {} # Loop over the key table, collecting things as constraints # This will get PKs, FKs, and uniques, but not CHECK cursor.execute(f""" SELECT kc.constraint_name, kc.column_name, tc.constraint_type, fk.referenced_table_name, fk.referenced_column_name FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kc INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc ON kc.table_schema = tc.table_schema AND kc.table_name = tc.table_name AND kc.constraint_name = tc.constraint_name LEFT OUTER JOIN ( SELECT ps.name AS table_schema, pt.name AS table_name, pc.name AS column_name, rt.name AS referenced_table_name, rc.name AS referenced_column_name FROM sys.foreign_key_columns fkc INNER JOIN sys.tables pt ON fkc.parent_object_id = pt.object_id INNER JOIN sys.schemas ps ON pt.schema_id = ps.schema_id INNER JOIN sys.columns pc ON fkc.parent_object_id = pc.object_id AND fkc.parent_column_id = pc.column_id INNER JOIN sys.tables rt ON fkc.referenced_object_id = rt.object_id INNER JOIN sys.schemas rs ON rt.schema_id = rs.schema_id INNER JOIN sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id ) fk ON kc.table_schema = fk.table_schema AND kc.table_name = fk.table_name AND kc.column_name = fk.column_name WHERE kc.table_schema = {get_schema_name()} AND kc.table_name = %s ORDER BY kc.constraint_name ASC, kc.ordinal_position ASC """, [table_name]) for constraint, column, kind, ref_table, ref_column in cursor.fetchall(): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { "columns": [], "primary_key": kind.lower() == "primary key", "unique": kind.lower() in ["primary key", "unique"], "foreign_key": (ref_table, ref_column) if kind.lower() == "foreign key" else None, "check": False, "index": False, } # Record the details constraints[constraint]['columns'].append(column) # Now get CHECK constraint columns cursor.execute(f""" SELECT kc.constraint_name, kc.column_name FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS kc JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS c ON kc.table_schema = c.table_schema AND kc.table_name = c.table_name AND kc.constraint_name = c.constraint_name WHERE c.constraint_type = 'CHECK' AND 
kc.table_schema = {get_schema_name()} AND kc.table_name = %s """, [table_name]) for constraint, column in cursor.fetchall(): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { "columns": [], "primary_key": False, "unique": False, "foreign_key": None, "check": True, "index": False, } # Record the details constraints[constraint]['columns'].append(column) # Now get indexes cursor.execute(f""" SELECT i.name AS index_name, i.is_unique, i.is_primary_key, i.type, i.type_desc, ic.is_descending_key, c.name AS column_name FROM sys.tables AS t INNER JOIN sys.schemas AS s ON t.schema_id = s.schema_id INNER JOIN sys.indexes AS i ON t.object_id = i.object_id INNER JOIN sys.index_columns AS ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id INNER JOIN sys.columns AS c ON ic.object_id = c.object_id AND ic.column_id = c.column_id WHERE t.schema_id = SCHEMA_ID({get_schema_name()}) AND t.name = %s ORDER BY i.index_id ASC, ic.index_column_id ASC """, [table_name]) indexes = {} for index, unique, primary, type_, desc, order, column in cursor.fetchall(): if index not in indexes: indexes[index] = { "columns": [], "primary_key": primary, "unique": unique, "foreign_key": None, "check": False, "index": True, "orders": [], "type": Index.suffix if type_ in (1, 2) else desc.lower(), } indexes[index]["columns"].append(column) indexes[index]["orders"].append("DESC" if order == 1 else "ASC") for index, constraint in indexes.items(): if index not in constraints: constraints[index] = constraint return constraints def get_primary_key_column(self, cursor, table_name): cursor.execute("SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = N'%s'" % table_name) row = cursor.fetchone() if row is None: raise ValueError("Table %s does not exist" % table_name) return super().get_primary_key_column(cursor, table_name) mssql-django-1.1.2/mssql/management/000077500000000000000000000000001417607077400174025ustar00rootroot00000000000000mssql-django-1.1.2/mssql/management/__init__.py000066400000000000000000000000001417607077400215010ustar00rootroot00000000000000mssql-django-1.1.2/mssql/management/commands/000077500000000000000000000000001417607077400212035ustar00rootroot00000000000000mssql-django-1.1.2/mssql/management/commands/__init__.py000066400000000000000000000000001417607077400233020ustar00rootroot00000000000000mssql-django-1.1.2/mssql/management/commands/inspectdb.py000066400000000000000000000012201417607077400235230ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. from django.core.management.commands.inspectdb import Command as inspectdb_Command from django.conf import settings class Command(inspectdb_Command): def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( '--schema', default='dbo', help='Choose the database schema to inspect, default is [dbo]', ) def handle(self, *args, **options): if options["schema"]: settings.SCHEMA_TO_INSPECT = "'" + options["schema"] + "'" return super().handle(*args, **options) mssql-django-1.1.2/mssql/management/commands/install_regex_clr.py000066400000000000000000000015221417607077400252550ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. 
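# Illustrative usage sketch (hypothetical database name and queryset): after
#     python manage.py install_regex_clr my_database
# regex lookups are served by the installed UDF, e.g.
#     Post.objects.filter(title__iregex=r'^(an?|the) ')
# compiles to a predicate like dbo.REGEXP_LIKE([title], %s, 0)=1.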
# Add regex support in SQLServer # Code taken from django-mssql (see https://bitbucket.org/Manfre/django-mssql) from django.core.management.base import BaseCommand from django.db import connection class Command(BaseCommand): help = "Installs the regex_clr.dll assembly with the database" requires_model_validation = False args = 'database_name' def add_arguments(self, parser): parser.add_argument('database_name') def handle(self, *args, **options): database_name = options['database_name'] if not database_name: self.print_help('manage.py', 'install_regex_clr') return connection.creation.install_regex_clr(database_name) print('Installed regex_clr to database %s' % database_name) mssql-django-1.1.2/mssql/operations.py000066400000000000000000000515561417607077400200370ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import datetime import uuid import warnings from django.conf import settings from django.db.backends.base.operations import BaseDatabaseOperations from django.db.models.expressions import Exists, ExpressionWrapper, RawSQL from django.db.models.sql.where import WhereNode from django.utils import timezone from django.utils.encoding import force_str from django import VERSION as django_version import pytz class DatabaseOperations(BaseDatabaseOperations): compiler_module = 'mssql.compiler' cast_char_field_without_max_length = 'nvarchar(max)' def max_in_list_size(self): # The driver might add a few parameters # chose a reasonable number less than 2100 limit return 2048 def _convert_field_to_tz(self, field_name, tzname): if settings.USE_TZ and not tzname == 'UTC': offset = self._get_utcoffset(tzname) field_name = 'DATEADD(second, %d, %s)' % (offset, field_name) return field_name def _get_utcoffset(self, tzname): """ Returns UTC offset for given time zone in seconds """ # SQL Server has no built-in support for tz database, see: # http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx zone = pytz.timezone(tzname) # no way to take DST into account at this point now = datetime.datetime.now() delta = zone.localize(now, is_dst=False).utcoffset() return delta.days * 86400 + delta.seconds - zone.dst(now).seconds def bulk_batch_size(self, fields, objs): """ Returns the maximum allowed batch size for the backend. The fields are the fields going to be inserted in the batch, the objs contains all the objects to be inserted. """ max_insert_rows = 1000 fields_len = len(fields) if fields_len == 0: # Required for empty model # (bulk_create.tests.BulkCreateTests.test_empty_model) return max_insert_rows # MSSQL allows a query to have 2100 parameters but some parameters are # taken up defining `NVARCHAR` parameters to store the query text and # query parameters for the `sp_executesql` call. This should only take # up 2 parameters but I've had this error when sending 2098 parameters. max_query_params = 2050 # inserts are capped at 1000 rows regardless of number of query params. # bulk_update CASE...WHEN...THEN statement sometimes takes 2 parameters per field return min(max_insert_rows, max_query_params // fields_len // 2) def bulk_insert_sql(self, fields, placeholder_rows): placeholder_rows_sql = (", ".join(row) for row in placeholder_rows) values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) return "VALUES " + values_sql def cache_key_culling_sql(self): """ Returns a SQL query that retrieves the first cache key greater than the smallest. 
This is used by the 'db' cache backend to determine where to start culling. """ return "SELECT cache_key FROM (SELECT cache_key, " \ "ROW_NUMBER() OVER (ORDER BY cache_key) AS rn FROM %s" \ ") cache WHERE rn = %%s + 1" def combine_duration_expression(self, connector, sub_expressions): lhs, rhs = sub_expressions sign = ' * -1' if connector == '-' else '' if lhs.startswith('DATEADD'): col, sql = rhs, lhs else: col, sql = lhs, rhs params = [sign for _ in range(sql.count('DATEADD'))] params.append(col) return sql % tuple(params) def combine_expression(self, connector, sub_expressions): """ SQL Server requires special cases for some operators in query expressions """ if connector == '^': return 'POWER(%s)' % ','.join(sub_expressions) elif connector == '<<': return '%s * (2 * %s)' % tuple(sub_expressions) elif connector == '>>': return '%s / (2 * %s)' % tuple(sub_expressions) return super().combine_expression(connector, sub_expressions) def convert_datetimefield_value(self, value, expression, connection): if value is not None: if settings.USE_TZ: value = timezone.make_aware(value, self.connection.timezone) return value def convert_floatfield_value(self, value, expression, connection): if value is not None: value = float(value) return value def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value def convert_booleanfield_value(self, value, expression, connection): return bool(value) if value in (0, 1) else value def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': return "DATEPART(weekday, %s)" % field_name elif lookup_type == 'week': return "DATEPART(iso_week, %s)" % field_name elif lookup_type == 'iso_year': return "YEAR(DATEADD(day, 26 - DATEPART(isoww, %s), %s))" % (field_name, field_name) else: return "DATEPART(%s, %s)" % (lookup_type, field_name) def date_interval_sql(self, timedelta): """ implements the interval functionality for expressions """ sec = timedelta.seconds + timedelta.days * 86400 sql = 'DATEADD(second, %d%%s, CAST(%%s AS datetime2))' % sec if timedelta.microseconds: sql = 'DATEADD(microsecond, %d%%s, CAST(%s AS datetime2))' % (timedelta.microseconds, sql) return sql def date_trunc_sql(self, lookup_type, field_name, tzname=''): CONVERT_YEAR = 'CONVERT(varchar, DATEPART(year, %s))' % field_name CONVERT_QUARTER = 'CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3))' % field_name CONVERT_MONTH = 'CONVERT(varchar, DATEPART(month, %s))' % field_name CONVERT_WEEK = "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, %s)" % (field_name, field_name) if lookup_type == 'year': return "CONVERT(datetime2, %s + '/01/01')" % CONVERT_YEAR if lookup_type == 'quarter': return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_QUARTER) if lookup_type == 'month': return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_MONTH) if lookup_type == 'week': return "CONVERT(datetime2, CONVERT(varchar, %s, 112))" % CONVERT_WEEK if lookup_type == 'day': return "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) sql = 'CAST(%s AS date)' % field_name return sql def datetime_cast_time_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) sql = 'CAST(%s AS time)' % field_name return sql def datetime_extract_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return 
self.date_extract_sql(lookup_type, field_name) def datetime_trunc_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) sql = '' if lookup_type in ('year', 'quarter', 'month', 'week', 'day'): sql = self.date_trunc_sql(lookup_type, field_name) elif lookup_type == 'hour': sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 14) + ':00:00')" % field_name elif lookup_type == 'minute': sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 17) + ':00')" % field_name elif lookup_type == 'second': sql = "CONVERT(datetime2, CONVERT(varchar, %s, 20))" % field_name return sql def for_update_sql(self, nowait=False, skip_locked=False, of=()): if skip_locked: return 'WITH (ROWLOCK, UPDLOCK, READPAST)' elif nowait: return 'WITH (NOWAIT, ROWLOCK, UPDLOCK)' else: return 'WITH (ROWLOCK, UPDLOCK)' def format_for_duration_arithmetic(self, sql): if sql == '%s': # use DATEADD only once because Django prepares only one parameter for this fmt = 'DATEADD(second, %s / 1000000%%s, CAST(%%s AS datetime2))' sql = '%%s' else: # use DATEADD twice to avoid arithmetic overflow for number part MICROSECOND = "DATEADD(microsecond, %s %%%%%%%% 1000000%%s, CAST(%%s AS datetime2))" fmt = 'DATEADD(second, %s / 1000000%%s, {})'.format(MICROSECOND) sql = (sql, sql) return fmt % sql def fulltext_search_sql(self, field_name): """ Returns the SQL WHERE clause to use in order to perform a full-text search of the given field_name. Note that the resulting string should contain a '%s' placeholder for the value being searched against. """ return 'CONTAINS(%s, %%s)' % field_name def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type == 'DateTimeField': converters.append(self.convert_datetimefield_value) elif internal_type == 'FloatField': converters.append(self.convert_floatfield_value) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) elif internal_type in ('BooleanField', 'NullBooleanField'): converters.append(self.convert_booleanfield_value) return converters def last_insert_id(self, cursor, table_name, pk_name): """ Given a cursor object that has just performed an INSERT statement into a table that has an auto-incrementing ID, returns the newly created ID. This method also receives the table name and the name of the primary-key column. """ # TODO: Check how the `last_insert_id` is being used in the upper layers # in context of multithreaded access, compare with other backends # IDENT_CURRENT: http://msdn2.microsoft.com/en-us/library/ms175098.aspx # SCOPE_IDENTITY: http://msdn2.microsoft.com/en-us/library/ms190315.aspx # @@IDENTITY: http://msdn2.microsoft.com/en-us/library/ms187342.aspx # IDENT_CURRENT is not limited by scope and session; it is limited to # a specified table. IDENT_CURRENT returns the value generated for # a specific table in any session and any scope. # SCOPE_IDENTITY and @@IDENTITY return the last identity values that # are generated in any table in the current session. However, # SCOPE_IDENTITY returns values inserted only within the current scope; # @@IDENTITY is not limited to a specific scope. 
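# Illustrative comparison (hypothetical table name):
#     SELECT IDENT_CURRENT('app_post') -- last identity for that table, any session
#     SELECT SCOPE_IDENTITY()          -- last identity in the current scope
#     SELECT @@IDENTITY                -- last identity in the session, any scope
# The query below relies on IDENT_CURRENT, hence the note above about
# multithreaded access: an insert from another session can change the result.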
table_name = self.quote_name(table_name) cursor.execute("SELECT CAST(IDENT_CURRENT(%s) AS int)", [table_name]) return cursor.fetchone()[0] def lookup_cast(self, lookup_type, internal_type=None): if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'): return "UPPER(%s)" return "%s" def max_name_length(self): return 128 def no_limit_value(self): return None def prepare_sql_script(self, sql, _allow_fallback=False): return [sql] def quote_name(self, name): """ Returns a quoted version of the given table, index or column name. Does not quote the given name if it's already been quoted. """ if name.startswith('[') and name.endswith(']'): return name # Quoting once is enough. return '[%s]' % name def random_function_sql(self): """ Returns a SQL expression that returns a random value. """ return "RAND()" def regex_lookup(self, lookup_type): """ Returns the string to use in a query when performing regular expression lookups (using "regex" or "iregex"). The resulting string should contain a '%s' placeholder for the column being searched against. If the feature is not supported (or part of it is not supported), a NotImplementedError exception can be raised. """ match_option = {'iregex': 0, 'regex': 1}[lookup_type] return "dbo.REGEXP_LIKE(%%s, %%s, %s)=1" % (match_option,) def limit_offset_sql(self, low_mark, high_mark): """Return LIMIT/OFFSET SQL clause.""" limit, offset = self._get_limit_offset_params(low_mark, high_mark) return '%s%s' % ( (' OFFSET %d ROWS' % offset) if offset else '', (' FETCH FIRST %d ROWS ONLY' % limit) if limit else '', ) def last_executed_query(self, cursor, sql, params): """ Returns a string of the query last executed by the given cursor, with placeholders replaced with actual values. `sql` is the raw query containing placeholders, and `params` is the sequence of parameters. These are used by default, but this method exists for database backends to provide a better implementation according to their own quoting schemes. """ return super().last_executed_query(cursor, cursor.last_sql, cursor.last_params) def savepoint_create_sql(self, sid): """ Returns the SQL for starting a new savepoint. Only required if the "uses_savepoints" feature is True. The "sid" parameter is a string for the savepoint id. """ return "SAVE TRANSACTION %s" % sid def savepoint_rollback_sql(self, sid): """ Returns the SQL for rolling back the given savepoint. """ return "ROLLBACK TRANSACTION %s" % sid def _build_sequences(self, sequences, cursor): seqs = [] for seq in sequences: cursor.execute("SELECT COUNT(*) FROM %s" % self.quote_name(seq["table"])) rowcnt = cursor.fetchone()[0] elem = {} if rowcnt: elem['start_id'] = 0 else: elem['start_id'] = 1 elem.update(seq) seqs.append(elem) return seqs def _sql_flush_new(self, style, tables, *, reset_sequences=False, allow_cascade=False): if reset_sequences: return [ sequence for sequence in self.connection.introspection.sequence_list() ] return [] def _sql_flush_old(self, style, tables, sequences, allow_cascade=False): return sequences def sql_flush(self, style, tables, *args, **kwargs): """ Returns a list of SQL statements required to remove all data from the given database tables (without actually removing the tables themselves). The returned value also includes SQL statements required to reset DB sequences passed in :param sequences:. The `style` argument is a Style object as returned by either color_style() or no_style() in django.core.management.color. 
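Illustrative output sketch for a single table (hypothetical names): ALTER TABLE [app_comment] NOCHECK CONSTRAINT [FK_app_comment_tag_id]; DELETE FROM [app_tag]; DBCC CHECKIDENT ([app_tag], RESEED, 0) WITH NO_INFOMSGS; ALTER TABLE [app_comment] CHECK CONSTRAINT [FK_app_comment_tag_id];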
The `allow_cascade` argument determines whether truncation may cascade to tables with foreign keys pointing to the tables being truncated. """ if not tables: return [] if django_version >= (3, 1): sequences = self._sql_flush_new(style, tables, *args, **kwargs) else: sequences = self._sql_flush_old(style, tables, *args, **kwargs) from django.db import connections cursor = connections[self.connection.alias].cursor() seqs = self._build_sequences(sequences, cursor) COLUMNS = "TABLE_NAME, CONSTRAINT_NAME" WHERE = "CONSTRAINT_TYPE not in ('PRIMARY KEY','UNIQUE')" cursor.execute( "SELECT {} FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE {}".format(COLUMNS, WHERE)) fks = cursor.fetchall() sql_list = ['ALTER TABLE %s NOCHECK CONSTRAINT %s;' % (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks] sql_list.extend(['%s %s %s;' % (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table))) for table in tables]) if self.connection.to_azure_sql_db and self.connection.sql_server_version < 2014: warnings.warn("Resetting identity columns is not supported " "on this version of Azure SQL Database.", RuntimeWarning) else: # Then reset the counters on each table. sql_list.extend(['%s %s (%s, %s, %s) %s %s;' % ( style.SQL_KEYWORD('DBCC'), style.SQL_KEYWORD('CHECKIDENT'), style.SQL_FIELD(self.quote_name(seq["table"])), style.SQL_KEYWORD('RESEED'), style.SQL_FIELD('%d' % seq['start_id']), style.SQL_KEYWORD('WITH'), style.SQL_KEYWORD('NO_INFOMSGS'), ) for seq in seqs]) sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' % (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks]) return sql_list def start_transaction_sql(self): """ Returns the SQL statement required to start a transaction. """ return "BEGIN TRANSACTION" def subtract_temporals(self, internal_type, lhs, rhs): lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs if internal_type == 'DateField': sql = "CAST(DATEDIFF(day, %(rhs)s, %(lhs)s) AS bigint) * 86400 * 1000000" params = rhs_params + lhs_params else: SECOND = "DATEDIFF(second, %(rhs)s, %(lhs)s)" MICROSECOND = "DATEPART(microsecond, %(lhs)s) - DATEPART(microsecond, %(rhs)s)" sql = "CAST({} AS bigint) * 1000000 + {}".format(SECOND, MICROSECOND) params = rhs_params + lhs_params * 2 + rhs_params return sql % {'lhs': lhs_sql, 'rhs': rhs_sql}, params def tablespace_sql(self, tablespace, inline=False): """ Returns the SQL that will be appended to tables or rows to define a tablespace. Returns '' if the backend doesn't use tablespaces. """ return "ON %s" % self.quote_name(tablespace) def prep_for_like_query(self, x): """Prepares a value for use in a LIKE query.""" # http://msdn2.microsoft.com/en-us/library/ms179859.aspx return force_str(x).replace('\\', '\\\\').replace('[', '[[]').replace('%', '[%]').replace('_', '[_]') def prep_for_iexact_query(self, x): """ Same as prep_for_like_query(), but called for "iexact" matches, which need not necessarily be implemented using "LIKE" in the backend. """ return x def adapt_datetimefield_value(self, value): """ Transforms a datetime value to an object compatible with what is expected by the backend driver for datetime columns.
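For example (illustrative), with USE_TZ = True an aware datetime(2021, 5, 1, 12, 0, tzinfo=<UTC>) is converted to the connection's time zone and stripped of its tzinfo before being bound, because pyodbc does not support datetimeoffset.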
""" if value is None: return None if settings.USE_TZ and timezone.is_aware(value): # pyodbc donesn't support datetimeoffset value = value.astimezone(self.connection.timezone).replace(tzinfo=None) return value def time_trunc_sql(self, lookup_type, field_name, tzname=''): # if self.connection.sql_server_version >= 2012: # fields = { # 'hour': 'DATEPART(hour, %s)' % field_name, # 'minute': 'DATEPART(minute, %s)' % field_name if lookup_type != 'hour' else '0', # 'second': 'DATEPART(second, %s)' % field_name if lookup_type == 'second' else '0', # } # sql = 'TIMEFROMPARTS(%(hour)s, %(minute)s, %(second)s, 0, 0)' % fields if lookup_type == 'hour': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 3) + ':00:00')" % field_name elif lookup_type == 'minute': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 6) + ':00')" % field_name elif lookup_type == 'second': sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 9))" % field_name return sql def conditional_expression_supported_in_where_clause(self, expression): """ Following "Moved conditional expression wrapping to the Exact lookup" in django 3.1 https://github.com/django/django/commit/37e6c5b79bd0529a3c85b8c478e4002fd33a2a1d """ if isinstance(expression, (Exists, WhereNode)): return True if isinstance(expression, ExpressionWrapper) and expression.conditional: return self.conditional_expression_supported_in_where_clause(expression.expression) if isinstance(expression, RawSQL) and expression.conditional: return True return False mssql-django-1.1.2/mssql/schema.py000066400000000000000000001612111417607077400171020ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. # Licensed under the BSD license. import binascii import datetime from django.db.backends.base.schema import ( BaseDatabaseSchemaEditor, _is_relevant_relation, _related_non_m2m_objects, logger, ) from django.db.backends.ddl_references import ( Columns, IndexName, Statement as DjStatement, Table, ) from django import VERSION as django_version from django.db.models import Index, UniqueConstraint from django.db.models.fields import AutoField, BigAutoField from django.db.models.sql.where import AND from django.db.transaction import TransactionManagementError from django.utils.encoding import force_str if django_version >= (4, 0): from django.db.models.sql import Query from django.db.backends.ddl_references import Expressions class Statement(DjStatement): def __hash__(self): return hash((self.template, str(self.parts['name']))) def __eq__(self, other): return self.template == other.template and str(self.parts['name']) == str(other.parts['name']) def rename_column_references(self, table, old_column, new_column): for part in self.parts.values(): if hasattr(part, 'rename_column_references'): part.rename_column_references(table, old_column, new_column) condition = self.parts['condition'] if condition: self.parts['condition'] = condition.replace(f'[{old_column}]', f'[{new_column}]') class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): _sql_check_constraint = " CONSTRAINT %(name)s CHECK (%(check)s)" _sql_select_default_constraint_name = "SELECT" \ " d.name " \ "FROM sys.default_constraints d " \ "INNER JOIN sys.tables t ON" \ " d.parent_object_id = t.object_id " \ "INNER JOIN sys.columns c ON" \ " d.parent_object_id = c.object_id AND" \ " d.parent_column_id = c.column_id " \ "INNER JOIN sys.schemas s ON" \ " t.schema_id = s.schema_id " \ "WHERE" \ " t.name = %(table)s AND" \ " c.name = %(column)s" sql_alter_column_default = "ADD DEFAULT %(default)s FOR 
%(column)s" sql_alter_column_no_default = "DROP CONSTRAINT %(column)s" sql_alter_column_not_null = "ALTER COLUMN %(column)s %(type)s NOT NULL" sql_alter_column_null = "ALTER COLUMN %(column)s %(type)s NULL" sql_alter_column_type = "ALTER COLUMN %(column)s %(type)s" sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" sql_delete_index = "DROP INDEX %(name)s ON %(table)s" sql_delete_table = """ DECLARE @sql_froeign_constraint_name nvarchar(128) DECLARE @sql_drop_constraint nvarchar(300) WHILE EXISTS(SELECT 1 FROM sys.foreign_keys WHERE referenced_object_id = object_id('%(table)s')) BEGIN SELECT TOP 1 @sql_froeign_constraint_name = name FROM sys.foreign_keys WHERE referenced_object_id = object_id('%(table)s') SELECT @sql_drop_constraint = 'ALTER TABLE [' + OBJECT_NAME(parent_object_id) + '] ' + 'DROP CONSTRAINT [' + @sql_froeign_constraint_name + '] ' FROM sys.foreign_keys WHERE referenced_object_id = object_id('%(table)s') and name = @sql_froeign_constraint_name exec sp_executesql @sql_drop_constraint END DROP TABLE %(table)s """ sql_rename_column = "EXEC sp_rename '%(table)s.%(old_column)s', %(new_column)s, 'COLUMN'" sql_rename_table = "EXEC sp_rename %(old_table)s, %(new_table)s" sql_create_unique_null = "CREATE UNIQUE INDEX %(name)s ON %(table)s(%(columns)s) " \ "WHERE %(columns)s IS NOT NULL" def _alter_column_default_sql(self, model, old_field, new_field, drop=False): """ Hook to specialize column default alteration. Return a (sql, params) fragment to add or drop (depending on the drop argument) a default to new_field's column. """ new_default = self.effective_default(new_field) default = '%s' params = [new_default] column = self.quote_name(new_field.column) if drop: params = [] # SQL Server requires the name of the default constraint result = self.execute( self._sql_select_default_constraint_name % { "table": self.quote_value(model._meta.db_table), "column": self.quote_value(new_field.column), }, has_result=True ) if result: for row in result: column = self.quote_name(next(iter(row))) elif self.connection.features.requires_literal_defaults: # Some databases (Oracle) can't take defaults as a parameter # If this is the case, the SchemaEditor for that database should # implement prepare_default(). default = self.prepare_default(new_default) params = [] new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default return ( sql % { 'column': column, 'type': new_db_params['type'], 'default': default, }, params, ) def _alter_column_null_sql(self, model, old_field, new_field): """ Hook to specialize column null alteration. Return a (sql, params) fragment to set a column to null or non-null as required by new_field, or None if no changes are required. """ if (self.connection.features.interprets_empty_strings_as_nulls and new_field.get_internal_type() in ("CharField", "TextField")): # The field is nullable in the database anyway, leave it alone. 
return else: new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], }, [], ) def _alter_column_type_sql(self, model, old_field, new_field, new_type): new_type = self._set_field_new_type_null_status(old_field, new_type) return super()._alter_column_type_sql(model, old_field, new_field, new_type) def alter_unique_together(self, model, old_unique_together, new_unique_together): """ Deal with a model changing its unique_together. The input unique_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. """ olds = {tuple(fields) for fields in old_unique_together} news = {tuple(fields) for fields in new_unique_together} # Deleted uniques for fields in olds.difference(news): self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_index) # Created uniques if django_version >= (4, 0): for field_names in news.difference(olds): fields = [model._meta.get_field(field) for field in field_names] columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, fields, condition=condition) self.execute(sql) else: for fields in news.difference(olds): columns = [model._meta.get_field(field).column for field in fields] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, columns, condition=condition) self.execute(sql) def _model_indexes_sql(self, model): """ Return a list of all index SQL statements (field indexes, index_together, Meta.indexes) for the specified model. 
""" if not model._meta.managed or model._meta.proxy or model._meta.swapped: return [] output = [] for field in model._meta.local_fields: output.extend(self._field_indexes_sql(model, field)) for field_names in model._meta.index_together: fields = [model._meta.get_field(field) for field in field_names] output.append(self._create_index_sql(model, fields, suffix="_idx")) if django_version >= (4, 0): for field_names in model._meta.unique_together: fields = [model._meta.get_field(field) for field in field_names] columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, fields, condition=condition) output.append(sql) else: for field_names in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) sql = self._create_unique_sql(model, columns, condition=condition) output.append(sql) for index in model._meta.indexes: if django_version >= (3, 2) and ( not index.contains_expressions or self.connection.features.supports_expression_indexes ): output.append(index.create_sql(model, self)) else: output.append(index.create_sql(model, self)) return output def _db_table_constraint_names(self, db_table, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None, type_=None, exclude=None): """Return all constraint names matching the columns and conditions.""" if column_names is not None: column_names = [ self.connection.introspection.identifier_converter(name) for name in column_names ] with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, db_table) result = [] for name, infodict in constraints.items(): if column_names is None or column_names == infodict['columns']: if unique is not None and infodict['unique'] != unique: continue if primary_key is not None and infodict['primary_key'] != primary_key: continue if index is not None and infodict['index'] != index: continue if check is not None and infodict['check'] != check: continue if foreign_key is not None and not infodict['foreign_key']: continue if type_ is not None and infodict['type'] != type_: continue if not exclude or name not in exclude: result.append(name) return result def _db_table_delete_constraint_sql(self, template, db_table, name): return Statement( template, table=Table(db_table, self.quote_name), name=self.quote_name(name), include='' ) def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): """Actually perform a "physical" (non-ManyToMany) field update.""" # the backend doesn't support altering from/to (Big)AutoField # because of the limited capability of SQL Server to edit IDENTITY property for t in (AutoField, BigAutoField): if isinstance(old_field, t) or isinstance(new_field, t): raise NotImplementedError("the backend doesn't support altering from/to %s." 
% t.__name__) # Drop any FK constraints, we'll remake them later fks_dropped = set() if old_field.remote_field and old_field.db_constraint: # Drop index, SQL Server requires explicit deletion if not hasattr(new_field, 'db_constraint') or not new_field.db_constraint: index_names = self._constraint_names(model, [old_field.column], index=True) for index_name in index_names: self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name)) fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) if strict and len(fk_names) != 1: raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( len(fk_names), model._meta.db_table, old_field.column, )) for fk_name in fk_names: fks_dropped.add((old_field.column,)) self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name)) # Has unique been removed? if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)): # Find the unique constraint for this field constraint_names = self._constraint_names(model, [old_field.column], unique=True, primary_key=False) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name)) # Drop incoming FK constraints if the field is a primary key or unique, # which might be a to_field target, and things are going to change. drop_foreign_keys = ( ( (old_field.primary_key and new_field.primary_key) or (old_field.unique and new_field.unique) ) and old_type != new_type ) if drop_foreign_keys: # '_meta.related_field' also contains M2M reverse fields, these # will be filtered out for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): rel_fk_names = self._constraint_names( new_rel.related_model, [new_rel.field.column], foreign_key=True ) for fk_name in rel_fk_names: self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name)) # Removed an index? (no strict check, as multiple indexes are possible) # Remove indexes if db_index switched to False or a unique constraint # will now be used in lieu of an index. The following lines from the # truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # True | False | False | False # True | False | False | True # True | False | True | True if (old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique)) or ( # Drop indexes on nvarchar columns that are changing to a different type # SQL Server requires explicit deletion (old_field.db_index or old_field.unique) and ( (old_type.startswith('nvarchar') and not new_type.startswith('nvarchar')) )): # Find the index for this field meta_index_names = {index.name for index in model._meta.indexes} # Retrieve only BTREE indexes since this is what's created with # db_index=True. index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix) for index_name in index_names: if index_name not in meta_index_names: # The only way to check if an index was created with # db_index=True or with Index(['field'], name='foo') # is to look at its name (refs #28053). 
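                        # Illustrative, with hypothetical names: an index created by
                        # db_index=True gets a generated name such as
                        # 'testapp_author_name_f775a143', which is absent from
                        # meta_index_names and is therefore dropped here, while an
                        # Index(fields=['name'], name='author_name_idx') declared in
                        # Meta.indexes is preserved.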
                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
        # Change check constraints?
        if (old_db_params['check'] != new_db_params['check'] and old_db_params['check']) or (
            # SQL Server requires explicit deletion before altering column type with the same constraint
            old_db_params['check'] == new_db_params['check'] and old_db_params['check'] and
            old_db_params['type'] != new_db_params['type']
        ):
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            sql_restore_index = ''
            # Drop unique indexes for table to be altered
            index_names = self._db_table_constraint_names(model._meta.db_table, index=True)
            for index_name in index_names:
                if index_name.endswith('uniq'):
                    with self.connection.cursor() as cursor:
                        cursor.execute(f"""
                        SELECT COL_NAME(ic.object_id,ic.column_id) AS column_name, filter_definition
                        FROM sys.indexes AS i
                        INNER JOIN sys.index_columns AS ic
                        ON i.object_id = ic.object_id AND i.index_id = ic.index_id
                        WHERE i.object_id = OBJECT_ID('{model._meta.db_table}')
                        and i.name = '{index_name}'
                        """)
                        result = cursor.fetchall()
                    columns_to_recreate_index = ', '.join(['%s' % self.quote_name(column[0]) for column in result])
                    filter_definition = result[0][1]
                    sql_restore_index += f'CREATE UNIQUE INDEX {index_name} ON {model._meta.db_table} ({columns_to_recreate_index}) WHERE {filter_definition};'
                    self.execute(self._db_table_delete_constraint_sql(
                        self.sql_delete_index, model._meta.db_table, index_name))
            self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
            # Restore indexes for altered table
            if sql_restore_index:
                self.execute(sql_restore_index.replace(f'[{old_field.column}]', f'[{new_field.column}]'))
            # Rename all references to the renamed column.
            for sql in self.deferred_sql:
                if isinstance(sql, DjStatement):
                    sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column)
        # Next, start accumulating actions to do
        actions = []
        null_actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type)
            actions.append(fragment)
            post_actions.extend(other_actions)
            # Drop unique constraint, SQL Server requires explicit deletion
            self._delete_unique_constraints(model, old_field, new_field, strict)
            # Drop indexes, SQL Server requires explicit deletion
            self._delete_indexes(model, old_field, new_field)
        # When changing a column NULL constraint to NOT NULL with a given
        # default value, we need to perform 4 steps:
        #  1. Add a default for new incoming writes
        #  2. Update existing NULL rows with new default
        #  3. Replace NULL constraint with NOT NULL
        #  4. Drop the default again.
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        needs_database_default = (
            old_field.null and
            not new_field.null and
            old_default != new_default and
            new_default is not None and
            not self.skip_default(new_field)
        )
        if needs_database_default:
            actions.append(self._alter_column_default_sql(model, old_field, new_field))
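        # Hedged sketch of the T-SQL those four steps translate to, using the
        # templates defined on this class plus Django's sql_update_with_default
        # (table, column and default values here are hypothetical):
        #   1. ALTER TABLE [testapp_post] ADD DEFAULT '' FOR [title]
        #   2. UPDATE [testapp_post] SET [title] = '' WHERE [title] IS NULL
        #   3. ALTER TABLE [testapp_post] ALTER COLUMN [title] nvarchar(255) NOT NULL
        #   4. ALTER TABLE [testapp_post] DROP CONSTRAINT [DF__testapp_p__title__...]
        #      (the constraint name is resolved via _sql_select_default_constraint_name)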
        # Nullability change?
        if old_field.null != new_field.null:
            fragment = self._alter_column_null_sql(model, old_field, new_field)
            if fragment:
                null_actions.append(fragment)
            if not new_field.null:
                # Drop unique constraint, SQL Server requires explicit deletion
                self._delete_unique_constraints(model, old_field, new_field, strict)
                # Drop indexes, SQL Server requires explicit deletion
                indexes_dropped = self._delete_indexes(model, old_field, new_field)
                if (
                    new_field.get_internal_type() not in ("JSONField", "TextField") and
                    (old_field.db_index or not new_field.db_index) and
                    new_field.db_index or
                    (indexes_dropped and sorted(indexes_dropped) == sorted(
                        [index.name for index in model._meta.indexes]))
                ):
                    create_index_sql_statement = self._create_index_sql(model, [new_field])
                    if create_index_sql_statement.__str__() not in [sql.__str__() for sql in self.deferred_sql]:
                        post_actions.append((create_index_sql_statement, ()))
        # Only if we have a default and there is a change from NULL to NOT NULL
        four_way_default_alteration = (
            new_field.has_default() and
            (old_field.null and not new_field.null)
        )
        if actions or null_actions:
            if not four_way_default_alteration:
                # If we don't have to do a 4-way default alteration we can
                # directly run a (NOT) NULL alteration
                actions = actions + null_actions
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters and actions:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), sum(params, []))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
            if four_way_default_alteration:
                # Update existing rows with default value
                self.execute(
                    self.sql_update_with_default % {
                        "table": self.quote_name(model._meta.db_table),
                        "column": self.quote_name(new_field.column),
                        "default": "%s",
                    },
                    [new_default],
                )
                # Since we didn't run a NOT NULL change before we need to do it
                # now
                for sql, params in null_actions:
                    self.execute(
                        self.sql_alter_column % {
                            "table": self.quote_name(model._meta.db_table),
                            "changes": sql,
                        },
                        params,
                    )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # If primary_key changed to False, delete the primary key constraint.
        if old_field.primary_key and not new_field.primary_key:
            self._delete_primary_key(model, strict)
        # Added a unique?
        if self._unique_should_be_added(old_field, new_field):
            if (self.connection.features.supports_nullable_unique_constraints and
                    not new_field.many_to_many and new_field.null):
                self.execute(
                    self._create_index_sql(
                        model, [new_field], sql=self.sql_create_unique_null, suffix="_uniq"
                    )
                )
            else:
                if django_version >= (4, 0):
                    self.execute(self._create_unique_sql(model, [new_field]))
                else:
                    self.execute(self._create_unique_sql(model, [new_field.column]))
        # Added an index? Add an index if db_index switched to True or a unique
        # constraint will no longer be used in lieu of an index.
The following # lines from the truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # False | False | True | False # False | True | True | False # True | True | True | False if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique: self.execute(self._create_index_sql(model, [new_field])) # Restore indexes & unique constraints deleted above, SQL Server requires explicit restoration if (old_type != new_type or (old_field.null and not new_field.null)) and ( old_field.column == new_field.column ): # Restore unique constraints # Note: if nullable they are implemented via an explicit filtered UNIQUE INDEX (not CONSTRAINT) # in order to get ANSI-compliant NULL behaviour (i.e. NULL != NULL, multiple are allowed) if old_field.unique and new_field.unique: if new_field.null: self.execute( self._create_index_sql( model, [old_field], sql=self.sql_create_unique_null, suffix="_uniq" ) ) else: if django_version >= (4, 0): self.execute(self._create_unique_sql(model, [old_field])) else: self.execute(self._create_unique_sql(model, columns=[old_field.column])) else: if django_version >= (4, 0): for field_names in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in field_names] fields = [model._meta.get_field(field) for field in field_names] if old_field.column in columns: condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) self.execute(self._create_unique_sql(model, fields, condition=condition)) else: for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] if old_field.column in columns: condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) self.execute(self._create_unique_sql(model, columns, condition=condition)) # Restore indexes index_columns = [] if old_field.db_index and new_field.db_index: index_columns.append([old_field]) else: for fields in model._meta.index_together: columns = [model._meta.get_field(field) for field in fields] if old_field.column in [c.column for c in columns]: index_columns.append(columns) if index_columns: for columns in index_columns: create_index_sql_statement = self._create_index_sql(model, columns) if (create_index_sql_statement.__str__() not in [sql.__str__() for sql in self.deferred_sql] + [statement[0].__str__() for statement in post_actions] ): self.execute(create_index_sql_statement) # Type alteration on primary key? Then we need to alter the column # referring to us. rels_to_update = [] if old_field.primary_key and new_field.primary_key and old_type != new_type: rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Changed to become primary key? 
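        # Hedged illustration (hypothetical names): promoting a field to primary
        # key emits roughly
        #   ALTER TABLE [testapp_thing] ADD CONSTRAINT [testapp_thing_token_pk]
        #       PRIMARY KEY ([token])
        # via Django's sql_create_pk template, with the '_pk'-suffixed name
        # generated by _create_index_name; every FK column pointing at the old
        # primary key is then re-typed through rels_to_update below.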
if self._field_became_primary_key(old_field, new_field): # Make the new one self.execute( self.sql_create_pk % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name( self._create_index_name(model._meta.db_table, [new_field.column], suffix="_pk") ), "columns": self.quote_name(new_field.column), } ) # Update all referencing columns rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Handle our type alters on the other end of rels from the PK stuff above for old_rel, new_rel in rels_to_update: rel_db_params = new_rel.field.db_parameters(connection=self.connection) rel_type = rel_db_params['type'] fragment, other_actions = self._alter_column_type_sql( new_rel.related_model, old_rel.field, new_rel.field, rel_type ) # Drop related_model indexes, so it can be altered index_names = self._db_table_constraint_names(old_rel.related_model._meta.db_table, index=True) for index_name in index_names: self.execute(self._db_table_delete_constraint_sql( self.sql_delete_index, old_rel.related_model._meta.db_table, index_name)) self.execute( self.sql_alter_column % { "table": self.quote_name(new_rel.related_model._meta.db_table), "changes": fragment[0], }, fragment[1], ) for sql, params in other_actions: self.execute(sql, params) # Restore related_model indexes self.execute(self._create_index_sql(new_rel.related_model, [new_rel.field])) # Does it have a foreign key? if (new_field.remote_field and (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and new_field.db_constraint): self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")) # Rebuild FKs that pointed to us if we previously had to drop them if drop_foreign_keys: for rel in new_field.model._meta.related_objects: if _is_relevant_relation(rel, new_field) and rel.field.db_constraint: self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk")) # Does it have check constraints we need to add? 
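        # Hedged illustration (hypothetical names): for e.g. a PositiveIntegerField
        # the re-created constraint is roughly
        #   ALTER TABLE [testapp_item] ADD CONSTRAINT [testapp_item_count_check]
        #       CHECK ([count] >= 0)
        # using Django's sql_create_check template and the '_check'-suffixed name
        # generated below.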
if (old_db_params['check'] != new_db_params['check'] and new_db_params['check']) or ( # SQL Server requires explicit creation after altering column type with the same constraint old_db_params['check'] == new_db_params['check'] and new_db_params['check'] and old_db_params['type'] != new_db_params['type'] ): self.execute( self.sql_create_check % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name( self._create_index_name(model._meta.db_table, [new_field.column], suffix="_check") ), "column": self.quote_name(new_field.column), "check": new_db_params['check'], } ) # Drop the default if we need to # (Django usually does not use in-database defaults) if needs_database_default: changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def _delete_indexes(self, model, old_field, new_field): index_columns = [] index_names = [] if old_field.db_index and new_field.db_index: index_columns.append([old_field.column]) elif old_field.null != new_field.null: index_columns.append([old_field.column]) for fields in model._meta.index_together: columns = [model._meta.get_field(field).column for field in fields] if old_field.column in columns: index_columns.append(columns) for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] if old_field.column in columns: index_columns.append(columns) if index_columns: for columns in index_columns: index_names = self._constraint_names(model, columns, index=True) for index_name in index_names: self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name)) return index_names def _delete_unique_constraints(self, model, old_field, new_field, strict=False): unique_columns = [] if old_field.unique and new_field.unique: unique_columns.append([old_field.column]) if unique_columns: for columns in unique_columns: constraint_names_normal = self._constraint_names(model, columns, unique=True, index=False) constraint_names_index = self._constraint_names(model, columns, unique=True, index=True) constraint_names = constraint_names_normal + constraint_names_index if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names_normal: self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name)) # Unique indexes which are not table constraints must be deleted using the appropriate SQL. # These may exist for example to enforce ANSI-compliant unique constraints on nullable columns. for index_name in constraint_names_index: self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name)) def _rename_field_sql(self, table, old_field, new_field, new_type): new_type = self._set_field_new_type_null_status(old_field, new_type) return super()._rename_field_sql(table, old_field, new_field, new_type) def _set_field_new_type_null_status(self, field, new_type): """ Keep the null property of the old field. If it has changed, it will be handled separately. """ if field.null: new_type += " NULL" else: new_type += " NOT NULL" return new_type def add_field(self, model, field): """ Create a field on a model. 
Usually involves adding a column, but may involve adding a table instead (for M2M fields). """ # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.create_model(field.remote_field.through) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it if definition is None: return if (self.connection.features.supports_nullable_unique_constraints and not field.many_to_many and field.null and field.unique): definition = definition.replace(' UNIQUE', '') self.deferred_sql.append(self._create_index_sql( model, [field], sql=self.sql_create_unique_null, suffix="_uniq" )) # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += " CHECK (%s)" % db_params['check'] # Build the SQL and run it sql = self.sql_create_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), "definition": definition, } self.execute(sql, params) # Drop the default if we need to # (Django usually does not use in-database defaults) if not self.skip_default(field) and self.effective_default(field) is not None: changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Add an index, if required self.deferred_sql.extend(self._field_indexes_sql(model, field)) # Add any FK constraints later if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint: self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s")) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() if django_version >= (4, 0): def _create_unique_sql(self, model, fields, name=None, condition=None, deferrable=None, include=None, opclasses=None, expressions=None): if (deferrable and not getattr(self.connection.features, 'supports_deferrable_unique_constraints', False) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes)): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) compiler = Query(model, alias_cols=False).get_compiler(connection=self.connection) columns = [field.column for field in fields] table = model._meta.db_table if name is None: name = IndexName(table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) if columns: columns = self._index_columns(table, columns, col_suffixes=(), opclasses=opclasses) else: columns = Expressions(table, expressions, compiler, self.quote_value) statement_args = { "deferrable": self._deferrable_constraint_sql(deferrable) } include = self._index_include_sql(model, include) if condition: return Statement( self.sql_create_unique_index, table=self.quote_name(table), name=name, columns=columns, condition=' WHERE ' + condition, **statement_args, include=include, ) if self.connection.features.supports_partial_indexes else None else: return Statement( self.sql_create_unique, table=self.quote_name(table), name=name, columns=columns, **statement_args, include=include, ) else: def _create_unique_sql(self, model, 
columns, name=None, condition=None, deferrable=None, include=None, opclasses=None, expressions=None): if (deferrable and not getattr(self.connection.features, 'supports_deferrable_unique_constraints', False) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes)): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) table = Table(model._meta.db_table, self.quote_name) if name is None: name = IndexName(model._meta.db_table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) columns = Columns(table, columns, self.quote_name) statement_args = { "deferrable": self._deferrable_constraint_sql(deferrable) } if django_version >= (3, 1) else {} include = self._index_include_sql(model, include) if django_version >= (3, 2) else '' if condition: return Statement( self.sql_create_unique_index, table=self.quote_name(table) if isinstance(table, str) else table, name=name, columns=columns, condition=' WHERE ' + condition, **statement_args, include=include, ) if self.connection.features.supports_partial_indexes else None else: return Statement( self.sql_create_unique, table=self.quote_name(table) if isinstance(table, str) else table, name=name, columns=columns, **statement_args, include=include, ) def _create_index_sql(self, model, fields, *, name=None, suffix='', using='', db_tablespace=None, col_suffixes=(), sql=None, opclasses=(), condition=None, include=None, expressions=None): """ Return the SQL statement to create the index for one or several fields. `sql` can be specified if the syntax differs from the standard (GIS indexes, ...). """ if django_version >= (3, 2): return super()._create_index_sql( model, fields=fields, name=name, suffix=suffix, using=using, db_tablespace=db_tablespace, col_suffixes=col_suffixes, sql=sql, opclasses=opclasses, condition=condition, include=include, expressions=expressions, ) return super()._create_index_sql( model, fields=fields, name=name, suffix=suffix, using=using, db_tablespace=db_tablespace, col_suffixes=col_suffixes, sql=sql, opclasses=opclasses, condition=condition, ) def create_model(self, model): """ Takes a model and creates a table for it in the database. Will also create any accompanying indexes or unique constraints. 
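        Note (illustrative): fields declared unique=True together with null=True
        are not created as UNIQUE constraints here; instead a deferred filtered
        index is queued, roughly
            CREATE UNIQUE INDEX [x_uniq] ON [table]([x]) WHERE [x] IS NOT NULL
        so that multiple NULLs remain allowed, per the ANSI behaviour Django expects.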
""" # Create column SQL, add FK deferreds if needed column_sqls = [] params = [] for field in model._meta.local_fields: # SQL definition, extra_params = self.column_sql(model, field) if definition is None: continue if (self.connection.features.supports_nullable_unique_constraints and not field.many_to_many and field.null and field.unique): definition = definition.replace(' UNIQUE', '') self.deferred_sql.append(self._create_index_sql( model, [field], sql=self.sql_create_unique_null, suffix="_uniq" )) # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: # SQL Server requires a name for the check constraint definition += self._sql_check_constraint % { "name": self._create_index_name(model._meta.db_table, [field.column], suffix="_check"), "check": db_params['check'] } # Autoincrement SQL (for backends with inline variant) col_type_suffix = field.db_type_suffix(connection=self.connection) if col_type_suffix: definition += " %s" % col_type_suffix params.extend(extra_params) # FK if field.remote_field and field.db_constraint: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column if self.sql_create_inline_fk: definition += " " + self.sql_create_inline_fk % { "to_table": self.quote_name(to_table), "to_column": self.quote_name(to_column), } elif self.connection.features.supports_foreign_keys: self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s")) # Add the SQL to our big list column_sqls.append("%s %s" % ( self.quote_name(field.column), definition, )) # Autoincrement SQL (for backends with post table definition variant) if field.get_internal_type() in ("AutoField", "BigAutoField", "SmallAutoField"): autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) if autoinc_sql: self.deferred_sql.extend(autoinc_sql) # Add any unique_togethers (always deferred, as some fields might be # created afterwards, like geometry fields with some backends) for field_names in model._meta.unique_together: fields = [model._meta.get_field(field) for field in field_names] columns = [model._meta.get_field(field).column for field in field_names] condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns]) if django_version >= (4, 0): self.deferred_sql.append(self._create_unique_sql(model, fields, condition=condition)) else: self.deferred_sql.append(self._create_unique_sql(model, columns, condition=condition)) constraints = [constraint.constraint_sql(model, self) for constraint in model._meta.constraints] # Make the table sql = self.sql_create_table % { "table": self.quote_name(model._meta.db_table), 'definition': ', '.join(constraint for constraint in (*column_sqls, *constraints) if constraint), } if model._meta.db_tablespace: tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace) if tablespace_sql: sql += ' ' + tablespace_sql # Prevent using [] as params, in the case a literal '%' is used in the definition self.execute(sql, params or None) # Add any field index and index_together's (deferred as SQLite3 _remake_table needs it) self.deferred_sql.extend(self._model_indexes_sql(model)) self.deferred_sql = list(set(self.deferred_sql)) # Make M2M tables for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.create_model(field.remote_field.through) def _delete_unique_sql( self, model, name, condition=None, deferrable=None, 
include=None, opclasses=None, expressions=None ): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes) ): return None if condition or include or opclasses: sql = self.sql_delete_index with self.connection.cursor() as cursor: cursor.execute( "SELECT 1 FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE WHERE CONSTRAINT_NAME = '%s'" % name) row = cursor.fetchone() if row: sql = self.sql_delete_unique else: sql = self.sql_delete_unique return self._delete_constraint_sql(sql, model, name) def delete_model(self, model): super().delete_model(model) def execute(self, sql, params=(), has_result=False): """ Executes the given SQL statement, with optional parameters. """ result = None # Don't perform the transactional DDL check if SQL is being collected # as it's not going to be executed anyway. if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl: raise TransactionManagementError( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) # Account for non-string statement objects. sql = str(sql) # Log the command we're running, then run it logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql}) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) else: cursor = self.connection.cursor() cursor.execute(sql, params) if has_result: result = cursor.fetchall() # the cursor can be closed only when the driver supports opening # multiple cursors on a connection because the migration command # has already opened a cursor outside this method if self.connection.supports_mars: cursor.close() return result def prepare_default(self, value): return self.quote_value(value) def quote_value(self, value): """ Returns a quoted version of the value so it's safe to use in an SQL string. This is not safe against injection from user code; it is intended only for use in making SQL scripts or preparing default values for particularly tricky backends (defaults are not user-defined, though, so this is safe). """ if isinstance(value, (datetime.datetime, datetime.date, datetime.time)): return "'%s'" % value elif isinstance(value, str): return "'%s'" % value.replace("'", "''") elif isinstance(value, (bytes, bytearray, memoryview)): return "0x%s" % force_str(binascii.hexlify(value)) elif isinstance(value, bool): return "1" if value else "0" else: return str(value) def remove_field(self, model, field): """ Removes a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. 
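        Note: SQL Server will not drop a column while constraints still reference
        it, so (as implemented below) any foreign key, index, primary key, check
        and unique constraints on the column are explicitly dropped first.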
""" # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.delete_model(field.remote_field.through) # It might not actually have a column behind it if field.db_parameters(connection=self.connection)['type'] is None: return # Drop any FK constraints, SQL Server requires explicit deletion with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table) for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['foreign_key']: self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, name)) # Drop any indexes, SQL Server requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['index']: self.execute(self.sql_delete_index % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop primary key constraint, SQL Server requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['primary_key']: self.execute(self.sql_delete_pk % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop check constraints, SQL Server requires explicit deletion for name, infodict in constraints.items(): if field.column in infodict['columns'] and infodict['check']: self.execute(self.sql_delete_check % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Drop unique constraints, SQL Server requires explicit deletion for name, infodict in constraints.items(): if (field.column in infodict['columns'] and infodict['unique'] and not infodict['primary_key'] and not infodict['index']): self.execute(self.sql_delete_unique % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(name), }) # Delete the column sql = self.sql_delete_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } self.execute(sql) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() # Remove all deferred statements referencing the deleted column. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column): self.deferred_sql.remove(sql) def add_constraint(self, model, constraint): if isinstance(constraint, UniqueConstraint) and constraint.condition and constraint.condition.connector != AND: raise NotImplementedError("The backend does not support %s conditions on unique constraint %s." % (constraint.condition.connector, constraint.name)) super().add_constraint(model, constraint) def _collate_sql(self, collation): return ' COLLATE ' + collation def _create_index_name(self, table_name, column_names, suffix=""): index_name = super()._create_index_name(table_name, column_names, suffix) # Check if the db_table specified a user-defined schema if('].[' in index_name): new_index_name = index_name.replace('[', '').replace(']', '').replace('.', '_') return new_index_name return index_name mssql-django-1.1.2/setup.cfg000066400000000000000000000001721417607077400157500ustar00rootroot00000000000000[flake8] exclude = .git,__pycache__,migrations # W504 is mutually exclusive with W503 ignore = W504 max-line-length = 119 mssql-django-1.1.2/setup.py000066400000000000000000000030571417607077400156460ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation. 
# Licensed under the BSD license. from os import path from setuptools import find_packages, setup CLASSIFIERS = [ 'License :: OSI Approved :: BSD License', 'Framework :: Django', "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows", 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'Framework :: Django :: 3.1', 'Framework :: Django :: 3.2', 'Framework :: Django :: 4.0', ] this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='mssql-django', version='1.1.2', description='Django backend for Microsoft SQL Server', long_description=long_description, long_description_content_type='text/markdown', author='Microsoft', author_email='opencode@microsoft.com', url='https://github.com/microsoft/mssql-django', project_urls={ 'Release Notes': 'https://github.com/microsoft/mssql-django/releases', }, license='BSD', packages=find_packages(), install_requires=[ 'django>=2.2,<4.1', 'pyodbc>=3.0', 'pytz', ], package_data={'mssql': ['regex_clr.dll']}, classifiers=CLASSIFIERS, keywords='django', ) mssql-django-1.1.2/test.sh000077500000000000000000000044711417607077400154530ustar00rootroot00000000000000# TODO: # # * m2m_through_regress # * many_to_one_null set -e DJANGO_VERSION="$(python -m django --version)" cd django git fetch --depth=1 origin +refs/tags/*:refs/tags/* git checkout $DJANGO_VERSION pip install -r tests/requirements/py3.txt coverage run tests/runtests.py --settings=testapp.settings --noinput \ aggregation \ aggregation_regress \ annotations \ backends \ basic \ bulk_create \ constraints \ custom_columns \ custom_lookups \ custom_managers \ custom_methods \ custom_migration_operations \ custom_pk \ datatypes \ dates \ datetimes \ db_functions \ db_typecasts \ db_utils \ dbshell \ defer \ defer_regress \ delete \ delete_regress \ distinct_on_fields \ empty \ expressions \ expressions_case \ expressions_window \ extra_regress \ field_deconstruction \ field_defaults \ field_subclassing \ filtered_relation \ fixtures \ fixtures_model_package \ fixtures_regress \ force_insert_update \ foreign_object \ from_db_value \ generic_relations \ generic_relations_regress \ get_earliest_or_latest \ get_object_or_404 \ get_or_create \ indexes \ inspectdb \ introspection \ invalid_models_tests \ known_related_objects \ lookup \ m2m_and_m2o \ m2m_intermediary \ m2m_multiple \ m2m_recursive \ m2m_regress \ m2m_signals \ m2m_through \ m2o_recursive \ managers_regress \ many_to_many \ many_to_one \ max_lengths \ migrate_signals \ migration_test_data_persistence \ migrations \ migrations2 \ model_fields \ model_indexes \ model_options \ mutually_referential \ nested_foreign_keys \ null_fk \ null_fk_ordering \ null_queries \ one_to_one \ or_lookups \ order_with_respect_to \ ordering \ pagination \ prefetch_related \ queries \ queryset_pickle \ raw_query \ reverse_lookup \ save_delete_hooks \ schema \ select_for_update \ select_related \ select_related_onetoone \ select_related_regress \ serializers \ transaction_hooks \ transactions \ update \ update_only_fields python -m coverage xml --include '*mssql*' --omit '*virtualenvs*' -o coverage.xml 
mssql-django-1.1.2/testapp/000077500000000000000000000000001417607077400156075ustar00rootroot00000000000000mssql-django-1.1.2/testapp/__init__.py000066400000000000000000000000001417607077400177060ustar00rootroot00000000000000mssql-django-1.1.2/testapp/migrations/000077500000000000000000000000001417607077400177635ustar00rootroot00000000000000mssql-django-1.1.2/testapp/migrations/0001_initial.py000066400000000000000000000045301417607077400224300ustar00rootroot00000000000000# Generated by Django 2.2.8.dev20191112211527 on 2019-11-15 01:38 import uuid from django.db import migrations, models import django class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Author', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Editor', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Post', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='title')), ], ), migrations.AddField( model_name='post', name='alt_editor', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='testapp.Editor'), ), migrations.AddField( model_name='post', name='author', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.Author'), ), migrations.AlterUniqueTogether( name='post', unique_together={('author', 'title', 'alt_editor')}, ), migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.Post')), ('text', models.TextField(verbose_name='text')), ('created_at', models.DateTimeField(default=django.utils.timezone.now)), ], ), migrations.CreateModel( name='UUIDModel', fields=[ ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), ], ), ] mssql-django-1.1.2/testapp/migrations/0002_test_unique_nullable_part1.py000066400000000000000000000013001417607077400263220ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0001_initial'), ] operations = [ # Prep test for issue https://github.com/ESSolutions/django-mssql-backend/issues/38 # Create with a field that is unique *and* nullable so it is implemented with a filtered unique index. migrations.CreateModel( name='TestUniqueNullableModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('test_field', models.CharField(max_length=100, null=True, unique=True)), ], ), ] mssql-django-1.1.2/testapp/migrations/0003_test_unique_nullable_part2.py000066400000000000000000000011311417607077400263260ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0002_test_unique_nullable_part1'), ] operations = [ # Run test for issue https://github.com/ESSolutions/django-mssql-backend/issues/38 # Now remove the null=True to check this transition is correctly handled. 
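        # Hedged expectation (not asserted here): the schema editor should drop
        # the filtered unique index (roughly CREATE UNIQUE INDEX ... WHERE
        # [test_field] IS NOT NULL) and add a plain UNIQUE constraint in its
        # place, since the column is no longer nullable.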
migrations.AlterField( model_name='testuniquenullablemodel', name='test_field', field=models.CharField(default='', max_length=100, unique=True), preserve_default=False, ), ] mssql-django-1.1.2/testapp/migrations/0004_test_unique_type_change_part1.py000066400000000000000000000017771417607077400270360ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0003_test_unique_nullable_part2'), ] # Prep test for issue https://github.com/ESSolutions/django-mssql-backend/issues/45 operations = [ # for case 1: migrations.AddField( model_name='testuniquenullablemodel', name='x', field=models.CharField(max_length=10, null=True, unique=True), ), # for case 2: migrations.CreateModel( name='TestNullableUniqueTogetherModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('a', models.CharField(max_length=50, null=True)), ('b', models.CharField(max_length=50)), ('c', models.CharField(max_length=50)), ], options={ 'unique_together': {('a', 'b')}, }, ), ] mssql-django-1.1.2/testapp/migrations/0005_test_unique_type_change_part2.py000066400000000000000000000025771417607077400270370ustar00rootroot00000000000000from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testapp', '0004_test_unique_type_change_part1'), ] # Run test for issue https://github.com/ESSolutions/django-mssql-backend/issues/45 operations = [ # Case 1: changing max_length changes the column type - the filtered UNIQUE INDEX which implements # the nullable unique constraint, should be correctly reinstated after this change of column type # (see also the specific unit test which checks that multiple rows with NULL are allowed) migrations.AlterField( model_name='testuniquenullablemodel', name='x', field=models.CharField(max_length=11, null=True, unique=True), ), # Case 2: the filtered UNIQUE INDEX implementing the partially nullable `unique_together` constraint # should be correctly reinstated after this column type change migrations.AlterField( model_name='testnullableuniquetogethermodel', name='a', field=models.CharField(max_length=51, null=True), ), # ...similarly adding another field to the `unique_together` should preserve the constraint correctly migrations.AlterUniqueTogether( name='testnullableuniquetogethermodel', unique_together={('a', 'b', 'c')}, ), ] mssql-django-1.1.2/testapp/migrations/0006_test_remove_onetoone_field_part1.py000066400000000000000000000013471417607077400275230ustar00rootroot00000000000000# Generated by Django 3.0.4 on 2020-04-20 14:59 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('testapp', '0005_test_unique_type_change_part2'), ] operations = [ migrations.CreateModel( name='TestRemoveOneToOneFieldModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('a', models.CharField(max_length=50)), ('b', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='testapp.TestRemoveOneToOneFieldModel')), ], ), ] mssql-django-1.1.2/testapp/migrations/0007_test_remove_onetoone_field_part2.py000066400000000000000000000005701417607077400275220ustar00rootroot00000000000000# Generated by Django 3.0.4 on 2020-04-20 14:59 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('testapp', '0006_test_remove_onetoone_field_part1'), ] operations = [ 
        migrations.RemoveField(
            model_name='testremoveonetoonefieldmodel',
            name='b',
        ),
    ]
mssql-django-1.1.2/testapp/migrations/0008_test_drop_table_with_foreign_key_reference_part1.py000066400000000000000000000011101417607077400327120ustar00rootroot00000000000000from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0007_test_remove_onetoone_field_part2'),
    ]

    operations = [
        migrations.CreateModel(
            name="Pony",
            fields=[
                ("id", models.AutoField(primary_key=True)),
            ]),
        migrations.CreateModel(
            name="Rider",
            fields=[
                ("id", models.AutoField(primary_key=True)),
                ("pony", models.ForeignKey("testapp.Pony", models.CASCADE)),
            ]),
    ]
mssql-django-1.1.2/testapp/migrations/0009_test_drop_table_with_foreign_key_reference_part2.py000066400000000000000000000007351417607077400327260ustar00rootroot00000000000000from django.db import migrations, models


class Migration(migrations.Migration):
    '''
    SQL Server raises an error when dropping a table that is still referenced
    by a foreign key constraint. This test checks that such a table can be
    dropped correctly.
    '''

    dependencies = [
        ('testapp', '0008_test_drop_table_with_foreign_key_reference_part1'),
    ]

    operations = [
        migrations.DeleteModel("Pony"),
        migrations.DeleteModel("Rider"),
    ]
mssql-django-1.1.2/testapp/migrations/0010_pizza_topping.py000066400000000000000000000013711417607077400236740ustar00rootroot00000000000000# Generated by Django 3.1.7 on 2021-03-16 17:07

from django.db import migrations, models
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0009_test_drop_table_with_foreign_key_reference_part2'),
    ]

    operations = [
        migrations.CreateModel(
            name='Topping',
            fields=[
                ('name', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
            ],
        ),
        migrations.CreateModel(
            name='Pizza',
            fields=[
                ('name', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('toppings', models.ManyToManyField(to='testapp.Topping')),
            ],
        ),
    ]
mssql-django-1.1.2/testapp/migrations/0011_test_unique_constraints.py000066400000000000000000000041431417607077400257740ustar00rootroot00000000000000# Generated by Django 3.1.5 on 2021-01-18 00:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0010_pizza_topping'),
    ]

    operations = [
        migrations.CreateModel(
            name='TestUnsupportableUniqueConstraint',
            fields=[
                (
                    'id',
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name='ID',
                    ),
                ),
                ('_type', models.CharField(max_length=50)),
                ('status', models.CharField(max_length=50)),
            ],
            options={
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='TestSupportableUniqueConstraint',
            fields=[
                (
                    'id',
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name='ID',
                    ),
                ),
                ('_type', models.CharField(max_length=50)),
                ('status', models.CharField(max_length=50)),
            ],
        ),
        migrations.AddConstraint(
            model_name='testsupportableuniqueconstraint',
            constraint=models.UniqueConstraint(
                condition=models.Q(
                    ('status', 'in_progress'),
                    ('status', 'needs_changes'),
                    ('status', 'published'),
                ),
                fields=('_type',),
                name='and_constraint',
            ),
        ),
        migrations.AddConstraint(
            model_name='testsupportableuniqueconstraint',
            constraint=models.UniqueConstraint(
                condition=models.Q(status__in=['in_progress', 'needs_changes']),
                fields=('_type',),
                name='in_constraint',
            ),
        ),
    ]
mssql-django-1.1.2/testapp/migrations/0012_test_indexes_retained_part1.py000066400000000000000000000012601417607077400264560ustar00rootroot00000000000000from
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0011_test_unique_constraints'),
    ]

    # Prep test for issue https://github.com/ESSolutions/django-mssql-backend/issues/58
    operations = [
        migrations.CreateModel(
            name='TestIndexesRetained',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('a', models.IntegerField(db_index=True)),
                ('b', models.IntegerField(db_index=True)),
                ('c', models.IntegerField(db_index=True)),
            ],
        ),
    ]

mssql-django-1.1.2/testapp/migrations/0013_test_indexes_retained_part2.py000066400000000000000000000014471417607077400264670ustar00rootroot00000000000000
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0012_test_indexes_retained_part1'),
    ]

    # Run test for issue https://github.com/ESSolutions/django-mssql-backend/issues/58
    # where the following operations should leave indexes intact
    operations = [
        migrations.AlterField(
            model_name='testindexesretained',
            name='a',
            field=models.IntegerField(db_index=True, null=True),
        ),
        migrations.RenameField(
            model_name='testindexesretained',
            old_name='b',
            new_name='b_renamed',
        ),
        migrations.RenameModel(
            old_name='TestIndexesRetained',
            new_name='TestIndexesRetainedRenamed',
        ),
    ]

mssql-django-1.1.2/testapp/migrations/0014_test_rename_m2mfield_part1.py000066400000000000000000000015671417607077400262060ustar00rootroot00000000000000
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0013_test_indexes_retained_part2'),
    ]

    operations = [
        # Prep test for issue https://github.com/microsoft/mssql-django/issues/86
        migrations.CreateModel(
            name='M2MOtherModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=10)),
            ],
        ),
        migrations.CreateModel(
            name='TestRenameManyToManyFieldModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('others', models.ManyToManyField(to='testapp.M2MOtherModel')),
            ],
        ),
    ]

mssql-django-1.1.2/testapp/migrations/0015_test_rename_m2mfield_part2.py000066400000000000000000000011251417607077400261760ustar00rootroot00000000000000
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0014_test_rename_m2mfield_part1'),
    ]

    operations = [
        # Run test for issue https://github.com/microsoft/mssql-django/issues/86
        # Must be in a separate migration so that the unique index was created
        # (deferred after the previous migration) before we do the rename.
        migrations.RenameField(
            model_name='testrenamemanytomanyfieldmodel',
            old_name='others',
            new_name='others_renamed',
        ),
    ]

mssql-django-1.1.2/testapp/migrations/__init__.py000066400000000000000000000000001417607077400220620ustar00rootroot00000000000000
mssql-django-1.1.2/testapp/models.py000066400000000000000000000116371417607077400174520ustar00rootroot00000000000000
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.
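# Background for several of the models below: SQL Server treats NULLs as equal
# for plain UNIQUE constraints, so the backend implements unique=True on a
# nullable column with a filtered unique index instead - along these lines
# (a sketch, not the exact DDL the backend generates):
#
#   CREATE UNIQUE INDEX [x_uniq] ON [testapp_testuniquenullablemodel] ([x])
#   WHERE [x] IS NOT NULL;
#
# That way multiple NULLs are allowed while non-NULL values stay unique.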
import uuid

from django.db import models
from django.db.models import Q
from django.utils import timezone


class Author(models.Model):
    name = models.CharField(max_length=100)


class Editor(models.Model):
    name = models.CharField(max_length=100)


class Post(models.Model):
    title = models.CharField('title', max_length=255)
    author = models.ForeignKey(Author, models.CASCADE)
    # Optional secondary author
    alt_editor = models.ForeignKey(Editor, models.SET_NULL, blank=True, null=True)

    class Meta:
        unique_together = (
            ('author', 'title', 'alt_editor'),
        )

    def __str__(self):
        return self.title


class Comment(models.Model):
    post = models.ForeignKey(Post, on_delete=models.CASCADE)
    text = models.TextField('text')
    created_at = models.DateTimeField(default=timezone.now)

    def __str__(self):
        return self.text


class UUIDModel(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    def __str__(self):
        return self.pk


class TestUniqueNullableModel(models.Model):
    # Issue https://github.com/ESSolutions/django-mssql-backend/issues/38:
    # This field started off as unique=True *and* null=True so it is implemented with a filtered unique index
    # Then it is made non-nullable by a subsequent migration, to check this is correctly handled (the index
    # should be dropped, then a normal unique constraint should be added, now that the column is not nullable)
    test_field = models.CharField(max_length=100, unique=True)

    # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 1)
    # Field used for testing changing the 'type' of a field that's both unique & nullable
    x = models.CharField(max_length=11, null=True, unique=True)


class TestNullableUniqueTogetherModel(models.Model):
    class Meta:
        unique_together = (('a', 'b', 'c'),)

    # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 2)
    # Fields used for testing changing the 'type' of a field that is in a `unique_together`
    a = models.CharField(max_length=51, null=True)
    b = models.CharField(max_length=50)
    c = models.CharField(max_length=50)
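# The partially nullable unique_together above is likewise expected to end up
# as a filtered unique index over all three columns, filtering out NULLs in the
# nullable member - sketched (not the literal generated statement) as:
#
#   CREATE UNIQUE INDEX [..._uniq] ON [testapp_testnullableuniquetogethermodel] ([a], [b], [c])
#   WHERE [a] IS NOT NULL;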
class TestRemoveOneToOneFieldModel(models.Model):
    # Issue https://github.com/ESSolutions/django-mssql-backend/pull/51
    # Fields used for testing removing OneToOne field. Verifies that delete_unique
    # does not try to remove indexes that have already been removed
    # b = models.OneToOneField('self', on_delete=models.SET_NULL, null=True)
    a = models.CharField(max_length=50)


class TestIndexesRetainedRenamed(models.Model):
    # Issue https://github.com/ESSolutions/django-mssql-backend/issues/58
    # In all these cases the column index should still exist afterwards

    # case (a) `a` starts out not nullable, but then is changed to be nullable
    a = models.IntegerField(db_index=True, null=True)
    # case (b) column originally called `b` is renamed
    b_renamed = models.IntegerField(db_index=True)
    # case (c) this entire model is renamed - this is just a column whose index can be checked afterwards
    c = models.IntegerField(db_index=True)


class M2MOtherModel(models.Model):
    name = models.CharField(max_length=10)


class TestRenameManyToManyFieldModel(models.Model):
    # Issue https://github.com/microsoft/mssql-django/issues/86
    others_renamed = models.ManyToManyField(M2MOtherModel)


class Topping(models.Model):
    name = models.UUIDField(primary_key=True, default=uuid.uuid4)


class Pizza(models.Model):
    name = models.UUIDField(primary_key=True, default=uuid.uuid4)
    toppings = models.ManyToManyField(Topping)

    def __str__(self):
        return "%s (%s)" % (
            self.name,
            # topping names are UUIDs, so they must be stringified before join()
            ", ".join(str(topping.name) for topping in self.toppings.all()),
        )


class TestUnsupportableUniqueConstraint(models.Model):
    class Meta:
        managed = False
        constraints = [
            models.UniqueConstraint(
                name='or_constraint',
                fields=['_type'],
                condition=(Q(status='in_progress') | Q(status='needs_changes')),
            ),
        ]

    _type = models.CharField(max_length=50)
    status = models.CharField(max_length=50)


class TestSupportableUniqueConstraint(models.Model):
    class Meta:
        constraints = [
            models.UniqueConstraint(
                name='and_constraint',
                fields=['_type'],
                condition=(
                    Q(status='in_progress')
                    & Q(status='needs_changes')
                    & Q(status='published')
                ),
            ),
            models.UniqueConstraint(
                name='in_constraint',
                fields=['_type'],
                condition=(Q(status__in=['in_progress', 'needs_changes'])),
            ),
        ]

    _type = models.CharField(max_length=50)
    status = models.CharField(max_length=50)

mssql-django-1.1.2/testapp/runners.py000066400000000000000000000025561417607077400176610ustar00rootroot00000000000000
from django.test.runner import DiscoverRunner
from django.conf import settings
import xmlrunner

EXCLUDED_TESTS = getattr(settings, 'EXCLUDED_TESTS', [])
REGEX_TESTS = getattr(settings, 'REGEX_TESTS', [])
ENABLE_REGEX_TESTS = getattr(settings, 'ENABLE_REGEX_TESTS', False)


def MarkexpectedFailure():
    def decorator(test_item):
        def wrapper():
            # Raise a real exception object: raising a plain string is invalid in Python 3
            raise AssertionError("Expected Failure")
        wrapper.__unittest_expecting_failure__ = True
        return wrapper
    return decorator


class ExcludedTestSuiteRunner(DiscoverRunner):
    def build_suite(self, *args, **kwargs):
        suite = super().build_suite(*args, **kwargs)
        tests = []
        for case in suite:
            test_name = case._testMethodName
            if (
                (ENABLE_REGEX_TESTS and case.id() in EXCLUDED_TESTS)
                or (not ENABLE_REGEX_TESTS and case.id() in EXCLUDED_TESTS + REGEX_TESTS)
            ):
                test_method = getattr(case, test_name)
                setattr(case, test_name, MarkexpectedFailure()(test_method))
            tests.append(case)
        suite._tests = tests
        return suite

    def run_suite(self, suite):
        kwargs = dict(verbosity=1, descriptions=False)
        with open('./result.xml', 'wb') as xml:
            return xmlrunner.XMLTestRunner(
                output=xml, **kwargs).run(suite)

mssql-django-1.1.2/testapp/settings.py000066400000000000000000000345661417607077400200270ustar00rootroot00000000000000
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.
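# These settings feed the custom runner in testapp/runners.py (TEST_RUNNER below):
# EXCLUDED_TESTS and REGEX_TESTS are picked up there via getattr(settings, ...) and
# the listed tests from Django's own suite are wrapped as expected failures.
# Flip ENABLE_REGEX_TESTS to True to actually run the regex lookups against the backend.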
DATABASES = {
    "default": {
        "ENGINE": "mssql",
        "NAME": "default",
        "USER": "sa",
        "PASSWORD": "MyPassword42",
        "HOST": "localhost",
        "PORT": "1433",
        "OPTIONS": {"driver": "ODBC Driver 17 for SQL Server"},
    },
    "other": {
        "ENGINE": "mssql",
        "NAME": "other",
        "USER": "sa",
        "PASSWORD": "MyPassword42",
        "HOST": "localhost",
        "PORT": "1433",
        "OPTIONS": {"driver": "ODBC Driver 17 for SQL Server"},
    },
}

INSTALLED_APPS = (
    'django.contrib.contenttypes',
    'django.contrib.staticfiles',
    'django.contrib.auth',
    'mssql',
    'testapp',
)

SECRET_KEY = "django_tests_secret_key"

PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
]

DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'

ENABLE_REGEX_TESTS = False

USE_TZ = False

TEST_RUNNER = "testapp.runners.ExcludedTestSuiteRunner"

EXCLUDED_TESTS = [
    'aggregation.tests.AggregateTestCase.test_expression_on_aggregation',
    'aggregation_regress.tests.AggregationTests.test_annotated_conditional_aggregate',
    'aggregation_regress.tests.AggregationTests.test_annotation_with_value',
    'aggregation.tests.AggregateTestCase.test_distinct_on_aggregate',
    'annotations.tests.NonAggregateAnnotationTestCase.test_annotate_exists',
    'custom_lookups.tests.BilateralTransformTests.test_transform_order_by',
    'expressions.tests.BasicExpressionsTests.test_filtering_on_annotate_that_uses_q',
    'expressions.tests.BasicExpressionsTests.test_order_by_exists',
    'expressions.tests.ExpressionOperatorTests.test_righthand_power',
    'expressions.tests.FTimeDeltaTests.test_datetime_subtraction_microseconds',
    'expressions.tests.FTimeDeltaTests.test_duration_with_datetime_microseconds',
    'expressions.tests.IterableLookupInnerExpressionsTests.test_expressions_in_lookups_join_choice',
    'expressions_case.tests.CaseExpressionTests.test_annotate_with_in_clause',
    'expressions_window.tests.WindowFunctionTests.test_nth_returns_null',
    'expressions_window.tests.WindowFunctionTests.test_nthvalue',
    'expressions_window.tests.WindowFunctionTests.test_range_n_preceding_and_following',
    'field_deconstruction.tests.FieldDeconstructionTests.test_binary_field',
    'ordering.tests.OrderingTests.test_orders_nulls_first_on_filtered_subquery',
    'get_or_create.tests.UpdateOrCreateTransactionTests.test_creation_in_transaction',
    'indexes.tests.PartialIndexTests.test_multiple_conditions',
    'introspection.tests.IntrospectionTests.test_get_constraints',
    'migrations.test_executor.ExecutorTests.test_alter_id_type_with_fk',
    'migrations.test_operations.OperationTests.test_add_constraint_percent_escaping',
    'migrations.test_operations.OperationTests.test_alter_field_pk',
    'migrations.test_operations.OperationTests.test_alter_field_reloads_state_on_fk_with_to_field_target_changes',
    'migrations.test_operations.OperationTests.test_autofield_foreignfield_growth',
    'schema.tests.SchemaTests.test_alter_auto_field_to_char_field',
    'schema.tests.SchemaTests.test_alter_auto_field_to_integer_field',
    'schema.tests.SchemaTests.test_alter_implicit_id_to_explicit',
    'schema.tests.SchemaTests.test_alter_int_pk_to_autofield_pk',
    'schema.tests.SchemaTests.test_alter_int_pk_to_bigautofield_pk',
    'schema.tests.SchemaTests.test_alter_pk_with_self_referential_field',
    'schema.tests.SchemaTests.test_no_db_constraint_added_during_primary_key_change',
    'schema.tests.SchemaTests.test_remove_field_check_does_not_remove_meta_constraints',
    'schema.tests.SchemaTests.test_remove_field_unique_does_not_remove_meta_constraints',
    'schema.tests.SchemaTests.test_text_field_with_db_index',
    'schema.tests.SchemaTests.test_unique_together_with_fk',
    'schema.tests.SchemaTests.test_unique_together_with_fk_with_existing_index',
    'aggregation.tests.AggregateTestCase.test_count_star',
    'aggregation_regress.tests.AggregationTests.test_values_list_annotation_args_ordering',
    'datatypes.tests.DataTypesTestCase.test_error_on_timezone',
    'db_functions.math.test_degrees.DegreesTests.test_integer',
    'db_functions.math.test_power.PowerTests.test_integer',
    'db_functions.math.test_radians.RadiansTests.test_integer',
    'db_functions.text.test_pad.PadTests.test_pad',
    'db_functions.text.test_replace.ReplaceTests.test_case_sensitive',
    'expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_right_shift_operator',
    'expressions.tests.FTimeDeltaTests.test_invalid_operator',
    'fixtures_regress.tests.TestFixtures.test_loaddata_raises_error_when_fixture_has_invalid_foreign_key',
    'invalid_models_tests.test_ordinary_fields.TextFieldTests.test_max_length_warning',
    'model_indexes.tests.IndexesTests.test_db_tablespace',
    'ordering.tests.OrderingTests.test_deprecated_values_annotate',
    'queries.test_qs_combinators.QuerySetSetOperationTests.test_limits',
    'backends.tests.BackendTestCase.test_unicode_password',
    'introspection.tests.IntrospectionTests.test_get_table_description_types',
    'migrations.test_commands.MigrateTests.test_migrate_syncdb_app_label',
    'migrations.test_commands.MigrateTests.test_migrate_syncdb_deferred_sql_executed_with_schemaeditor',
    'migrations.test_operations.OperationTests.test_alter_field_pk_fk',
    'schema.tests.SchemaTests.test_add_foreign_key_quoted_db_table',
    'schema.tests.SchemaTests.test_unique_and_reverse_m2m',
    'schema.tests.SchemaTests.test_unique_no_unnecessary_fk_drops',
    'select_for_update.tests.SelectForUpdateTests.test_for_update_after_from',
    'backends.tests.LastExecutedQueryTest.test_last_executed_query',
    'db_functions.datetime.test_now.NowTests.test_basic',
    'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_exact_lookup',
    'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_greaterthan_lookup',
    'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_lessthan_lookup',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_exact_lookup',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_greaterthan_lookup',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_lessthan_lookup',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_ambiguous_and_invalid_times',
    'delete.tests.DeletionTests.test_only_referenced_fields_selected',
    'queries.test_db_returning.ReturningValuesTests.test_insert_returning',
    'queries.test_db_returning.ReturningValuesTests.test_insert_returning_non_integer',
    'backends.tests.BackendTestCase.test_queries',
    'introspection.tests.IntrospectionTests.test_smallautofield',
    'schema.tests.SchemaTests.test_inline_fk',
    'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_exists',
    'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_values_collision',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_func_with_timezone',
    'db_functions.text.test_md5.MD5Tests.test_basic',
    'db_functions.text.test_md5.MD5Tests.test_transform',
    'db_functions.text.test_sha1.SHA1Tests.test_basic',
    'db_functions.text.test_sha1.SHA1Tests.test_transform',
    'db_functions.text.test_sha224.SHA224Tests.test_basic',
    'db_functions.text.test_sha224.SHA224Tests.test_transform',
    'db_functions.text.test_sha256.SHA256Tests.test_basic',
    'db_functions.text.test_sha256.SHA256Tests.test_transform',
    'db_functions.text.test_sha384.SHA384Tests.test_basic',
    'db_functions.text.test_sha384.SHA384Tests.test_transform',
    'db_functions.text.test_sha512.SHA512Tests.test_basic',
    'db_functions.text.test_sha512.SHA512Tests.test_transform',
    'expressions.tests.BasicExpressionsTests.test_case_in_filter_if_boolean_output_field',
    'expressions.tests.BasicExpressionsTests.test_subquery_in_filter',
    'expressions.tests.FTimeDeltaTests.test_date_subquery_subtraction',
    'expressions.tests.FTimeDeltaTests.test_datetime_subquery_subtraction',
    'expressions.tests.FTimeDeltaTests.test_time_subquery_subtraction',
    'expressions.tests.BasicExpressionsTests.test_filtering_on_q_that_is_boolean',
    'migrations.test_operations.OperationTests.test_alter_field_reloads_state_on_fk_with_to_field_target_type_change',
    'migrations.test_operations.OperationTests.test_autofield__bigautofield_foreignfield_growth',
    'migrations.test_operations.OperationTests.test_smallfield_autofield_foreignfield_growth',
    'migrations.test_operations.OperationTests.test_smallfield_bigautofield_foreignfield_growth',
    'schema.tests.SchemaTests.test_alter_auto_field_quoted_db_column',
    'schema.tests.SchemaTests.test_alter_autofield_pk_to_bigautofield_pk_sequence_owner',
    'schema.tests.SchemaTests.test_alter_autofield_pk_to_smallautofield_pk_sequence_owner',
    'schema.tests.SchemaTests.test_alter_primary_key_quoted_db_table',
    'schema.tests.SchemaTests.test_alter_smallint_pk_to_smallautofield_pk',
    'annotations.tests.NonAggregateAnnotationTestCase.test_combined_expression_annotation_with_aggregation',
    'bulk_create.tests.BulkCreateTests.test_bulk_insert_nullable_fields',
    'db_functions.comparison.test_cast.CastTests.test_cast_to_integer',
    'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_func',
    'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_iso_weekday_func',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_func',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_iso_weekday_func',
    'datetimes.tests.DateTimesTests.test_datetimes_ambiguous_and_invalid_times',
    'expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor',
    'expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor_null',
    'inspectdb.tests.InspectDBTestCase.test_number_field_types',
    'inspectdb.tests.InspectDBTestCase.test_json_field',
    'ordering.tests.OrderingTests.test_default_ordering_by_f_expression',
    'ordering.tests.OrderingTests.test_order_by_nulls_first',
    'ordering.tests.OrderingTests.test_order_by_nulls_last',
    'queries.test_qs_combinators.QuerySetSetOperationTests.test_ordering_by_f_expression_and_alias',
    'queries.test_db_returning.ReturningValuesTests.test_insert_returning_multiple',
    'dbshell.tests.DbshellCommandTestCase.test_command_missing',
    'schema.tests.SchemaTests.test_char_field_pk_to_auto_field',
    'datetimes.tests.DateTimesTests.test_21432',
    # JSONFields
    'model_fields.test_jsonfield.TestQuerying.test_has_key_list',
    'model_fields.test_jsonfield.TestQuerying.test_has_key_null_value',
    'model_fields.test_jsonfield.TestQuerying.test_key_quoted_string',
    'model_fields.test_jsonfield.TestQuerying.test_lookups_with_key_transform',
    'model_fields.test_jsonfield.TestQuerying.test_ordering_grouping_by_count',
    'model_fields.test_jsonfield.TestQuerying.test_isnull_key',
    'model_fields.test_jsonfield.TestQuerying.test_none_key',
    'model_fields.test_jsonfield.TestQuerying.test_none_key_and_exact_lookup',
    'model_fields.test_jsonfield.TestQuerying.test_key_escape',
    'model_fields.test_jsonfield.TestQuerying.test_ordering_by_transform',
    'expressions_window.tests.WindowFunctionTests.test_key_transform',
    # Django 3.2
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_func_with_timezone',
    'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_timezone_applied_before_truncation',
    'expressions.tests.ExistsTests.test_optimizations',
    'expressions.tests.FTimeDeltaTests.test_delta_add',
    'expressions.tests.FTimeDeltaTests.test_delta_subtract',
    'expressions.tests.FTimeDeltaTests.test_delta_update',
    'expressions.tests.FTimeDeltaTests.test_exclude',
    'expressions.tests.FTimeDeltaTests.test_mixed_comparisons1',
    'expressions.tests.FTimeDeltaTests.test_negative_timedelta_update',
    'inspectdb.tests.InspectDBTestCase.test_field_types',
    'lookup.tests.LookupTests.test_in_ignore_none',
    'lookup.tests.LookupTests.test_in_ignore_none_with_unhashable_items',
    'queries.test_qs_combinators.QuerySetSetOperationTests.test_exists_union',
    'introspection.tests.IntrospectionTests.test_get_constraints_unique_indexes_orders',
    'schema.tests.SchemaTests.test_ci_cs_db_collation',
    'select_for_update.tests.SelectForUpdateTests.test_unsuported_no_key_raises_error',
    # Django 4.0
    'aggregation.tests.AggregateTestCase.test_aggregation_default_using_date_from_database',
    'aggregation.tests.AggregateTestCase.test_aggregation_default_using_datetime_from_database',
    'aggregation.tests.AggregateTestCase.test_aggregation_default_using_time_from_database',
    'expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide',
    'lookup.tests.LookupQueryingTests.test_alias',
    'lookup.tests.LookupQueryingTests.test_filter_exists_lhs',
    'lookup.tests.LookupQueryingTests.test_filter_lookup_lhs',
    'lookup.tests.LookupQueryingTests.test_filter_subquery_lhs',
    'lookup.tests.LookupQueryingTests.test_filter_wrapped_lookup_lhs',
    'lookup.tests.LookupQueryingTests.test_lookup_in_order_by',
    'lookup.tests.LookupTests.test_lookup_rhs',
    'order_with_respect_to.tests.OrderWithRespectToBaseTests.test_previous_and_next_in_order',
    'ordering.tests.OrderingTests.test_default_ordering_does_not_affect_group_by',
    'queries.test_explain.ExplainUnsupportedTests.test_message',
    'aggregation.tests.AggregateTestCase.test_coalesced_empty_result_set',
    'aggregation.tests.AggregateTestCase.test_empty_result_optimization',
    'queries.tests.Queries6Tests.test_col_alias_quoted',
    'backends.tests.BackendTestCase.test_queries_logger',
    'migrations.test_operations.OperationTests.test_alter_field_pk_mti_fk',
    'migrations.test_operations.OperationTests.test_run_sql_add_missing_semicolon_on_collect_sql',
]

REGEX_TESTS = [
    'lookup.tests.LookupTests.test_regex',
    'lookup.tests.LookupTests.test_regex_backreferencing',
    'lookup.tests.LookupTests.test_regex_non_ascii',
    'lookup.tests.LookupTests.test_regex_non_string',
    'lookup.tests.LookupTests.test_regex_null',
    'model_fields.test_jsonfield.TestQuerying.test_key_iregex',
    'model_fields.test_jsonfield.TestQuerying.test_key_regex',
]

mssql-django-1.1.2/testapp/tests/000077500000000000000000000000001417607077400167515ustar00rootroot00000000000000
mssql-django-1.1.2/testapp/tests/__init__.py000066400000000000000000000000001417607077400210500ustar00rootroot00000000000000
mssql-django-1.1.2/testapp/tests/test_constraints.py000066400000000000000000000135661417607077400227420ustar00rootroot00000000000000
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.
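# The @skipUnlessDBFeature decorators below key off flags defined on this
# backend's DatabaseFeatures; with a configured connection they can be
# inspected directly, e.g.:
#
#   from django.db import connection
#   connection.features.supports_nullable_unique_constraints
#   connection.features.supports_partially_nullable_unique_constraints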
from django.db import connections, migrations, models
from django.db.migrations.state import ProjectState
from django.db.utils import IntegrityError
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature

from mssql.base import DatabaseWrapper

from ..models import (
    Author,
    Editor,
    M2MOtherModel,
    Post,
    TestUniqueNullableModel,
    TestNullableUniqueTogetherModel,
    TestRenameManyToManyFieldModel,
)


@skipUnlessDBFeature('supports_nullable_unique_constraints')
class TestNullableUniqueColumn(TestCase):
    def test_multiple_nulls(self):
        # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 1)
        # After field `x` has had its type changed, the filtered UNIQUE INDEX which is
        # implementing the nullable unique constraint should still be correctly in place
        # i.e. allowing multiple NULLs but still enforcing uniqueness of non-NULLs

        # Allowed
        TestUniqueNullableModel.objects.create(x=None, test_field='randomness')
        TestUniqueNullableModel.objects.create(x=None, test_field='doesntmatter')

        # Disallowed
        TestUniqueNullableModel.objects.create(x="foo", test_field='irrelevant')
        with self.assertRaises(IntegrityError):
            TestUniqueNullableModel.objects.create(x="foo", test_field='nonsense')


@skipUnlessDBFeature('supports_partially_nullable_unique_constraints')
class TestPartiallyNullableUniqueTogether(TestCase):
    def test_partially_nullable(self):
        # Check basic behaviour of `unique_together` where at least 1 of the columns is nullable

        # It should be possible to have 2 rows both with NULL `alt_editor`
        author = Author.objects.create(name="author")
        Post.objects.create(title="foo", author=author)
        Post.objects.create(title="foo", author=author)

        # But `unique_together` is still enforced for non-NULL values
        editor = Editor.objects.create(name="editor")
        Post.objects.create(title="foo", author=author, alt_editor=editor)
        with self.assertRaises(IntegrityError):
            Post.objects.create(title="foo", author=author, alt_editor=editor)

    def test_after_type_change(self):
        # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 2)
        # After one of the fields in the `unique_together` has had its type changed
        # in a migration, the constraint should still be correctly enforced

        # Multiple rows with a=NULL are considered different
        TestNullableUniqueTogetherModel.objects.create(a=None, b='bbb', c='ccc')
        TestNullableUniqueTogetherModel.objects.create(a=None, b='bbb', c='ccc')

        # Uniqueness still enforced for non-NULL values
        TestNullableUniqueTogetherModel.objects.create(a='aaa', b='bbb', c='ccc')
        with self.assertRaises(IntegrityError):
            TestNullableUniqueTogetherModel.objects.create(a='aaa', b='bbb', c='ccc')


class TestRenameManyToManyField(TestCase):
    def test_uniqueness_still_enforced_afterwards(self):
        # Issue https://github.com/microsoft/mssql-django/issues/86
        # Prep
        thing1 = TestRenameManyToManyFieldModel.objects.create()
        other1 = M2MOtherModel.objects.create(name='1')
        other2 = M2MOtherModel.objects.create(name='2')
        thing1.others_renamed.set([other1, other2])

        # Check that the unique_together on the through table is still enforced
        ThroughModel = TestRenameManyToManyFieldModel.others_renamed.through
        with self.assertRaises(IntegrityError, msg='Through model fails to enforce uniqueness after m2m rename'):
            # This should fail due to the unique_together because (thing1, other1) is already in the through table
            ThroughModel.objects.create(testrenamemanytomanyfieldmodel=thing1, m2mothermodel=other1)


class TestUniqueConstraints(TransactionTestCase):
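    # Uses TransactionTestCase rather than TestCase, presumably because applying
    # a migration through the schema editor doesn't mix well with the per-test
    # transaction that TestCase wraps around each test.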
    def test_unsupportable_unique_constraint(self):
        # Only execute tests when running against SQL Server
        connection = connections['default']
        if isinstance(connection, DatabaseWrapper):

            class TestMigration(migrations.Migration):
                initial = True

                operations = [
                    migrations.CreateModel(
                        name='TestUnsupportableUniqueConstraint',
                        fields=[
                            (
                                'id',
                                models.AutoField(
                                    auto_created=True,
                                    primary_key=True,
                                    serialize=False,
                                    verbose_name='ID',
                                ),
                            ),
                            ('_type', models.CharField(max_length=50)),
                            ('status', models.CharField(max_length=50)),
                        ],
                    ),
                    migrations.AddConstraint(
                        model_name='testunsupportableuniqueconstraint',
                        constraint=models.UniqueConstraint(
                            condition=models.Q(
                                ('status', 'in_progress'),
                                ('status', 'needs_changes'),
                                _connector='OR',
                            ),
                            fields=('_type',),
                            name='or_constraint',
                        ),
                    ),
                ]

            migration = TestMigration('testapp', 'test_unsupportable_unique_constraint')

            with connection.schema_editor(atomic=True) as editor:
                with self.assertRaisesRegex(
                    NotImplementedError, "does not support OR conditions"
                ):
                    return migration.apply(ProjectState(), editor)

mssql-django-1.1.2/testapp/tests/test_expressions.py000066400000000000000000000061011417607077400227420ustar00rootroot00000000000000
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

from unittest import skipUnless

from django import VERSION
from django.db.models import IntegerField, F
from django.db.models.expressions import Case, Exists, OuterRef, Subquery, Value, When
from django.test import TestCase, skipUnlessDBFeature

from ..models import Author, Comment, Post, Editor

DJANGO3 = VERSION[0] >= 3


class TestSubquery(TestCase):
    def setUp(self):
        self.author = Author.objects.create(name="author")
        self.post = Post.objects.create(title="foo", author=self.author)

    def test_with_count(self):
        newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created_at')
        Post.objects.annotate(
            post_exists=Subquery(newest.values('text')[:1])
        ).filter(post_exists=True).count()


class TestExists(TestCase):
    def setUp(self):
        self.author = Author.objects.create(name="author")
        self.post = Post.objects.create(title="foo", author=self.author)

    def test_with_count(self):
        Post.objects.annotate(
            post_exists=Exists(Post.objects.all())
        ).filter(post_exists=True).count()

    @skipUnless(DJANGO3, "Django 3 specific tests")
    def test_with_case_when(self):
        author = Author.objects.annotate(
            has_post=Case(
                When(Exists(Post.objects.filter(author=OuterRef('pk')).values('pk')), then=Value(1)),
                default=Value(0),
                output_field=IntegerField(),
            )
        ).get()
        self.assertEqual(author.has_post, 1)

    @skipUnless(DJANGO3, "Django 3 specific tests")
    def test_order_by_exists(self):
        author_without_posts = Author.objects.create(name="other author")
        authors_by_posts = Author.objects.order_by(Exists(Post.objects.filter(author=OuterRef('pk'))).desc())
        self.assertSequenceEqual(authors_by_posts, [self.author, author_without_posts])

        authors_by_posts = Author.objects.order_by(Exists(Post.objects.filter(author=OuterRef('pk'))).asc())
        self.assertSequenceEqual(authors_by_posts, [author_without_posts, self.author])


@skipUnless(DJANGO3, "Django 3 specific tests")
@skipUnlessDBFeature("order_by_nulls_first")
class TestOrderBy(TestCase):
    def setUp(self):
        self.author = Author.objects.create(name="author")
        self.post = Post.objects.create(title="foo", author=self.author)
        self.editor = Editor.objects.create(name="editor")
        self.post_alt = Post.objects.create(title="Post with editor", author=self.author, alt_editor=self.editor)

    def test_order_by_nulls_last(self):
        results = Post.objects.order_by(F("alt_editor").asc(nulls_last=True)).all()
        self.assertEqual(len(results), 2)
        self.assertIsNotNone(results[0].alt_editor)
        self.assertIsNone(results[1].alt_editor)

    def test_order_by_nulls_first(self):
        results = Post.objects.order_by(F("alt_editor").desc(nulls_first=True)).all()
        self.assertEqual(len(results), 2)
        self.assertIsNone(results[0].alt_editor)
        self.assertIsNotNone(results[1].alt_editor)

mssql-django-1.1.2/testapp/tests/test_fields.py000066400000000000000000000003521417607077400216300ustar00rootroot00000000000000
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

from django.test import TestCase

from ..models import UUIDModel


class TestUUIDField(TestCase):
    def test_create(self):
        UUIDModel.objects.create()

mssql-django-1.1.2/testapp/tests/test_indexes.py000066400000000000000000000033111417607077400220170ustar00rootroot00000000000000
import django.db
from django.test import TestCase

from ..models import (
    TestIndexesRetainedRenamed
)


class TestIndexesRetained(TestCase):
    """
    Issue https://github.com/ESSolutions/django-mssql-backend/issues/58
    Indexes dropped during a migration should be re-created afterwards
    assuming the field still has `db_index=True`
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        # Pre-fetch which indexes exist for the relevant test model
        # now that all the test migrations have run
        connection = django.db.connections[django.db.DEFAULT_DB_ALIAS]
        cls.constraints = connection.introspection.get_constraints(
            connection.cursor(),
            table_name=TestIndexesRetainedRenamed._meta.db_table
        )
        cls.indexes = {k: v for k, v in cls.constraints.items() if v['index'] is True}

    def _assert_index_exists(self, columns):
        matching = {k: v for k, v in self.indexes.items() if set(v['columns']) == columns}
        assert len(matching) == 1, (
            "Expected 1 index for columns %s but found %d %s" % (
                columns,
                len(matching),
                ', '.join(matching.keys())
            )
        )

    def test_field_made_nullable(self):
        # case (a) of https://github.com/ESSolutions/django-mssql-backend/issues/58
        self._assert_index_exists({'a'})

    def test_field_renamed(self):
        # case (b) of https://github.com/ESSolutions/django-mssql-backend/issues/58
        self._assert_index_exists({'b_renamed'})

    def test_table_renamed(self):
        # case (c) of https://github.com/ESSolutions/django-mssql-backend/issues/58
        self._assert_index_exists({'c'})

mssql-django-1.1.2/testapp/tests/test_lookups.py000066400000000000000000000007441417607077400220630ustar00rootroot00000000000000
# Copyright (c) Microsoft Corporation.
# Licensed under the BSD license.

from django.test import TestCase

from ..models import Pizza, Topping


class TestLookups(TestCase):
    def test_large_number_of_params_UUID(self):
        # SQL Server allows at most 2100 parameters per statement; prefetching
        # across this many UUID keys exercises how the backend copes with
        # larger-than-limit IN clauses.
        iterations = 3000
        for _ in range(iterations):
            Pizza.objects.create()
            Topping.objects.create()
        prefetch_result = Pizza.objects.prefetch_related('toppings')
        self.assertEqual(len(prefetch_result), iterations)

mssql-django-1.1.2/tox.ini000066400000000000000000000010731417607077400154430ustar00rootroot00000000000000
[tox]
envlist =
    {py36,py37}-django22,
    {py36,py37,py38,py39}-django30,
    {py36,py37,py38,py39}-django31,
    {py36,py37,py38,py39}-django32,
    {py38, py39, py310}-django40

[testenv]
allowlist_externals =
    /bin/bash
    /usr/bin/bash
    C:\Program Files\Git\bin\bash.EXE
commands =
    python manage.py test --noinput
    bash test.sh
deps =
    coverage==5.5
    unittest-xml-reporting
    django22: django==2.2.*
    django30: django>=3.0,<3.1
    django31: django>=3.1,<3.2
    django32: django==3.2.*
    django40: django>=4.0a1,<4.1
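# Example invocation for a single environment (assuming that interpreter and a
# local SQL Server are available): tox -e py39-django32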