pgspecial-1.9.0/0000755000076500000240000000000013174677543013736 5ustar irinastaff00000000000000pgspecial-1.9.0/License.txt0000644000076500000240000000270012732064265016046 0ustar irinastaff00000000000000Copyright (c) 2015, dbcli All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of pgspecial nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. pgspecial-1.9.0/MANIFEST.in0000644000076500000240000000005613120261217015447 0ustar irinastaff00000000000000include License.txt recursive-include tests * pgspecial-1.9.0/pgspecial/0000755000076500000240000000000013174677543015705 5ustar irinastaff00000000000000pgspecial-1.9.0/pgspecial/__init__.py0000644000076500000240000000041413174675710020007 0ustar irinastaff00000000000000__all__ = [] __version__ = '1.9.0' def export(defn): """Decorator to explicitly mark functions that are exposed in a lib.""" globals()[defn.__name__] = defn __all__.append(defn.__name__) return defn from . import dbcommands from . 
import iocommands pgspecial-1.9.0/pgspecial/dbcommands.py0000644000076500000240000014362013174656601020364 0ustar irinastaff00000000000000import logging from collections import namedtuple from .main import special_command, RAW_QUERY TableInfo = namedtuple("TableInfo", ['checks', 'relkind', 'hasindex', 'hasrules', 'hastriggers', 'hasoids', 'tablespace', 'reloptions', 'reloftype', 'relpersistence']) log = logging.getLogger(__name__) @special_command('\\l', '\\l[+] [pattern]', 'List databases.', aliases=('\\list',)) def list_databases(cur, pattern, verbose): query = '''SELECT d.datname as "Name", pg_catalog.pg_get_userbyid(d.datdba) as "Owner", pg_catalog.pg_encoding_to_char(d.encoding) as "Encoding", d.datcollate as "Collate", d.datctype as "Ctype", pg_catalog.array_to_string(d.datacl, E'\n') AS "Access privileges"''' if verbose: query += ''', CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT') THEN pg_catalog.pg_size_pretty(pg_catalog.pg_database_size(d.datname)) ELSE 'No Access' END as "Size", t.spcname as "Tablespace", pg_catalog.shobj_description(d.oid, 'pg_database') as "Description"''' query += ''' FROM pg_catalog.pg_database d ''' if verbose: query += ''' JOIN pg_catalog.pg_tablespace t on d.dattablespace = t.oid ''' params = [] if pattern: query += ''' WHERE d.datname ~ %s ''' _, schema = sql_name_pattern(pattern) params.append(schema) query = cur.mogrify(query + ' ORDER BY 1', params) cur.execute(query) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] else: return [(None, None, None, cur.statusmessage)] @special_command('\\du', '\\du[+] [pattern]', 'List roles.') def list_roles(cur, pattern, verbose): """ Returns (title, rows, headers, status) """ if cur.connection.server_version > 90000: sql = ''' SELECT r.rolname, r.rolsuper, r.rolinherit, r.rolcreaterole, r.rolcreatedb, r.rolcanlogin, r.rolconnlimit, r.rolvaliduntil, ARRAY(SELECT b.rolname FROM pg_catalog.pg_auth_members m JOIN pg_catalog.pg_roles b ON (m.roleid = b.oid) WHERE m.member = r.oid) as memberof, ''' if verbose: sql += ''' pg_catalog.shobj_description(r.oid, 'pg_authid') AS description, ''' sql += ''' r.rolreplication FROM pg_catalog.pg_roles r ''' else: sql = ''' SELECT u.usename AS rolname, u.usesuper AS rolsuper, true AS rolinherit, false AS rolcreaterole, u.usecreatedb AS rolcreatedb, true AS rolcanlogin, -1 AS rolconnlimit, u.valuntil as rolvaliduntil, ARRAY(SELECT g.groname FROM pg_catalog.pg_group g WHERE u.usesysid = ANY(g.grolist)) as memberof FROM pg_catalog.pg_user u ''' params = [] if pattern: _, schema = sql_name_pattern(pattern) sql += 'WHERE r.rolname ~ %s' params.append(schema) sql = cur.mogrify(sql + " ORDER BY 1", params) log.debug(sql) cur.execute(sql) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] @special_command('\\db', '\\db[+] [pattern]', 'List tablespaces.') def list_tablespaces(cur, pattern, **_): """ Returns (title, rows, headers, status) """ cur.execute("SELECT EXISTS(SELECT * FROM pg_proc WHERE proname = 'pg_tablespace_location')") (is_location,) = cur.fetchone() sql = '''SELECT n.spcname AS "Name", pg_catalog.pg_get_userbyid(n.spcowner) AS "Owner",''' sql += " pg_catalog.pg_tablespace_location(n.oid)" if is_location else " 'Not supported'" sql += ''' AS "Location" FROM pg_catalog.pg_tablespace n''' params = [] if pattern: _, tbsp = sql_name_pattern(pattern) sql += " WHERE n.spcname ~ %s" params.append(tbsp) sql = cur.mogrify(sql + " ORDER BY 1", 
params) log.debug(sql) cur.execute(sql) headers = [x[0] for x in cur.description] if cur.description else None return [(None, cur, headers, cur.statusmessage)] @special_command('\\dn', '\\dn[+] [pattern]', 'List schemas.') def list_schemas(cur, pattern, verbose): """ Returns (title, rows, headers, status) """ sql = '''SELECT n.nspname AS "Name", pg_catalog.pg_get_userbyid(n.nspowner) AS "Owner"''' + (''', pg_catalog.array_to_string(n.nspacl, E'\\n') AS "Access privileges", pg_catalog.obj_description(n.oid, 'pg_namespace') AS "Description"''' if verbose else '') + """ FROM pg_catalog.pg_namespace n WHERE n.nspname """ params = [] if pattern: _, schema = sql_name_pattern(pattern) sql += '~ %s' params.append(schema) else: sql += "!~ '^pg_' AND n.nspname <> 'information_schema'" sql = cur.mogrify(sql + " ORDER BY 1", params) log.debug(sql) cur.execute(sql) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] @special_command('\\dx', '\\dx[+] [pattern]', 'List extensions.') def list_extensions(cur, pattern, verbose): # Note: psql \dx command seems to ignore schema patterns _, name_pattern = sql_name_pattern(pattern) if verbose: extensions = _find_extensions(cur, name_pattern) if not extensions: msg = 'Did not find any extension named "%s"' % pattern return [(None, cur, [], msg)] results = [] for ext_name, oid in extensions: title = 'Objects in extension "%s"' % ext_name cur, headers, status = _describe_extension(cur, oid) results.append((title, cur, headers, status)) return results sql = ''' SELECT e.extname AS "Name", e.extversion AS "Version", n.nspname AS "Schema", c.description AS "Description" FROM pg_catalog.pg_extension e LEFT JOIN pg_catalog.pg_namespace n ON n.oid = e.extnamespace LEFT JOIN pg_catalog.pg_description c ON c.objoid = e.oid AND c.classoid = 'pg_catalog.pg_extension'::pg_catalog.regclass ''' if name_pattern: sql = cur.mogrify(sql + ' WHERE e.extname ~ %s', [name_pattern]) sql += ' ORDER BY 1' log.debug(sql) cur.execute(sql) headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] def _find_extensions(cur, pattern): sql = 'SELECT e.extname, e.oid FROM pg_catalog.pg_extension e' if pattern: sql = cur.mogrify(sql + ' WHERE e.extname ~ %s', [pattern]) sql += ' ORDER BY 1' log.debug(sql) cur.execute(sql) return cur.fetchall() def _describe_extension(cur, oid): sql = ''' SELECT pg_catalog.pg_describe_object(classid, objid, 0) AS "Object Description" FROM pg_catalog.pg_depend WHERE refclassid = 'pg_catalog.pg_extension'::pg_catalog.regclass AND refobjid = %s AND deptype = 'e' ORDER BY 1''' sql = cur.mogrify(sql, [oid]) log.debug(sql) cur.execute(sql) headers = [x[0] for x in cur.description] return cur, headers, cur.statusmessage def list_objects(cur, pattern, verbose, relkinds): """ Returns (title, rows, header, status) This method is used by list_tables, list_views, list_materialized views and list_indexes relkinds is a list of strings to filter pg_class.relkind """ schema_pattern, table_pattern = sql_name_pattern(pattern) if verbose: verbose_columns = ''' ,pg_catalog.pg_size_pretty(pg_catalog.pg_table_size(c.oid)) as "Size", pg_catalog.obj_description(c.oid, 'pg_class') as "Description" ''' else: verbose_columns = '' sql = '''SELECT n.nspname as "Schema", c.relname as "Name", CASE c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' END as "Type", 
pg_catalog.pg_get_userbyid(c.relowner) as "Owner" ''' + verbose_columns + ''' FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind = ANY(%s) ''' params = [relkinds] if schema_pattern: sql += ' AND n.nspname ~ %s' params.append(schema_pattern) else: sql += ''' AND n.nspname <> 'pg_catalog' AND n.nspname <> 'information_schema' AND n.nspname !~ '^pg_toast' AND pg_catalog.pg_table_is_visible(c.oid) ''' if table_pattern: sql += ' AND c.relname ~ %s' params.append(table_pattern) sql = cur.mogrify(sql + ' ORDER BY 1, 2', params) log.debug(sql) cur.execute(sql) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] @special_command('\\dt', '\\dt[+] [pattern]', 'List tables.') def list_tables(cur, pattern, verbose): return list_objects(cur, pattern, verbose, ['r', '']) @special_command('\\dv', '\\dv[+] [pattern]', 'List views.') def list_views(cur, pattern, verbose): return list_objects(cur, pattern, verbose, ['v', 's', '']) @special_command('\\dm', '\\dm[+] [pattern]', 'List materialized views.') def list_materialized_views(cur, pattern, verbose): return list_objects(cur, pattern, verbose, ['m', 's', '']) @special_command('\\ds', '\\ds[+] [pattern]', 'List sequences.') def list_sequences(cur, pattern, verbose): return list_objects(cur, pattern, verbose, ['S', 's', '']) @special_command('\\di', '\\di[+] [pattern]', 'List indexes.') def list_indexes(cur, pattern, verbose): return list_objects(cur, pattern, verbose, ['i', 's', '']) @special_command('\\df', '\\df[+] [pattern]', 'List functions.') def list_functions(cur, pattern, verbose): if verbose: verbose_columns = ''' ,CASE WHEN p.provolatile = 'i' THEN 'immutable' WHEN p.provolatile = 's' THEN 'stable' WHEN p.provolatile = 'v' THEN 'volatile' END as "Volatility", pg_catalog.pg_get_userbyid(p.proowner) as "Owner", l.lanname as "Language", p.prosrc as "Source code", pg_catalog.obj_description(p.oid, 'pg_proc') as "Description" ''' verbose_table = ''' LEFT JOIN pg_catalog.pg_language l ON l.oid = p.prolang''' else: verbose_columns = verbose_table = '' if cur.connection.server_version > 90000: sql = ''' SELECT n.nspname as "Schema", p.proname as "Name", pg_catalog.pg_get_function_result(p.oid) as "Result data type", pg_catalog.pg_get_function_arguments(p.oid) as "Argument data types", CASE WHEN p.proisagg THEN 'agg' WHEN p.proiswindow THEN 'window' WHEN p.prorettype = 'pg_catalog.trigger'::pg_catalog.regtype THEN 'trigger' ELSE 'normal' END as "Type" ''' + verbose_columns + ''' FROM pg_catalog.pg_proc p LEFT JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace ''' + verbose_table + ''' WHERE ''' else: sql = ''' SELECT n.nspname as "Schema", p.proname as "Name", pg_catalog.format_type(p.prorettype, NULL) as "Result data type", pg_catalog.oidvectortypes(p.proargtypes) as "Argument data types", CASE WHEN p.proisagg THEN 'agg' WHEN p.prorettype = 'pg_catalog.trigger'::pg_catalog.regtype THEN 'trigger' ELSE 'normal' END as "Type" ''' + verbose_columns + ''' FROM pg_catalog.pg_proc p LEFT JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace ''' + verbose_table + ''' WHERE ''' schema_pattern, func_pattern = sql_name_pattern(pattern) params = [] if schema_pattern: sql += ' n.nspname ~ %s ' params.append(schema_pattern) else: sql += ' pg_catalog.pg_function_is_visible(p.oid) ' if func_pattern: sql += ' AND p.proname ~ %s ' params.append(func_pattern) if not (schema_pattern or func_pattern): sql += ''' AND n.nspname <> 'pg_catalog' AND n.nspname 
<> 'information_schema' ''' sql = cur.mogrify(sql + ' ORDER BY 1, 2, 4', params) log.debug(sql) cur.execute(sql) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] @special_command('\\dT', '\\dT[S+] [pattern]', 'List data types') def list_datatypes(cur, pattern, verbose): assert True sql = '''SELECT n.nspname as "Schema", pg_catalog.format_type(t.oid, NULL) AS "Name", ''' if verbose: sql += r''' t.typname AS "Internal name", CASE WHEN t.typrelid != 0 THEN CAST('tuple' AS pg_catalog.text) WHEN t.typlen < 0 THEN CAST('var' AS pg_catalog.text) ELSE CAST(t.typlen AS pg_catalog.text) END AS "Size", pg_catalog.array_to_string( ARRAY( SELECT e.enumlabel FROM pg_catalog.pg_enum e WHERE e.enumtypid = t.oid ORDER BY e.enumsortorder ), E'\n') AS "Elements", pg_catalog.array_to_string(t.typacl, E'\n') AS "Access privileges", pg_catalog.obj_description(t.oid, 'pg_type') AS "Description"''' else: sql += ''' pg_catalog.obj_description(t.oid, 'pg_type') as "Description" ''' if cur.connection.server_version > 90000: sql += ''' FROM pg_catalog.pg_type t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace WHERE (t.typrelid = 0 OR ( SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) AND NOT EXISTS( SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid) ''' else: sql += ''' FROM pg_catalog.pg_type t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace WHERE (t.typrelid = 0 OR ( SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) ''' schema_pattern, type_pattern = sql_name_pattern(pattern) params = [] if schema_pattern: sql += ' AND n.nspname ~ %s ' params.append(schema_pattern) else: sql += ' AND pg_catalog.pg_type_is_visible(t.oid) ' if type_pattern: sql += ''' AND (t.typname ~ %s OR pg_catalog.format_type(t.oid, NULL) ~ %s) ''' params.extend(2 * [type_pattern]) if not (schema_pattern or type_pattern): sql += ''' AND n.nspname <> 'pg_catalog' AND n.nspname <> 'information_schema' ''' sql = cur.mogrify(sql + ' ORDER BY 1, 2', params) log.debug(sql) cur.execute(sql) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] @special_command('describe', 'DESCRIBE [pattern]', '', hidden=True, case_sensitive=False) @special_command('\\d', '\\d[+] [pattern]', 'List or describe tables, views and sequences.') def describe_table_details(cur, pattern, verbose): """ Returns (title, rows, headers, status) """ # This is a simple \d[+] command. No table name to follow. if not pattern: return list_objects(cur, pattern, verbose, ['r', 'v', 'm', 'S', 'f', '']) # This is a \d command. A royal pain in the ass. schema, relname = sql_name_pattern(pattern) where = [] params = [] if not pattern: where.append('pg_catalog.pg_table_is_visible(c.oid)') if schema: where.append('n.nspname ~ %s') params.append(schema) if relname: where.append('c.relname ~ %s') params.append(relname) sql = """SELECT c.oid, n.nspname, c.relname FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace """ + ('WHERE ' + ' AND '.join(where) if where else '') + """ ORDER BY 2,3""" sql = cur.mogrify(sql, params) # Execute the sql, get the results and call describe_one_table_details on each table. log.debug(sql) cur.execute(sql) if not (cur.rowcount > 0): return [(None, None, None, 'Did not find any relation named %s.' 
% pattern)] results = [] for oid, nspname, relname in cur.fetchall(): results.append(describe_one_table_details(cur, nspname, relname, oid, verbose)) return results def describe_one_table_details(cur, schema_name, relation_name, oid, verbose): if verbose: suffix = """pg_catalog.array_to_string(c.reloptions || array(select 'toast.' || x from pg_catalog.unnest(tc.reloptions) x), ', ')""" else: suffix = "''" if cur.connection.server_version > 90000: sql = """SELECT c.relchecks, c.relkind, c.relhasindex, c.relhasrules, c.relhastriggers, c.relhasoids, %s, c.reltablespace, CASE WHEN c.reloftype = 0 THEN '' ELSE c.reloftype::pg_catalog.regtype::pg_catalog.text END, c.relpersistence FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_class tc ON (c.reltoastrelid = tc.oid) WHERE c.oid = '%s'""" % (suffix, oid) elif cur.connection.server_version >= 80400: sql = """SELECT c.relchecks, c.relkind, c.relhasindex, c.relhasrules, c.relhastriggers, c.relhasoids, %s, c.reltablespace, 0 AS reloftype, 'p' AS relpersistence FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_class tc ON (c.reltoastrelid = tc.oid) WHERE c.oid = '%s'""" % (suffix, oid) else: sql = """SELECT c.relchecks, c.relkind, c.relhasindex, c.relhasrules, c.reltriggers > 0 AS relhastriggers, c.relhasoids, %s, c.reltablespace, 0 AS reloftype, 'p' AS relpersistence FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_class tc ON (c.reltoastrelid = tc.oid) WHERE c.oid = '%s'""" % (suffix, oid) # Create a namedtuple called tableinfo and match what's in describe.c log.debug(sql) cur.execute(sql) if (cur.rowcount > 0): tableinfo = TableInfo._make(cur.fetchone()) else: return (None, None, None, 'Did not find any relation with OID %s.' % oid) # If it's a seq, fetch it's value and store it for later. if tableinfo.relkind == 'S': # Do stuff here. 
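# For a sequence relation, the describe output is built by selecting
# directly from the sequence itself; the single row returned is stored
# in seq_values and emitted later as the "Value" column, one value per
# attribute row of the sequence.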
sql = '''SELECT * FROM "%s"."%s"''' % (schema_name, relation_name) log.debug(sql) cur.execute(sql) if not (cur.rowcount > 0): return (None, None, None, 'Something went wrong.') seq_values = cur.fetchone() # Get column info if cur.connection.server_version > 90000: sql = """SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), ( SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef ), a.attnotnull, a.attnum, ( SELECT c.collname FROM pg_catalog.pg_collation c, pg_catalog.pg_type t WHERE c.oid = a.attcollation AND t.oid = a.atttypid AND a.attcollation <> t.typcollation ) AS attcollation """ else: sql = """SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), ( SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef ), a.attnotnull, a.attnum, NULL AS attcollation """ if tableinfo.relkind == 'i': sql += """, pg_catalog.pg_get_indexdef(a.attrelid, a.attnum, TRUE) AS indexdef""" else: sql += """, NULL AS indexdef""" if tableinfo.relkind == 'f': sql += """, CASE WHEN attfdwoptions IS NULL THEN '' ELSE '(' || array_to_string(ARRAY(SELECT quote_ident(option_name) || ' ' || quote_literal(option_value) FROM pg_options_to_table(attfdwoptions)), ', ') || ')' END AS attfdwoptions""" else: sql += """, NULL AS attfdwoptions""" if verbose: sql += """, a.attstorage""" sql += """, CASE WHEN a.attstattarget=-1 THEN NULL ELSE a.attstattarget END AS attstattarget""" if (tableinfo.relkind == 'r' or tableinfo.relkind == 'v' or tableinfo.relkind == 'm' or tableinfo.relkind == 'f' or tableinfo.relkind == 'c'): sql += """, pg_catalog.col_description(a.attrelid, a.attnum)""" sql += """ FROM pg_catalog.pg_attribute a WHERE a.attrelid = '%s' AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum; """ % oid log.debug(sql) cur.execute(sql) res = cur.fetchall() # Set the column names. headers = ['Column', 'Type'] show_modifiers = False if (tableinfo.relkind == 'r' or tableinfo.relkind == 'v' or tableinfo.relkind == 'm' or tableinfo.relkind == 'f' or tableinfo.relkind == 'c'): headers.append('Modifiers') show_modifiers = True if (tableinfo.relkind == 'S'): headers.append("Value") if (tableinfo.relkind == 'i'): headers.append("Definition") if (tableinfo.relkind == 'f'): headers.append("FDW Options") if (verbose): headers.append("Storage") if (tableinfo.relkind == 'r' or tableinfo.relkind == 'm' or tableinfo.relkind == 'f'): headers.append("Stats target") # Column comments, if the relkind supports this feature. */ if (tableinfo.relkind == 'r' or tableinfo.relkind == 'v' or tableinfo.relkind == 'm' or tableinfo.relkind == 'c' or tableinfo.relkind == 'f'): headers.append("Description") view_def = '' # /* Check if table is a view or materialized view */ if ((tableinfo.relkind == 'v' or tableinfo.relkind == 'm') and verbose): sql = """SELECT pg_catalog.pg_get_viewdef('%s'::pg_catalog.oid, true)""" % oid log.debug(sql) cur.execute(sql) if cur.rowcount > 0: view_def = cur.fetchone() # Prepare the cells of the table to print. 
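# Build one output row per attribute fetched above: Column and Type
# always, plus Modifiers (collation / not null / default) for table-like
# relkinds, the sequence Value, the index Definition, foreign-table FDW
# Options, and the verbose-only Storage / Stats target / Description
# columns, mirroring the headers assembled just before this loop.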
cells = [] for i, row in enumerate(res): cell = [] cell.append(row[0]) # Column cell.append(row[1]) # Type if show_modifiers: modifier = '' if row[5]: modifier += ' collate %s' % row[5] if row[3]: modifier += ' not null' if row[2]: modifier += ' default %s' % row[2] cell.append(modifier) # Sequence if tableinfo.relkind == 'S': cell.append(seq_values[i]) # Index column if TableInfo.relkind == 'i': cell.append(row[6]) # /* FDW options for foreign table column, only for 9.2 or later */ if tableinfo.relkind == 'f': cell.append(row[7]) if verbose: storage = row[8] if storage[0] == 'p': cell.append('plain') elif storage[0] == 'm': cell.append('main') elif storage[0] == 'x': cell.append('extended') elif storage[0] == 'e': cell.append('external') else: cell.append('???') if (tableinfo.relkind == 'r' or tableinfo.relkind == 'm' or tableinfo.relkind == 'f'): cell.append(row[9]) # /* Column comments, if the relkind supports this feature. */ if (tableinfo.relkind == 'r' or tableinfo.relkind == 'v' or tableinfo.relkind == 'm' or tableinfo.relkind == 'c' or tableinfo.relkind == 'f'): cell.append(row[10]) cells.append(cell) # Make Footers status = [] if (tableinfo.relkind == 'i'): # /* Footer information about an index */ if cur.connection.server_version > 90000: sql = """SELECT i.indisunique, i.indisprimary, i.indisclustered, i.indisvalid, (NOT i.indimmediate) AND EXISTS ( SELECT 1 FROM pg_catalog.pg_constraint WHERE conrelid = i.indrelid AND conindid = i.indexrelid AND contype IN ('p','u','x') AND condeferrable ) AS condeferrable, (NOT i.indimmediate) AND EXISTS ( SELECT 1 FROM pg_catalog.pg_constraint WHERE conrelid = i.indrelid AND conindid = i.indexrelid AND contype IN ('p','u','x') AND condeferred ) AS condeferred, a.amname, c2.relname, pg_catalog.pg_get_expr(i.indpred, i.indrelid, true) FROM pg_catalog.pg_index i, pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_am a WHERE i.indexrelid = c.oid AND c.oid = '%s' AND c.relam = a.oid AND i.indrelid = c2.oid; """ % oid else: sql = """SELECT i.indisunique, i.indisprimary, i.indisclustered, 't' AS indisvalid, 'f' AS condeferrable, 'f' AS condeferred, a.amname, c2.relname, pg_catalog.pg_get_expr(i.indpred, i.indrelid, true) FROM pg_catalog.pg_index i, pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_am a WHERE i.indexrelid = c.oid AND c.oid = '%s' AND c.relam = a.oid AND i.indrelid = c2.oid; """ % oid log.debug(sql) cur.execute(sql) (indisunique, indisprimary, indisclustered, indisvalid, deferrable, deferred, indamname, indtable, indpred) = cur.fetchone() if indisprimary: status.append("primary key, ") elif indisunique: status.append("unique, ") status.append("%s, " % indamname) #/* we assume here that index and table are in same schema */ status.append('for table "%s.%s"' % (schema_name, indtable)) if indpred: status.append(", predicate (%s)" % indpred) if indisclustered: status.append(", clustered") if not indisvalid: status.append(", invalid") if deferrable: status.append(", deferrable") if deferred: status.append(", initially deferred") status.append('\n') #add_tablespace_footer(&cont, tableinfo.relkind, #tableinfo.tablespace, true); elif tableinfo.relkind == 'S': # /* Footer information about a sequence */ # /* Get the column that owns this sequence */ sql = ("SELECT pg_catalog.quote_ident(nspname) || '.' ||" "\n pg_catalog.quote_ident(relname) || '.' 
||" "\n pg_catalog.quote_ident(attname)" "\nFROM pg_catalog.pg_class c" "\nINNER JOIN pg_catalog.pg_depend d ON c.oid=d.refobjid" "\nINNER JOIN pg_catalog.pg_namespace n ON n.oid=c.relnamespace" "\nINNER JOIN pg_catalog.pg_attribute a ON (" "\n a.attrelid=c.oid AND" "\n a.attnum=d.refobjsubid)" "\nWHERE d.classid='pg_catalog.pg_class'::pg_catalog.regclass" "\n AND d.refclassid='pg_catalog.pg_class'::pg_catalog.regclass" "\n AND d.objid=%s \n AND d.deptype='a'" % oid) log.debug(sql) cur.execute(sql) result = cur.fetchone() if result: status.append("Owned by: %s" % result[0]) #/* #* If we get no rows back, don't show anything (obviously). We should #* never get more than one row back, but if we do, just ignore it and #* don't print anything. #*/ elif (tableinfo.relkind == 'r' or tableinfo.relkind == 'm' or tableinfo.relkind == 'f'): #/* Footer information about a table */ if (tableinfo.hasindex): if cur.connection.server_version > 90000: sql = """SELECT c2.relname, i.indisprimary, i.indisunique, i.indisclustered, i.indisvalid, pg_catalog.pg_get_indexdef(i.indexrelid, 0, true), pg_catalog.pg_get_constraintdef(con.oid, true), contype, condeferrable, condeferred, c2.reltablespace FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index i LEFT JOIN pg_catalog.pg_constraint con ON conrelid = i.indrelid AND conindid = i.indexrelid AND contype IN ('p','u','x') WHERE c.oid = '%s' AND c.oid = i.indrelid AND i.indexrelid = c2.oid ORDER BY i.indisprimary DESC, i.indisunique DESC, c2.relname; """ % oid else: sql = """SELECT c2.relname, i.indisprimary, i.indisunique, i.indisclustered, 't' AS indisvalid, pg_catalog.pg_get_indexdef(i.indexrelid, 0, true), pg_catalog.pg_get_constraintdef(con.oid, true), contype, condeferrable, condeferred, c2.reltablespace FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index i LEFT JOIN pg_catalog.pg_constraint con ON conrelid = i.indrelid AND contype IN ('p','u','x') WHERE c.oid = '%s' AND c.oid = i.indrelid AND i.indexrelid = c2.oid ORDER BY i.indisprimary DESC, i.indisunique DESC, c2.relname; """ % oid log.debug(sql) result = cur.execute(sql) if (cur.rowcount > 0): status.append("Indexes:\n") for row in cur: #/* untranslated index name */ status.append(' "%s"' % row[0]) #/* If exclusion constraint, print the constraintdef */ if row[7] == "x": status.append(' ') status.append(row[6]) else: #/* Label as primary key or unique (but not both) */ if row[1]: status.append(" PRIMARY KEY,") elif row[2]: if row[7] == "u": status.append(" UNIQUE CONSTRAINT,") else: status.append(" UNIQUE,") # /* Everything after "USING" is echoed verbatim */ indexdef = row[5] usingpos = indexdef.find(" USING ") if (usingpos >= 0): indexdef = indexdef[(usingpos + 7):] status.append(" %s" % indexdef) # /* Need these for deferrable PK/UNIQUE indexes */ if row[8]: status.append(" DEFERRABLE") if row[9]: status.append(" INITIALLY DEFERRED") # /* Add these for all cases */ if row[3]: status.append(" CLUSTER") if not row[4]: status.append(" INVALID") status.append('\n') # printTableAddFooter(&cont, buf.data); # /* Print tablespace of the index on the same line */ # add_tablespace_footer(&cont, 'i', # atooid(PQgetvalue(result, i, 10)), # false); # /* print table (and column) check constraints */ if (tableinfo.checks): sql = ("SELECT r.conname, " "pg_catalog.pg_get_constraintdef(r.oid, true)\n" "FROM pg_catalog.pg_constraint r\n" "WHERE r.conrelid = '%s' AND r.contype = 'c'\n" "ORDER BY 1;" % oid) log.debug(sql) cur.execute(sql) if (cur.rowcount > 0): status.append("Check 
constraints:\n") for row in cur: #/* untranslated contraint name and def */ status.append(" \"%s\" %s" % row) status.append('\n') #/* print foreign-key constraints (there are none if no triggers) */ if (tableinfo.hastriggers): sql = ("SELECT conname,\n" " pg_catalog.pg_get_constraintdef(r.oid, true) as condef\n" "FROM pg_catalog.pg_constraint r\n" "WHERE r.conrelid = '%s' AND r.contype = 'f' ORDER BY 1;" % oid) log.debug(sql) cur.execute(sql) if (cur.rowcount > 0): status.append("Foreign-key constraints:\n") for row in cur: #/* untranslated constraint name and def */ status.append(" \"%s\" %s\n" % row) #/* print incoming foreign-key references (none if no triggers) */ if (tableinfo.hastriggers): sql = ("SELECT conrelid::pg_catalog.regclass, conname,\n" " pg_catalog.pg_get_constraintdef(c.oid, true) as condef\n" "FROM pg_catalog.pg_constraint c\n" "WHERE c.confrelid = '%s' AND c.contype = 'f' ORDER BY 1;" % oid) log.debug(sql) cur.execute(sql) if (cur.rowcount > 0): status.append("Referenced by:\n") for row in cur: status.append(" TABLE \"%s\" CONSTRAINT \"%s\" %s\n" % row) # /* print rules */ if (tableinfo.hasrules and tableinfo.relkind != 'm'): sql = ("SELECT r.rulename, trim(trailing ';' from pg_catalog.pg_get_ruledef(r.oid, true)), " "ev_enabled\n" "FROM pg_catalog.pg_rewrite r\n" "WHERE r.ev_class = '%s' ORDER BY 1;" % oid) log.debug(sql) cur.execute(sql) if (cur.rowcount > 0): for category in range(4): have_heading = False for row in cur: if category == 0 and row[2] == 'O': list_rule = True elif category == 1 and row[2] == 'D': list_rule = True elif category == 2 and row[2] == 'A': list_rule = True elif category == 3 and row[2] == 'R': list_rule = True if not list_rule: continue if not have_heading: if category == 0: status.append("Rules:") if category == 1: status.append("Disabled rules:") if category == 2: status.append("Rules firing always:") if category == 3: status.append("Rules firing on replica only:") have_heading = True # /* Everything after "CREATE RULE" is echoed verbatim */ ruledef = row[1] status.append(" %s" % ruledef) if (view_def): #/* Footer information about a view */ status.append("View definition:\n") status.append("%s \n" % view_def) #/* print rules */ if tableinfo.hasrules: sql = ("SELECT r.rulename, trim(trailing ';' from pg_catalog.pg_get_ruledef(r.oid, true))\n" "FROM pg_catalog.pg_rewrite r\n" "WHERE r.ev_class = '%s' AND r.rulename != '_RETURN' ORDER BY 1;" % oid) log.debug(sql) cur.execute(sql) if (cur.rowcount > 0): status.append("Rules:\n") for row in cur: #/* Everything after "CREATE RULE" is echoed verbatim */ ruledef = row[1] status.append(" %s\n" % ruledef) #/* # * Print triggers next, if any (but only user-defined triggers). This # * could apply to either a table or a view. # */ if tableinfo.hastriggers: if cur.connection.server_version > 90000: sql = """SELECT t.tgname, pg_catalog.pg_get_triggerdef(t.oid, true), t.tgenabled FROM pg_catalog.pg_trigger t WHERE t.tgrelid = '%s' AND NOT t.tgisinternal ORDER BY 1 """ % oid else: sql = """SELECT t.tgname, pg_catalog.pg_get_triggerdef(t.oid), t.tgenabled FROM pg_catalog.pg_trigger t WHERE t.tgrelid = '%s' ORDER BY 1 """ % oid log.debug(sql) cur.execute(sql) if cur.rowcount > 0: #/* #* split the output into 4 different categories. Enabled triggers, #* disabled triggers and the two special ALWAYS and REPLICA #* configurations. 
#*/ for category in range(4): have_heading = False; list_trigger = False; for row in cur: #/* # * Check if this trigger falls into the current category # */ tgenabled = row[2] if category ==0: if (tgenabled == 'O' or tgenabled == True): list_trigger = True elif category ==1: if (tgenabled == 'D' or tgenabled == False): list_trigger = True elif category ==2: if (tgenabled == 'A'): list_trigger = True elif category ==3: if (tgenabled == 'R'): list_trigger = True if list_trigger == False: continue; # /* Print the category heading once */ if not have_heading: if category == 0: status.append("Triggers:") elif category == 1: status.append("Disabled triggers:") elif category == 2: status.append("Triggers firing always:") elif category == 3: status.append("Triggers firing on replica only:") status.append('\n') have_heading = True #/* Everything after "TRIGGER" is echoed verbatim */ tgdef = row[1] triggerpos = tgdef.find(" TRIGGER ") if triggerpos >= 0: tgdef = triggerpos + 9; status.append(" %s\n" % row[1][tgdef:]) #/* #* Finish printing the footer information about a table. #*/ if (tableinfo.relkind == 'r' or tableinfo.relkind == 'm' or tableinfo.relkind == 'f'): #/* print foreign server name */ if tableinfo.relkind == 'f': #/* Footer information about foreign table */ sql = ("SELECT s.srvname,\n" " array_to_string(ARRAY(SELECT " " quote_ident(option_name) || ' ' || " " quote_literal(option_value) FROM " " pg_options_to_table(ftoptions)), ', ') " "FROM pg_catalog.pg_foreign_table f,\n" " pg_catalog.pg_foreign_server s\n" "WHERE f.ftrelid = %s AND s.oid = f.ftserver;" % oid) log.debug(sql) cur.execute(sql) row = cur.fetchone() #/* Print server name */ status.append("Server: %s\n" % row[0]) #/* Print per-table FDW options, if any */ if (row[1]): status.append("FDW Options: (%s)\n" % row[1]) #/* print inherited tables */ sql = ("SELECT c.oid::pg_catalog.regclass FROM pg_catalog.pg_class c, " "pg_catalog.pg_inherits i WHERE c.oid=i.inhparent AND " "i.inhrelid = '%s' ORDER BY inhseqno;" % oid) log.debug(sql) cur.execute(sql) spacer = '' if cur.rowcount > 0: status.append("Inherits") for row in cur: status.append("%s: %s,\n" % (spacer, row)) spacer = ' ' * len('Inherits') #/* print child tables */ if cur.connection.server_version > 90000: sql = """SELECT c.oid::pg_catalog.regclass FROM pg_catalog.pg_class c, pg_catalog.pg_inherits i WHERE c.oid = i.inhrelid AND i.inhparent = '%s' ORDER BY c.oid::pg_catalog.regclass::pg_catalog.text; """ % oid else: sql = """SELECT c.oid::pg_catalog.regclass FROM pg_catalog.pg_class c, pg_catalog.pg_inherits i WHERE c.oid = i.inhrelid AND i.inhparent = '%s' ORDER BY c.oid; """ % oid log.debug(sql) cur.execute(sql) if not verbose: #/* print the number of child tables, if any */ if (cur.rowcount > 0): status.append("Number of child tables: %d (Use \d+ to list" " them.)\n" % cur.rowcount) else: if (cur.rowcount > 0): status.append('Child tables') spacer = ':' trailer = ',\n' #/* display the list of child tables */ for idx, row in enumerate(cur, 1): if idx == 2: spacer = ' ' * (len('Child tables') + 1) if idx == cur.rowcount: trailer = '\n' status.append("%s %s%s" % (spacer, row[0], trailer)) #/* Table type */ if (tableinfo.reloftype): status.append("Typed table of type: %s\n" % tableinfo.reloftype) #/* OIDs, if verbose and not a materialized view */ if (verbose and tableinfo.relkind != 'm'): status.append("Has OIDs: %s\n" % ("yes" if tableinfo.hasoids else "no")) #/* Tablespace info */ #add_tablespace_footer(&cont, tableinfo.relkind, tableinfo.tablespace, #true); # /* 
reloptions, if verbose */ if (verbose and tableinfo.reloptions): status.append("Options: %s\n" % tableinfo.reloptions) return (None, cells, headers, "".join(status)) def sql_name_pattern(pattern): """ Takes a wildcard-pattern and converts to an appropriate SQL pattern to be used in a WHERE clause. Returns: schema_pattern, table_pattern >>> sql_name_pattern('foo*."b""$ar*"') ('^(foo.*)$', '^(b"\\\\$ar\\\\*)$') """ inquotes = False relname = '' schema = None pattern_len = len(pattern) i = 0 while i < pattern_len: c = pattern[i] if c == '"': if inquotes and i + 1 < pattern_len and pattern[i + 1] == '"': relname += '"' i += 1 else: inquotes = not inquotes elif not inquotes and c.isupper(): relname += c.lower() elif not inquotes and c == '*': relname += '.*' elif not inquotes and c == '?': relname += '.' elif not inquotes and c == '.': # Found schema/name separator, move current pattern to schema schema = relname relname = '' else: # Dollar is always quoted, whether inside quotes or not. if c == '$' or inquotes and c in '|*+?()[]{}.^\\': relname += '\\' relname += c i += 1 if relname: relname = '^(' + relname + ')$' if schema: schema = '^(' + schema + ')$' return schema, relname class _FakeCursor(list): "Minimalistic wrapper simulating a real cursor, as far as pgcli is concerned." def rowcount(self): return len(self) @special_command('\\sf', '\\sf[+] FUNCNAME', 'Show a function\'s definition.') def show_function_definition(cur, pattern, verbose): if '(' in pattern: sql = cur.mogrify("SELECT %s::pg_catalog.regprocedure::pg_catalog.oid", [pattern]) else: sql = cur.mogrify("SELECT %s::pg_catalog.regproc::pg_catalog.oid", [pattern]) log.debug(sql) cur.execute(sql) (foid,) = cur.fetchone() sql = cur.mogrify("SELECT pg_catalog.pg_get_functiondef(%s) as source", [foid]) log.debug(sql) cur.execute(sql) if cur.description: headers = [x[0] for x in cur.description] if verbose: (source,) = cur.fetchone() rows = _FakeCursor() rown = None for row in source.splitlines(): if rown is None: if row.startswith('AS '): rown = 1 else: rown += 1 rows.append('%-7s %s' % ('' if rown is None else rown, row)) cur = [('\n'.join(rows) + '\n',)] else: headers = None return [(None, cur, headers, None)] pgspecial-1.9.0/pgspecial/help/0000755000076500000240000000000013174677543016635 5ustar irinastaff00000000000000pgspecial-1.9.0/pgspecial/help/__init__.py0000644000076500000240000000000012732064265020722 0ustar irinastaff00000000000000pgspecial-1.9.0/pgspecial/help/commands.py0000644000076500000240000022762212732064265021011 0ustar irinastaff00000000000000helpcommands = { "ABORT": { "description": "Description\nABORT rolls back the current transaction and causes", "synopsis": "\nABORT [ WORK | TRANSACTION ]\n" }, "ALTER AGGREGATE": { "description": "Description\nALTER AGGREGATE changes the definition of an", "synopsis": "\nALTER AGGREGATE name ( aggregate_signature ) RENAME TO new_name\nALTER AGGREGATE name ( aggregate_signature )\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER AGGREGATE name ( aggregate_signature ) SET SCHEMA new_schema\nwhere aggregate_signature is:\n\n* |\n[ argmode ] [ argname ] argtype [ , ... ] |\n[ [ argmode ] [ argname ] argtype [ , ... ] ] ORDER BY [ argmode ] [ argname ] argtype [ , ... 
]\n" }, "ALTER COLLATION": { "description": "Description\nALTER COLLATION changes the definition of a", "synopsis": "\nALTER COLLATION name RENAME TO new_name\nALTER COLLATION name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER COLLATION name SET SCHEMA new_schema\n" }, "ALTER CONVERSION": { "description": "Description\nALTER CONVERSION changes the definition of a", "synopsis": "\nALTER CONVERSION name RENAME TO new_name\nALTER CONVERSION name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER CONVERSION name SET SCHEMA new_schema\n" }, "ALTER DATABASE": { "description": "Description\nALTER DATABASE changes the attributes", "synopsis": "\nALTER DATABASE name [ [ WITH ] option [ ... ] ]\n\nwhere option can be:\n\n ALLOW_CONNECTIONS allowconn\n CONNECTION LIMIT connlimit\n IS_TEMPLATE istemplate\n\nALTER DATABASE name RENAME TO new_name\n\nALTER DATABASE name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n\nALTER DATABASE name SET TABLESPACE new_tablespace\n\nALTER DATABASE name SET configuration_parameter { TO | = } { value | DEFAULT }\nALTER DATABASE name SET configuration_parameter FROM CURRENT\nALTER DATABASE name RESET configuration_parameter\nALTER DATABASE name RESET ALL\n" }, "ALTER DEFAULT PRIVILEGES": { "description": "Description\nALTER DEFAULT PRIVILEGES allows you to set the privileges", "synopsis": "\nALTER DEFAULT PRIVILEGES\n [ FOR { ROLE | USER } target_role [, ...] ]\n [ IN SCHEMA schema_name [, ...] ]\n abbreviated_grant_or_revoke\nwhere abbreviated_grant_or_revoke is one of:\n\nGRANT { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }\n [, ...] | ALL [ PRIVILEGES ] }\n ON TABLES\n TO { [ GROUP ] role_name | PUBLIC } [, ...] [ WITH GRANT OPTION ]\n\nGRANT { { USAGE | SELECT | UPDATE }\n [, ...] | ALL [ PRIVILEGES ] }\n ON SEQUENCES\n TO { [ GROUP ] role_name | PUBLIC } [, ...] [ WITH GRANT OPTION ]\n\nGRANT { EXECUTE | ALL [ PRIVILEGES ] }\n ON FUNCTIONS\n TO { [ GROUP ] role_name | PUBLIC } [, ...] [ WITH GRANT OPTION ]\n\nGRANT { USAGE | ALL [ PRIVILEGES ] }\n ON TYPES\n TO { [ GROUP ] role_name | PUBLIC } [, ...] [ WITH GRANT OPTION ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }\n [, ...] | ALL [ PRIVILEGES ] }\n ON TABLES\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { USAGE | SELECT | UPDATE }\n [, ...] 
| ALL [ PRIVILEGES ] }\n ON SEQUENCES\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { EXECUTE | ALL [ PRIVILEGES ] }\n ON FUNCTIONS\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { USAGE | ALL [ PRIVILEGES ] }\n ON TYPES\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n" }, "ALTER DOMAIN": { "description": "Description\nALTER DOMAIN changes the definition of an existing domain.", "synopsis": "\nALTER DOMAIN name\n { SET DEFAULT expression | DROP DEFAULT }\nALTER DOMAIN name\n { SET | DROP } NOT NULL\nALTER DOMAIN name\n ADD domain_constraint [ NOT VALID ]\nALTER DOMAIN name\n DROP CONSTRAINT [ IF EXISTS ] constraint_name [ RESTRICT | CASCADE ]\nALTER DOMAIN name\n RENAME CONSTRAINT constraint_name TO new_constraint_name\nALTER DOMAIN name\n VALIDATE CONSTRAINT constraint_name\nALTER DOMAIN name\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER DOMAIN name\n RENAME TO new_name\nALTER DOMAIN name\n SET SCHEMA new_schema\n" }, "ALTER EVENT TRIGGER": { "description": "Description\nALTER EVENT TRIGGER changes properties of an", "synopsis": "\nALTER EVENT TRIGGER name DISABLE\nALTER EVENT TRIGGER name ENABLE [ REPLICA | ALWAYS ]\nALTER EVENT TRIGGER name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER EVENT TRIGGER name RENAME TO new_name\n" }, "ALTER EXTENSION": { "description": "Description\nALTER EXTENSION changes the definition of an installed", "synopsis": "\nALTER EXTENSION name UPDATE [ TO new_version ]\nALTER EXTENSION name SET SCHEMA new_schema\nALTER EXTENSION name ADD member_object\nALTER EXTENSION name DROP member_object\nwhere member_object is:\n\n AGGREGATE aggregate_name ( aggregate_signature ) |\n CAST (source_type AS target_type) |\n COLLATION object_name |\n CONVERSION object_name |\n DOMAIN object_name |\n EVENT TRIGGER object_name |\n FOREIGN DATA WRAPPER object_name |\n FOREIGN TABLE object_name |\n FUNCTION function_name ( [ [ argmode ] [ argname ] argtype [, ...] ] ) |\n MATERIALIZED VIEW object_name |\n OPERATOR operator_name (left_type, right_type) |\n OPERATOR CLASS object_name USING index_method |\n OPERATOR FAMILY object_name USING index_method |\n [ PROCEDURAL ] LANGUAGE object_name |\n SCHEMA object_name |\n SEQUENCE object_name |\n SERVER object_name |\n TABLE object_name |\n TEXT SEARCH CONFIGURATION object_name |\n TEXT SEARCH DICTIONARY object_name |\n TEXT SEARCH PARSER object_name |\n TEXT SEARCH TEMPLATE object_name |\n TRANSFORM FOR type_name LANGUAGE lang_name |\n TYPE object_name |\n VIEW object_name\nand aggregate_signature is:\n\n* |\n[ argmode ] [ argname ] argtype [ , ... ] |\n[ [ argmode ] [ argname ] argtype [ , ... ] ] ORDER BY [ argmode ] [ argname ] argtype [ , ... ]\n" }, "ALTER FOREIGN DATA WRAPPER": { "description": "Description\nALTER FOREIGN DATA WRAPPER changes the", "synopsis": "\nALTER FOREIGN DATA WRAPPER name\n [ HANDLER handler_function | NO HANDLER ]\n [ VALIDATOR validator_function | NO VALIDATOR ]\n [ OPTIONS ( [ ADD | SET | DROP ] option ['value'] [, ... ]) ]\nALTER FOREIGN DATA WRAPPER name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER FOREIGN DATA WRAPPER name RENAME TO new_name\n" }, "ALTER FOREIGN TABLE": { "description": "Description\nALTER FOREIGN TABLE changes the definition of an", "synopsis": "\nALTER FOREIGN TABLE [ IF EXISTS ] [ ONLY ] name [ * ]\n action [, ... 
]\nALTER FOREIGN TABLE [ IF EXISTS ] [ ONLY ] name [ * ]\n RENAME [ COLUMN ] column_name TO new_column_name\nALTER FOREIGN TABLE [ IF EXISTS ] name\n RENAME TO new_name\nALTER FOREIGN TABLE [ IF EXISTS ] name\n SET SCHEMA new_schema\nwhere action is one of:\n\n ADD [ COLUMN ] column_name data_type [ COLLATE collation ] [ column_constraint [ ... ] ]\n DROP [ COLUMN ] [ IF EXISTS ] column_name [ RESTRICT | CASCADE ]\n ALTER [ COLUMN ] column_name [ SET DATA ] TYPE data_type [ COLLATE collation ]\n ALTER [ COLUMN ] column_name SET DEFAULT expression\n ALTER [ COLUMN ] column_name DROP DEFAULT\n ALTER [ COLUMN ] column_name { SET | DROP } NOT NULL\n ALTER [ COLUMN ] column_name SET STATISTICS integer\n ALTER [ COLUMN ] column_name SET ( attribute_option = value [, ... ] )\n ALTER [ COLUMN ] column_name RESET ( attribute_option [, ... ] )\n ALTER [ COLUMN ] column_name SET STORAGE { PLAIN | EXTERNAL | EXTENDED | MAIN }\n ALTER [ COLUMN ] column_name OPTIONS ( [ ADD | SET | DROP ] option ['value'] [, ... ])\n ADD table_constraint [ NOT VALID ]\n VALIDATE CONSTRAINT constraint_name\n DROP CONSTRAINT [ IF EXISTS ] constraint_name [ RESTRICT | CASCADE ]\n DISABLE TRIGGER [ trigger_name | ALL | USER ]\n ENABLE TRIGGER [ trigger_name | ALL | USER ]\n ENABLE REPLICA TRIGGER trigger_name\n ENABLE ALWAYS TRIGGER trigger_name\n SET WITH OIDS\n SET WITHOUT OIDS\n INHERIT parent_table\n NO INHERIT parent_table\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n OPTIONS ( [ ADD | SET | DROP ] option ['value'] [, ... ])\n" }, "ALTER FUNCTION": { "description": "Description\nALTER FUNCTION changes the definition of a", "synopsis": "\nALTER FUNCTION name ( [ [ argmode ] [ argname ] argtype [, ...] ] )\n action [ ... ] [ RESTRICT ]\nALTER FUNCTION name ( [ [ argmode ] [ argname ] argtype [, ...] ] )\n RENAME TO new_name\nALTER FUNCTION name ( [ [ argmode ] [ argname ] argtype [, ...] ] )\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER FUNCTION name ( [ [ argmode ] [ argname ] argtype [, ...] ] )\n SET SCHEMA new_schema\nwhere action is one of:\n\n CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT\n IMMUTABLE | STABLE | VOLATILE | [ NOT ] LEAKPROOF\n [ EXTERNAL ] SECURITY INVOKER | [ EXTERNAL ] SECURITY DEFINER\n PARALLEL { UNSAFE | RESTRICTED | SAFE }\n COST execution_cost\n ROWS result_rows\n SET configuration_parameter { TO | = } { value | DEFAULT }\n SET configuration_parameter FROM CURRENT\n RESET configuration_parameter\n RESET ALL\n" }, "ALTER GROUP": { "description": "Description\nALTER GROUP changes the attributes of a user group.", "synopsis": "\nALTER GROUP role_specification ADD USER user_name [, ... ]\nALTER GROUP role_specification DROP USER user_name [, ... ]\n\nwhere role_specification can be:\nrole_name\n | CURRENT_USER\n | SESSION_USER\n\nALTER GROUP group_name RENAME TO new_name\n" }, "ALTER INDEX": { "description": "Description\nALTER INDEX changes the definition of an existing index.", "synopsis": "\nALTER INDEX [ IF EXISTS ] name RENAME TO new_name\nALTER INDEX [ IF EXISTS ] name SET TABLESPACE tablespace_name\nALTER INDEX [ IF EXISTS ] name SET ( storage_parameter = value [, ... ] )\nALTER INDEX [ IF EXISTS ] name RESET ( storage_parameter [, ... ] )\nALTER INDEX ALL IN TABLESPACE name [ OWNED BY role_name [, ... 
] ]\n SET TABLESPACE new_tablespace [ NOWAIT ]\n" }, "ALTER LANGUAGE": { "description": "Description\nALTER LANGUAGE changes the definition of a", "synopsis": "\nALTER [ PROCEDURAL ] LANGUAGE name RENAME TO new_name\nALTER [ PROCEDURAL ] LANGUAGE name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n" }, "ALTER LARGE OBJECT": { "description": "Description\nALTER LARGE OBJECT changes the definition of a", "synopsis": "\nALTER LARGE OBJECT large_object_oid OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n" }, "ALTER MATERIALIZED VIEW": { "description": "Description\nALTER MATERIALIZED VIEW changes various auxiliary", "synopsis": "\nALTER MATERIALIZED VIEW [ IF EXISTS ] name\naction [, ... ]\nALTER MATERIALIZED VIEW [ IF EXISTS ] name\n RENAME [ COLUMN ] column_name TO new_column_name\nALTER MATERIALIZED VIEW [ IF EXISTS ] name\n RENAME TO new_name\nALTER MATERIALIZED VIEW [ IF EXISTS ] name\n SET SCHEMA new_schema\nALTER MATERIALIZED VIEW ALL IN TABLESPACE name [ OWNED BY role_name [, ... ] ]\n SET TABLESPACE new_tablespace [ NOWAIT ]\n\nwhere action is one of:\n\n ALTER [ COLUMN ] column_name SET STATISTICS integer\n ALTER [ COLUMN ] column_name SET ( attribute_option = value [, ... ] )\n ALTER [ COLUMN ] column_name RESET ( attribute_option [, ... ] )\n ALTER [ COLUMN ] column_name SET STORAGE { PLAIN | EXTERNAL | EXTENDED | MAIN }\n CLUSTER ON index_name\n SET WITHOUT CLUSTER\n SET ( storage_parameter = value [, ... ] )\n RESET ( storage_parameter [, ... ] )\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n SET TABLESPACE new_tablespace\n" }, "ALTER OPCLASS": { "description": "Description\nALTER OPERATOR CLASS changes the definition of", "synopsis": "\nALTER OPERATOR CLASS name USING index_method\n RENAME TO new_name\n\nALTER OPERATOR CLASS name USING index_method\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n\nALTER OPERATOR CLASS name USING index_method\n SET SCHEMA new_schema\n" }, "ALTER OPERATOR": { "description": "Description\nALTER OPERATOR changes the definition of", "synopsis": "\nALTER OPERATOR name ( { left_type | NONE } , { right_type | NONE } )\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n\nALTER OPERATOR name ( { left_type | NONE } , { right_type | NONE } )\n SET SCHEMA new_schema\n\nALTER OPERATOR name ( { left_type | NONE } , { right_type | NONE } )\n SET ( { RESTRICT = { res_proc | NONE }\n | JOIN = { join_proc | NONE }\n } [, ... ] )\n" }, "ALTER OPFAMILY": { "description": "Description\nALTER OPERATOR FAMILY changes the definition of", "synopsis": "\nALTER OPERATOR FAMILY name USING index_method ADD\n { OPERATOR strategy_number operator_name ( op_type, op_type )\n [ FOR SEARCH | FOR ORDER BY sort_family_name ]\n | FUNCTION support_number [ ( op_type [ , op_type ] ) ]\n function_name ( argument_type [, ...] )\n } [, ... ]\n\nALTER OPERATOR FAMILY name USING index_method DROP\n { OPERATOR strategy_number ( op_type [ , op_type ] )\n | FUNCTION support_number ( op_type [ , op_type ] )\n } [, ... ]\n\nALTER OPERATOR FAMILY name USING index_method\n RENAME TO new_name\n\nALTER OPERATOR FAMILY name USING index_method\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n\nALTER OPERATOR FAMILY name USING index_method\n SET SCHEMA new_schema\n" }, "ALTER POLICY": { "description": "Description\nALTER POLICY changes the ", "synopsis": "\nALTER POLICY name ON table_name\n [ RENAME TO new_name ]\n [ TO { role_name | PUBLIC | CURRENT_USER | SESSION_USER } [, ...] 
]\n [ USING ( using_expression ) ]\n [ WITH CHECK ( check_expression ) ]\n" }, "ALTER ROLE": { "description": "Description\nALTER ROLE changes the attributes of a", "synopsis": "\nALTER ROLE role_specification [ WITH ] option [ ... ]\n\nwhere option can be:\n\n SUPERUSER | NOSUPERUSER\n | CREATEDB | NOCREATEDB\n | CREATEROLE | NOCREATEROLE\n | CREATEUSER | NOCREATEUSER\n | INHERIT | NOINHERIT\n | LOGIN | NOLOGIN\n | REPLICATION | NOREPLICATION\n | BYPASSRLS | NOBYPASSRLS\n | CONNECTION LIMIT connlimit\n | [ ENCRYPTED | UNENCRYPTED ] PASSWORD 'password'\n | VALID UNTIL 'timestamp'\n\nALTER ROLE name RENAME TO new_name\n\nALTER ROLE { role_specification | ALL } [ IN DATABASE database_name ] SET configuration_parameter { TO | = } { value | DEFAULT }\nALTER ROLE { role_specification | ALL } [ IN DATABASE database_name ] SET configuration_parameter FROM CURRENT\nALTER ROLE { role_specification | ALL } [ IN DATABASE database_name ] RESET configuration_parameter\nALTER ROLE { role_specification | ALL } [ IN DATABASE database_name ] RESET ALL\n\nwhere role_specification can be:\n\n [ GROUP ] role_name\n | CURRENT_USER\n | SESSION_USER\n" }, "ALTER RULE": { "description": "Description\nALTER RULE changes properties of an existing", "synopsis": "\nALTER RULE name ON table_name RENAME TO new_name\n" }, "ALTER SCHEMA": { "description": "Description\nALTER SCHEMA changes the definition of a schema.", "synopsis": "\nALTER SCHEMA name RENAME TO new_name\nALTER SCHEMA name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n" }, "ALTER SEQUENCE": { "description": "Description\nALTER SEQUENCE changes the parameters of an existing", "synopsis": "\nALTER SEQUENCE [ IF EXISTS ] name [ INCREMENT [ BY ] increment ]\n [ MINVALUE minvalue | NO MINVALUE ] [ MAXVALUE maxvalue | NO MAXVALUE ]\n [ START [ WITH ] start ]\n [ RESTART [ [ WITH ] restart ] ]\n [ CACHE cache ] [ [ NO ] CYCLE ]\n [ OWNED BY { table_name.column_name | NONE } ]\nALTER SEQUENCE [ IF EXISTS ] name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER SEQUENCE [ IF EXISTS ] name RENAME TO new_name\nALTER SEQUENCE [ IF EXISTS ] name SET SCHEMA new_schema\n" }, "ALTER SERVER": { "description": "Description\nALTER SERVER changes the definition of a foreign", "synopsis": "\nALTER SERVER name [ VERSION 'new_version' ]\n [ OPTIONS ( [ ADD | SET | DROP ] option ['value'] [, ... ] ) ]\nALTER SERVER name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER SERVER name RENAME TO new_name\n" }, "ALTER SYSTEM": { "description": "Description\nALTER SYSTEM is used for changing server configuration", "synopsis": "\nALTER SYSTEM SET configuration_parameter { TO | = } { value | 'value' | DEFAULT }\n\nALTER SYSTEM RESET configuration_parameter\nALTER SYSTEM RESET ALL\n" }, "ALTER TABLE": { "description": "Description\nALTER TABLE changes the definition of an existing table.", "synopsis": "\nALTER TABLE [ IF EXISTS ] [ ONLY ] name [ * ]\n action [, ... ]\nALTER TABLE [ IF EXISTS ] [ ONLY ] name [ * ]\n RENAME [ COLUMN ] column_name TO new_column_name\nALTER TABLE [ IF EXISTS ] [ ONLY ] name [ * ]\n RENAME CONSTRAINT constraint_name TO new_constraint_name\nALTER TABLE [ IF EXISTS ] name\n RENAME TO new_name\nALTER TABLE [ IF EXISTS ] name\n SET SCHEMA new_schema\nALTER TABLE ALL IN TABLESPACE name [ OWNED BY role_name [, ... ] ]\n SET TABLESPACE new_tablespace [ NOWAIT ]\n\nwhere action is one of:\n\n ADD [ COLUMN ] [ IF NOT EXISTS ] column_name data_type [ COLLATE collation ] [ column_constraint [ ... 
] ]\n DROP [ COLUMN ] [ IF EXISTS ] column_name [ RESTRICT | CASCADE ]\n ALTER [ COLUMN ] column_name [ SET DATA ] TYPE data_type [ COLLATE collation ] [ USING expression ]\n ALTER [ COLUMN ] column_name SET DEFAULT expression\n ALTER [ COLUMN ] column_name DROP DEFAULT\n ALTER [ COLUMN ] column_name { SET | DROP } NOT NULL\n ALTER [ COLUMN ] column_name SET STATISTICS integer\n ALTER [ COLUMN ] column_name SET ( attribute_option = value [, ... ] )\n ALTER [ COLUMN ] column_name RESET ( attribute_option [, ... ] )\n ALTER [ COLUMN ] column_name SET STORAGE { PLAIN | EXTERNAL | EXTENDED | MAIN }\n ADD table_constraint [ NOT VALID ]\n ADD table_constraint_using_index\n ALTER CONSTRAINT constraint_name [ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ]\n VALIDATE CONSTRAINT constraint_name\n DROP CONSTRAINT [ IF EXISTS ] constraint_name [ RESTRICT | CASCADE ]\n DISABLE TRIGGER [ trigger_name | ALL | USER ]\n ENABLE TRIGGER [ trigger_name | ALL | USER ]\n ENABLE REPLICA TRIGGER trigger_name\n ENABLE ALWAYS TRIGGER trigger_name\n DISABLE RULE rewrite_rule_name\n ENABLE RULE rewrite_rule_name\n ENABLE REPLICA RULE rewrite_rule_name\n ENABLE ALWAYS RULE rewrite_rule_name\n DISABLE ROW LEVEL SECURITY\n ENABLE ROW LEVEL SECURITY\n CLUSTER ON index_name\n SET WITHOUT CLUSTER\n SET WITH OIDS\n SET WITHOUT OIDS\n SET TABLESPACE new_tablespace\n SET { LOGGED | UNLOGGED }\n SET ( storage_parameter = value [, ... ] )\n RESET ( storage_parameter [, ... ] )\n INHERIT parent_table\n NO INHERIT parent_table\n OF type_name\n NOT OF\n OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\n REPLICA IDENTITY { DEFAULT | USING INDEX index_name | FULL | NOTHING }\n\nand table_constraint_using_index is:\n\n [ CONSTRAINT constraint_name ]\n { UNIQUE | PRIMARY KEY } USING INDEX index_name\n [ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ]\n" }, "ALTER TABLESPACE": { "description": "Description\nALTER TABLESPACE can be used to change the definition of", "synopsis": "\nALTER TABLESPACE name RENAME TO new_name\nALTER TABLESPACE name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER TABLESPACE name SET ( tablespace_option = value [, ... ] )\nALTER TABLESPACE name RESET ( tablespace_option [, ... ] )\n" }, "ALTER TRIGGER": { "description": "Description\nALTER TRIGGER changes properties of an existing", "synopsis": "\nALTER TRIGGER name ON table_name RENAME TO new_name\n" }, "ALTER TSCONFIG": { "description": "Description\nALTER TEXT SEARCH CONFIGURATION changes the definition of", "synopsis": "\nALTER TEXT SEARCH CONFIGURATION name\n ADD MAPPING FOR token_type [, ... ] WITH dictionary_name [, ... ]\nALTER TEXT SEARCH CONFIGURATION name\n ALTER MAPPING FOR token_type [, ... ] WITH dictionary_name [, ... ]\nALTER TEXT SEARCH CONFIGURATION name\n ALTER MAPPING REPLACE old_dictionary WITH new_dictionary\nALTER TEXT SEARCH CONFIGURATION name\n ALTER MAPPING FOR token_type [, ... ] REPLACE old_dictionary WITH new_dictionary\nALTER TEXT SEARCH CONFIGURATION name\n DROP MAPPING [ IF EXISTS ] FOR token_type [, ... ]\nALTER TEXT SEARCH CONFIGURATION name RENAME TO new_name\nALTER TEXT SEARCH CONFIGURATION name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER TEXT SEARCH CONFIGURATION name SET SCHEMA new_schema\n" }, "ALTER TSDICTIONARY": { "description": "Description\nALTER TEXT SEARCH DICTIONARY changes the definition of", "synopsis": "\nALTER TEXT SEARCH DICTIONARY name (\n option [ = value ] [, ... 
]\n)\nALTER TEXT SEARCH DICTIONARY name RENAME TO new_name\nALTER TEXT SEARCH DICTIONARY name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER TEXT SEARCH DICTIONARY name SET SCHEMA new_schema\n" }, "ALTER TSPARSER": { "description": "Description\nALTER TEXT SEARCH PARSER changes the definition of", "synopsis": "\nALTER TEXT SEARCH PARSER name RENAME TO new_name\nALTER TEXT SEARCH PARSER name SET SCHEMA new_schema\n" }, "ALTER TSTEMPLATE": { "description": "Description\nALTER TEXT SEARCH TEMPLATE changes the definition of", "synopsis": "\nALTER TEXT SEARCH TEMPLATE name RENAME TO new_name\nALTER TEXT SEARCH TEMPLATE name SET SCHEMA new_schema\n" }, "ALTER TYPE": { "description": "Description\nALTER TYPE changes the definition of an existing type.", "synopsis": "\nALTER TYPE name action [, ... ]\nALTER TYPE name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER TYPE name RENAME ATTRIBUTE attribute_name TO new_attribute_name [ CASCADE | RESTRICT ]\nALTER TYPE name RENAME TO new_name\nALTER TYPE name SET SCHEMA new_schema\nALTER TYPE name ADD VALUE [ IF NOT EXISTS ] new_enum_value [ { BEFORE | AFTER } existing_enum_value ]\n\nwhere action is one of:\n\n ADD ATTRIBUTE attribute_name data_type [ COLLATE collation ] [ CASCADE | RESTRICT ]\n DROP ATTRIBUTE [ IF EXISTS ] attribute_name [ CASCADE | RESTRICT ]\n ALTER ATTRIBUTE attribute_name [ SET DATA ] TYPE data_type [ COLLATE collation ] [ CASCADE | RESTRICT ]\n" }, "ALTER USER": { "description": "Description\nALTER USER is now an alias for", "synopsis": "\nALTER USER role_specification [ WITH ] option [ ... ]\n\nwhere option can be:\n\n SUPERUSER | NOSUPERUSER\n | CREATEDB | NOCREATEDB\n | CREATEROLE | NOCREATEROLE\n | CREATEUSER | NOCREATEUSER\n | INHERIT | NOINHERIT\n | LOGIN | NOLOGIN\n | REPLICATION | NOREPLICATION\n | CONNECTION LIMIT connlimit\n | [ ENCRYPTED | UNENCRYPTED ] PASSWORD 'password'\n | VALID UNTIL 'timestamp'\n\nALTER USER name RENAME TO new_name\n\nALTER USER role_specification SET configuration_parameter { TO | = } { value | DEFAULT }\nALTER USER role_specification SET configuration_parameter FROM CURRENT\nALTER USER role_specification RESET configuration_parameter\nALTER USER role_specification RESET ALL\n\nwhere role_specification can be:\n\n [ GROUP ] role_name\n | CURRENT_USER\n | SESSION_USER\n" }, "ALTER USER MAPPING": { "description": "Description\nALTER USER MAPPING changes the definition of a", "synopsis": "\nALTER USER MAPPING FOR { user_name | USER | CURRENT_USER | SESSION_USER | PUBLIC }\n SERVER server_name\n OPTIONS ( [ ADD | SET | DROP ] option ['value'] [, ... ] )\n" }, "ALTER VIEW": { "description": "Description\nALTER VIEW changes various auxiliary properties", "synopsis": "\nALTER VIEW [ IF EXISTS ] name ALTER [ COLUMN ] column_name SET DEFAULT expression\nALTER VIEW [ IF EXISTS ] name ALTER [ COLUMN ] column_name DROP DEFAULT\nALTER VIEW [ IF EXISTS ] name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }\nALTER VIEW [ IF EXISTS ] name RENAME TO new_name\nALTER VIEW [ IF EXISTS ] name SET SCHEMA new_schema\nALTER VIEW [ IF EXISTS ] name SET ( view_option_name [= view_option_value] [, ... ] )\nALTER VIEW [ IF EXISTS ] name RESET ( view_option_name [, ... ] )\n" }, "ANALYZE": { "description": "Description\nANALYZE collects statistics about the contents", "synopsis": "\nANALYZE [ VERBOSE ] [ table_name [ ( column_name [, ...] 
) ] ]\n" }, "BEGIN": { "description": "Description\nBEGIN initiates a transaction block, that is,", "synopsis": "\nBEGIN [ WORK | TRANSACTION ] [ transaction_mode [, ...] ]\n\nwhere transaction_mode is one of:\n\n ISOLATION LEVEL { SERIALIZABLE | REPEATABLE READ | READ COMMITTED | READ UNCOMMITTED }\n READ WRITE | READ ONLY\n [ NOT ] DEFERRABLE\n" }, "CHECKPOINT": { "description": "Description\n A checkpoint is a point in the transaction log sequence at which", "synopsis": "\nCHECKPOINT\n" }, "CLOSE": { "description": "Description\nCLOSE frees the resources associated with an open cursor.", "synopsis": "\nCLOSE { name | ALL }\n" }, "CLUSTER": { "description": "Description\nCLUSTER instructs PostgreSQL", "synopsis": "\nCLUSTER [VERBOSE] table_name [ USING index_name ]\nCLUSTER [VERBOSE]\n" }, "COMMENT": { "description": "Description\nCOMMENT stores a comment about a database object.", "synopsis": "\nCOMMENT ON\n{\n AGGREGATE aggregate_name ( aggregate_signature ) |\n CAST (source_type AS target_type) |\n COLLATION object_name |\n COLUMN relation_name.column_name |\n CONSTRAINT constraint_name ON table_name |\n CONSTRAINT constraint_name ON DOMAIN domain_name |\n CONVERSION object_name |\n DATABASE object_name |\n DOMAIN object_name |\n EXTENSION object_name |\n EVENT TRIGGER object_name |\n FOREIGN DATA WRAPPER object_name |\n FOREIGN TABLE object_name |\n FUNCTION function_name ( [ [ argmode ] [ argname ] argtype [, ...] ] ) |\n INDEX object_name |\n LARGE OBJECT large_object_oid |\n MATERIALIZED VIEW object_name |\n OPERATOR operator_name (left_type, right_type) |\n OPERATOR CLASS object_name USING index_method |\n OPERATOR FAMILY object_name USING index_method |\n POLICY policy_name ON table_name |\n [ PROCEDURAL ] LANGUAGE object_name |\n ROLE object_name |\n RULE rule_name ON table_name |\n SCHEMA object_name |\n SEQUENCE object_name |\n SERVER object_name |\n TABLE object_name |\n TABLESPACE object_name |\n TEXT SEARCH CONFIGURATION object_name |\n TEXT SEARCH DICTIONARY object_name |\n TEXT SEARCH PARSER object_name |\n TEXT SEARCH TEMPLATE object_name |\n TRANSFORM FOR type_name LANGUAGE lang_name |\n TRIGGER trigger_name ON table_name |\n TYPE object_name |\n VIEW object_name\n} IS 'text'\n\nwhere aggregate_signature is:\n\n* |\n[ argmode ] [ argname ] argtype [ , ... ] |\n[ [ argmode ] [ argname ] argtype [ , ... ] ] ORDER BY [ argmode ] [ argname ] argtype [ , ... ]\n" }, "COMMIT": { "description": "Description\nCOMMIT commits the current transaction. All", "synopsis": "\nCOMMIT [ WORK | TRANSACTION ]\n" }, "COMMIT PREPARED": { "description": "Description\nCOMMIT PREPARED commits a transaction that is in", "synopsis": "\nCOMMIT PREPARED transaction_id\n" }, "COPY": { "description": "Description\nCOPY moves data between", "synopsis": "\nCOPY table_name [ ( column_name [, ...] ) ]\n FROM { 'filename' | PROGRAM 'command' | STDIN }\n [ [ WITH ] ( option [, ...] ) ]\n\nCOPY { table_name [ ( column_name [, ...] ) ] | ( query ) }\n TO { 'filename' | PROGRAM 'command' | STDOUT }\n [ [ WITH ] ( option [, ...] ) ]\n\nwhere option can be one of:\n\n FORMAT format_name\n OIDS [ boolean ]\n FREEZE [ boolean ]\n DELIMITER 'delimiter_character'\n NULL 'null_string'\n HEADER [ boolean ]\n QUOTE 'quote_character'\n ESCAPE 'escape_character'\n FORCE_QUOTE { ( column_name [, ...] ) | * }\n FORCE_NOT_NULL ( column_name [, ...] )\n FORCE_NULL ( column_name [, ...] 
)\n ENCODING 'encoding_name'\n" }, "CREATE AGGREGATE": { "description": "Description\nCREATE AGGREGATE defines a new aggregate", "synopsis": "\nCREATE AGGREGATE name ( [ argmode ] [ argname ] arg_data_type [ , ... ] ) (\n SFUNC = sfunc,\n STYPE = state_data_type\n [ , SSPACE = state_data_size ]\n [ , FINALFUNC = ffunc ]\n [ , FINALFUNC_EXTRA ]\n [ , INITCOND = initial_condition ]\n [ , MSFUNC = msfunc ]\n [ , MINVFUNC = minvfunc ]\n [ , MSTYPE = mstate_data_type ]\n [ , MSSPACE = mstate_data_size ]\n [ , MFINALFUNC = mffunc ]\n [ , MFINALFUNC_EXTRA ]\n [ , MINITCOND = minitial_condition ]\n [ , SORTOP = sort_operator ]\n)\n\nCREATE AGGREGATE name ( [ [ argmode ] [ argname ] arg_data_type [ , ... ] ]\n ORDER BY [ argmode ] [ argname ] arg_data_type [ , ... ] ) (\n SFUNC = sfunc,\n STYPE = state_data_type\n [ , SSPACE = state_data_size ]\n [ , FINALFUNC = ffunc ]\n [ , FINALFUNC_EXTRA ]\n [ , INITCOND = initial_condition ]\n [ , HYPOTHETICAL ]\n)\n\nor the old syntax\n\nCREATE AGGREGATE name (\n BASETYPE = base_type,\n SFUNC = sfunc,\n STYPE = state_data_type\n [ , SSPACE = state_data_size ]\n [ , FINALFUNC = ffunc ]\n [ , FINALFUNC_EXTRA ]\n [ , INITCOND = initial_condition ]\n [ , MSFUNC = msfunc ]\n [ , MINVFUNC = minvfunc ]\n [ , MSTYPE = mstate_data_type ]\n [ , MSSPACE = mstate_data_size ]\n [ , MFINALFUNC = mffunc ]\n [ , MFINALFUNC_EXTRA ]\n [ , MINITCOND = minitial_condition ]\n [ , SORTOP = sort_operator ]\n)\n" }, "CREATE CAST": { "description": "Description\nCREATE CAST defines a new cast. A cast", "synopsis": "\nCREATE CAST (source_type AS target_type)\n WITH FUNCTION function_name (argument_type [, ...])\n [ AS ASSIGNMENT | AS IMPLICIT ]\n\nCREATE CAST (source_type AS target_type)\n WITHOUT FUNCTION\n [ AS ASSIGNMENT | AS IMPLICIT ]\n\nCREATE CAST (source_type AS target_type)\n WITH INOUT\n [ AS ASSIGNMENT | AS IMPLICIT ]\n" }, "CREATE COLLATION": { "description": "Description\nCREATE COLLATION defines a new collation using", "synopsis": "\nCREATE COLLATION name (\n [ LOCALE = locale, ]\n [ LC_COLLATE = lc_collate, ]\n [ LC_CTYPE = lc_ctype ]\n)\nCREATE COLLATION name FROM existing_collation\n" }, "CREATE CONVERSION": { "description": "Description\nCREATE CONVERSION defines a new conversion between", "synopsis": "\nCREATE [ DEFAULT ] CONVERSION name\n FOR source_encoding TO dest_encoding FROM function_name\n" }, "CREATE DATABASE": { "description": "Description\nCREATE DATABASE creates a new", "synopsis": "\nCREATE DATABASE name\n [ [ WITH ] [ OWNER [=] user_name ]\n [ TEMPLATE [=] template ]\n [ ENCODING [=] encoding ]\n [ LC_COLLATE [=] lc_collate ]\n [ LC_CTYPE [=] lc_ctype ]\n [ TABLESPACE [=] tablespace_name ]\n [ ALLOW_CONNECTIONS [=] allowconn ]\n [ CONNECTION LIMIT [=] connlimit ] ]\n [ IS_TEMPLATE [=] istemplate ]\n" }, "CREATE DOMAIN": { "description": "Description\nCREATE DOMAIN creates a new domain. A domain is", "synopsis": "\nCREATE DOMAIN name [ AS ] data_type\n [ COLLATE collation ]\n [ DEFAULT expression ]\n [ constraint [ ... ] ]\n\nwhere constraint is:\n\n[ CONSTRAINT constraint_name ]\n{ NOT NULL | NULL | CHECK (expression) }\n" }, "CREATE EVENT TRIGGER": { "description": "Description\nCREATE EVENT TRIGGER creates a new event trigger.", "synopsis": "\nCREATE EVENT TRIGGER name\n ON event\n [ WHEN filter_variable IN (filter_value [, ... ]) [ AND ... 
] ]\n EXECUTE PROCEDURE function_name()\n" }, "CREATE EXTENSION": { "description": "Description\nCREATE EXTENSION loads a new extension into the current", "synopsis": "\nCREATE EXTENSION [ IF NOT EXISTS ] extension_name\n [ WITH ] [ SCHEMA schema_name ]\n [ VERSION version ]\n [ FROM old_version ]\n" }, "CREATE FOREIGN DATA WRAPPER": { "description": "Description\nCREATE FOREIGN DATA WRAPPER creates a new", "synopsis": "\nCREATE FOREIGN DATA WRAPPER name\n [ HANDLER handler_function | NO HANDLER ]\n [ VALIDATOR validator_function | NO VALIDATOR ]\n [ OPTIONS ( option 'value' [, ... ] ) ]\n" }, "CREATE FOREIGN TABLE": { "description": "Description\nCREATE FOREIGN TABLE creates a new foreign table", "synopsis": "\nCREATE FOREIGN TABLE [ IF NOT EXISTS ] table_name ( [\n { column_name data_type [ OPTIONS ( option 'value' [, ... ] ) ] [ COLLATE collation ] [ column_constraint [ ... ] ]\n | table_constraint }\n [, ... ]\n] )\n[ INHERITS ( parent_table [, ... ] ) ]\n SERVER server_name\n[ OPTIONS ( option 'value' [, ... ] ) ]\n\nwhere column_constraint is:\n\n[ CONSTRAINT constraint_name ]\n{ NOT NULL |\n NULL |\n CHECK ( expression ) [ NO INHERIT ] |\n DEFAULT default_expr }\n\nand table_constraint is:\n\n[ CONSTRAINT constraint_name ]\nCHECK ( expression ) [ NO INHERIT ]\n" }, "CREATE FUNCTION": { "description": "Description\nCREATE FUNCTION defines a new function.", "synopsis": "\nCREATE [ OR REPLACE ] FUNCTION\n name ( [ [ argmode ] [ argname ] argtype [ { DEFAULT | = } default_expr ] [, ...] ] )\n [ RETURNS rettype\n | RETURNS TABLE ( column_name column_type [, ...] ) ]\n { LANGUAGE lang_name\n | TRANSFORM { FOR TYPE type_name } [, ... ]\n | WINDOW\n | IMMUTABLE | STABLE | VOLATILE | [ NOT ] LEAKPROOF\n | CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT\n | [ EXTERNAL ] SECURITY INVOKER | [ EXTERNAL ] SECURITY DEFINER\n | PARALLEL { UNSAFE | RESTRICTED | SAFE }\n | COST execution_cost\n | ROWS result_rows\n | SET configuration_parameter { TO value | = value | FROM CURRENT }\n | AS 'definition'\n | AS 'obj_file', 'link_symbol'\n } ...\n [ WITH ( attribute [, ...] ) ]\n" }, "CREATE GROUP": { "description": "Description\nCREATE GROUP is now an alias for", "synopsis": "\nCREATE GROUP name [ [ WITH ] option [ ... ] ]\n\nwhere option can be:\n\n SUPERUSER | NOSUPERUSER\n | CREATEDB | NOCREATEDB\n | CREATEROLE | NOCREATEROLE\n | CREATEUSER | NOCREATEUSER\n | INHERIT | NOINHERIT\n | LOGIN | NOLOGIN\n | [ ENCRYPTED | UNENCRYPTED ] PASSWORD 'password'\n | VALID UNTIL 'timestamp'\n | IN ROLE role_name [, ...]\n | IN GROUP role_name [, ...]\n | ROLE role_name [, ...]\n | ADMIN role_name [, ...]\n | USER role_name [, ...]\n | SYSID uid\n" }, "CREATE INDEX": { "description": "Description\nCREATE INDEX constructs an index on the specified column(s)", "synopsis": "\nCREATE [ UNIQUE ] INDEX [ CONCURRENTLY ] [ [ IF NOT EXISTS ] name ] ON table_name [ USING method ]\n ( { column_name | ( expression ) } [ COLLATE collation ] [ opclass ] [ ASC | DESC ] [ NULLS { FIRST | LAST } ] [, ...] )\n [ WITH ( storage_parameter = value [, ... 
] ) ]\n [ TABLESPACE tablespace_name ]\n [ WHERE predicate ]\n" }, "CREATE LANGUAGE": { "description": "Description\nCREATE LANGUAGE registers a new", "synopsis": "\nCREATE [ OR REPLACE ] [ PROCEDURAL ] LANGUAGE name\nCREATE [ OR REPLACE ] [ TRUSTED ] [ PROCEDURAL ] LANGUAGE name\n HANDLER call_handler [ INLINE inline_handler ] [ VALIDATOR valfunction ]\n" }, "CREATE MATERIALIZED VIEW": { "description": "Description\nCREATE MATERIALIZED VIEW defines a materialized view of", "synopsis": "\nCREATE MATERIALIZED VIEW [ IF NOT EXISTS ] table_name\n [ (column_name [, ...] ) ]\n [ WITH ( storage_parameter [= value] [, ... ] ) ]\n [ TABLESPACE tablespace_name ]\n AS query\n [ WITH [ NO ] DATA ]\n" }, "CREATE OPCLASS": { "description": "Description\nCREATE OPERATOR CLASS creates a new operator class.", "synopsis": "\nCREATE OPERATOR CLASS name [ DEFAULT ] FOR TYPE data_type\n USING index_method [ FAMILY family_name ] AS\n { OPERATOR strategy_number operator_name [ ( op_type, op_type ) ] [ FOR SEARCH | FOR ORDER BY sort_family_name ]\n | FUNCTION support_number [ ( op_type [ , op_type ] ) ] function_name ( argument_type [, ...] )\n | STORAGE storage_type\n } [, ... ]\n" }, "CREATE OPERATOR": { "description": "Description\nCREATE OPERATOR defines a new operator,", "synopsis": "\nCREATE OPERATOR name (\n PROCEDURE = function_name\n [, LEFTARG = left_type ] [, RIGHTARG = right_type ]\n [, COMMUTATOR = com_op ] [, NEGATOR = neg_op ]\n [, RESTRICT = res_proc ] [, JOIN = join_proc ]\n [, HASHES ] [, MERGES ]\n)\n" }, "CREATE OPFAMILY": { "description": "Description\nCREATE OPERATOR FAMILY creates a new operator family.", "synopsis": "\nCREATE OPERATOR FAMILY name USING index_method\n" }, "CREATE POLICY": { "description": "Description\n The CREATE POLICY command defines a new policy for a", "synopsis": "\nCREATE POLICY name ON table_name\n [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]\n [ TO { role_name | PUBLIC | CURRENT_USER | SESSION_USER } [, ...] ]\n [ USING ( using_expression ) ]\n [ WITH CHECK ( check_expression ) ]\n" }, "CREATE ROLE": { "description": "Description\nCREATE ROLE adds a new role to a", "synopsis": "\nCREATE ROLE name [ [ WITH ] option [ ... ] ]\n\nwhere option can be:\n\n SUPERUSER | NOSUPERUSER\n | CREATEDB | NOCREATEDB\n | CREATEROLE | NOCREATEROLE\n | CREATEUSER | NOCREATEUSER\n | INHERIT | NOINHERIT\n | LOGIN | NOLOGIN\n | REPLICATION | NOREPLICATION\n | BYPASSRLS | NOBYPASSRLS\n | CONNECTION LIMIT connlimit\n | [ ENCRYPTED | UNENCRYPTED ] PASSWORD 'password'\n | VALID UNTIL 'timestamp'\n | IN ROLE role_name [, ...]\n | IN GROUP role_name [, ...]\n | ROLE role_name [, ...]\n | ADMIN role_name [, ...]\n | USER role_name [, ...]\n | SYSID uid\n" }, "CREATE RULE": { "description": "Description\nCREATE RULE defines a new rule applying to a specified", "synopsis": "\nCREATE [ OR REPLACE ] RULE name AS ON event\n TO table_name [ WHERE condition ]\n DO [ ALSO | INSTEAD ] { NOTHING | command | ( command ; command ... ) }\n\nwhere event can be one of:\n\n SELECT | INSERT | UPDATE | DELETE\n" }, "CREATE SCHEMA": { "description": "Description\nCREATE SCHEMA enters a new schema", "synopsis": "\nCREATE SCHEMA schema_name [ AUTHORIZATION role_specification ] [ schema_element [ ... ] ]\nCREATE SCHEMA AUTHORIZATION role_specification [ schema_element [ ... 
] ]\nCREATE SCHEMA IF NOT EXISTS schema_name [ AUTHORIZATION role_specification ]\nCREATE SCHEMA IF NOT EXISTS AUTHORIZATION role_specification\nwhere role_specification can be:\n\n [ GROUP ] user_name\n | CURRENT_USER\n | SESSION_USER\n" }, "CREATE SEQUENCE": { "description": "Description\nCREATE SEQUENCE creates a new sequence number", "synopsis": "\nCREATE [ TEMPORARY | TEMP ] SEQUENCE [ IF NOT EXISTS ] name [ INCREMENT [ BY ] increment ]\n [ MINVALUE minvalue | NO MINVALUE ] [ MAXVALUE maxvalue | NO MAXVALUE ]\n [ START [ WITH ] start ] [ CACHE cache ] [ [ NO ] CYCLE ]\n [ OWNED BY { table_name.column_name | NONE } ]\n" }, "CREATE SERVER": { "description": "Description\nCREATE SERVER defines a new foreign server. The", "synopsis": "\nCREATE SERVER server_name [ TYPE 'server_type' ] [ VERSION 'server_version' ]\n FOREIGN DATA WRAPPER fdw_name\n [ OPTIONS ( option 'value' [, ... ] ) ]\n" }, "CREATE TABLE": { "description": "Description\nCREATE TABLE will create a new, initially empty table", "synopsis": "\nCREATE [ [ GLOBAL | LOCAL ] { TEMPORARY | TEMP } | UNLOGGED ] TABLE [ IF NOT EXISTS ] table_name ( [\n { column_name data_type [ COLLATE collation ] [ column_constraint [ ... ] ]\n | table_constraint\n | LIKE source_table [ like_option ... ] }\n [, ... ]\n] )\n[ INHERITS ( parent_table [, ... ] ) ]\n[ WITH ( storage_parameter [= value] [, ... ] ) | WITH OIDS | WITHOUT OIDS ]\n[ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]\n[ TABLESPACE tablespace_name ]\n\nCREATE [ [ GLOBAL | LOCAL ] { TEMPORARY | TEMP } | UNLOGGED ] TABLE [ IF NOT EXISTS ] table_name\n OF type_name [ (\n { column_name WITH OPTIONS [ column_constraint [ ... ] ]\n | table_constraint }\n [, ... ]\n) ]\n[ WITH ( storage_parameter [= value] [, ... ] ) | WITH OIDS | WITHOUT OIDS ]\n[ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]\n[ TABLESPACE tablespace_name ]\n\nwhere column_constraint is:\n\n[ CONSTRAINT constraint_name ]\n{ NOT NULL |\n NULL |\n CHECK ( expression ) [ NO INHERIT ] |\n DEFAULT default_expr |\n UNIQUE index_parameters |\n PRIMARY KEY index_parameters |\n REFERENCES reftable [ ( refcolumn ) ] [ MATCH FULL | MATCH PARTIAL | MATCH SIMPLE ]\n [ ON DELETE action ] [ ON UPDATE action ] }\n[ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ]\n\nand table_constraint is:\n\n[ CONSTRAINT constraint_name ]\n{ CHECK ( expression ) [ NO INHERIT ] |\n UNIQUE ( column_name [, ... ] ) index_parameters |\n PRIMARY KEY ( column_name [, ... ] ) index_parameters |\n EXCLUDE [ USING index_method ] ( exclude_element WITH operator [, ... ] ) index_parameters [ WHERE ( predicate ) ] |\n FOREIGN KEY ( column_name [, ... ] ) REFERENCES reftable [ ( refcolumn [, ... ] ) ]\n [ MATCH FULL | MATCH PARTIAL | MATCH SIMPLE ] [ ON DELETE action ] [ ON UPDATE action ] }\n[ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ]\n\nand like_option is:\n\n{ INCLUDING | EXCLUDING } { DEFAULTS | CONSTRAINTS | INDEXES | STORAGE | COMMENTS | ALL }\n\nindex_parameters in UNIQUE, PRIMARY KEY, and EXCLUDE constraints are:\n\n[ WITH ( storage_parameter [= value] [, ... ] ) ]\n[ USING INDEX TABLESPACE tablespace_name ]\n\nexclude_element in an EXCLUDE constraint is:\n\n{ column_name | ( expression ) } [ opclass ] [ ASC | DESC ] [ NULLS { FIRST | LAST } ]\n" }, "CREATE TABLE AS": { "description": "Description\nCREATE TABLE AS creates a table and fills it", "synopsis": "\nCREATE [ [ GLOBAL | LOCAL ] { TEMPORARY | TEMP } | UNLOGGED ] TABLE [ IF NOT EXISTS ] table_name\n [ (column_name [, ...] 
) ]\n [ WITH ( storage_parameter [= value] [, ... ] ) | WITH OIDS | WITHOUT OIDS ]\n [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]\n [ TABLESPACE tablespace_name ]\n AS query\n [ WITH [ NO ] DATA ]\n" }, "CREATE TABLESPACE": { "description": "Description\nCREATE TABLESPACE registers a new cluster-wide", "synopsis": "\nCREATE TABLESPACE tablespace_name\n [ OWNER { new_owner | CURRENT_USER | SESSION_USER } ]\n LOCATION 'directory'\n [ WITH ( tablespace_option = value [, ... ] ) ]\n" }, "CREATE TRANSFORM": { "description": "Description\nCREATE TRANSFORM defines a new transform.", "synopsis": "\nCREATE [ OR REPLACE ] TRANSFORM FOR type_name LANGUAGE lang_name (\n FROM SQL WITH FUNCTION from_sql_function_name (argument_type [, ...]),\n TO SQL WITH FUNCTION to_sql_function_name (argument_type [, ...])\n);\n" }, "CREATE TRIGGER": { "description": "Description\nCREATE TRIGGER creates a new trigger. The", "synopsis": "\nCREATE [ CONSTRAINT ] TRIGGER name { BEFORE | AFTER | INSTEAD OF } { event [ OR ... ] }\n ON table_name\n [ FROM referenced_table_name ]\n [ NOT DEFERRABLE | [ DEFERRABLE ] [ INITIALLY IMMEDIATE | INITIALLY DEFERRED ] ]\n [ FOR [ EACH ] { ROW | STATEMENT } ]\n [ WHEN ( condition ) ]\n EXECUTE PROCEDURE function_name ( arguments )\n\nwhere event can be one of:\n\n INSERT\n UPDATE [ OF column_name [, ... ] ]\n DELETE\n TRUNCATE\n" }, "CREATE TSCONFIG": { "description": "Description\nCREATE TEXT SEARCH CONFIGURATION creates a new text", "synopsis": "\nCREATE TEXT SEARCH CONFIGURATION name (\n PARSER = parser_name |\n COPY = source_config\n)\n" }, "CREATE TSDICTIONARY": { "description": "Description\nCREATE TEXT SEARCH DICTIONARY creates a new text search", "synopsis": "\nCREATE TEXT SEARCH DICTIONARY name (\n TEMPLATE = template\n [, option = value [, ... ]]\n)\n" }, "CREATE TSPARSER": { "description": "Description\nCREATE TEXT SEARCH PARSER creates a new text search", "synopsis": "\nCREATE TEXT SEARCH PARSER name (\n START = start_function ,\n GETTOKEN = gettoken_function ,\n END = end_function ,\n LEXTYPES = lextypes_function\n [, HEADLINE = headline_function ]\n)\n" }, "CREATE TSTEMPLATE": { "description": "Description\nCREATE TEXT SEARCH TEMPLATE creates a new text search", "synopsis": "\nCREATE TEXT SEARCH TEMPLATE name (\n [ INIT = init_function , ]\n LEXIZE = lexize_function\n)\n" }, "CREATE TYPE": { "description": "Description\nCREATE TYPE registers a new data type for use in", "synopsis": "\nCREATE TYPE name AS\n ( [ attribute_name data_type [ COLLATE collation ] [, ... ] ] )\n\nCREATE TYPE name AS ENUM\n ( [ 'label' [, ... 
] ] )\n\nCREATE TYPE name AS RANGE (\n SUBTYPE = subtype\n [ , SUBTYPE_OPCLASS = subtype_operator_class ]\n [ , COLLATION = collation ]\n [ , CANONICAL = canonical_function ]\n [ , SUBTYPE_DIFF = subtype_diff_function ]\n)\n\nCREATE TYPE name (\n INPUT = input_function,\n OUTPUT = output_function\n [ , RECEIVE = receive_function ]\n [ , SEND = send_function ]\n [ , TYPMOD_IN = type_modifier_input_function ]\n [ , TYPMOD_OUT = type_modifier_output_function ]\n [ , ANALYZE = analyze_function ]\n [ , INTERNALLENGTH = { internallength | VARIABLE } ]\n [ , PASSEDBYVALUE ]\n [ , ALIGNMENT = alignment ]\n [ , STORAGE = storage ]\n [ , LIKE = like_type ]\n [ , CATEGORY = category ]\n [ , PREFERRED = preferred ]\n [ , DEFAULT = default ]\n [ , ELEMENT = element ]\n [ , DELIMITER = delimiter ]\n [ , COLLATABLE = collatable ]\n)\n\nCREATE TYPE name\n" }, "CREATE USER": { "description": "Description\nCREATE USER is now an alias for", "synopsis": "\nCREATE USER name [ [ WITH ] option [ ... ] ]\n\nwhere option can be:\n\n SUPERUSER | NOSUPERUSER\n | CREATEDB | NOCREATEDB\n | CREATEROLE | NOCREATEROLE\n | CREATEUSER | NOCREATEUSER\n | INHERIT | NOINHERIT\n | LOGIN | NOLOGIN\n | REPLICATION | NOREPLICATION\n | CONNECTION LIMIT connlimit\n | [ ENCRYPTED | UNENCRYPTED ] PASSWORD 'password'\n | VALID UNTIL 'timestamp'\n | IN ROLE role_name [, ...]\n | IN GROUP role_name [, ...]\n | ROLE role_name [, ...]\n | ADMIN role_name [, ...]\n | USER role_name [, ...]\n | SYSID uid\n" }, "CREATE USER MAPPING": { "description": "Description\nCREATE USER MAPPING defines a mapping of a user", "synopsis": "\nCREATE USER MAPPING FOR { user_name | USER | CURRENT_USER | PUBLIC }\n SERVER server_name\n [ OPTIONS ( option 'value' [ , ... ] ) ]\n" }, "CREATE VIEW": { "description": "Description\nCREATE VIEW defines a view of a query. The view", "synopsis": "\nCREATE [ OR REPLACE ] [ TEMP | TEMPORARY ] [ RECURSIVE ] VIEW name [ ( column_name [, ...] ) ]\n [ WITH ( view_option_name [= view_option_value] [, ... ] ) ]\n AS query\n [ WITH [ CASCADED | LOCAL ] CHECK OPTION ]\n" }, "DEALLOCATE": { "description": "Description\nDEALLOCATE is used to deallocate a previously", "synopsis": "\nDEALLOCATE [ PREPARE ] { name | ALL }\n" }, "DECLARE": { "description": "Description\nDECLARE allows a user to create cursors, which", "synopsis": "\nDECLARE name [ BINARY ] [ INSENSITIVE ] [ [ NO ] SCROLL ]\n CURSOR [ { WITH | WITHOUT } HOLD ] FOR query\n" }, "DELETE": { "description": "Description\nDELETE deletes rows that satisfy the", "synopsis": "\n[ WITH [ RECURSIVE ] with_query [, ...] ]\nDELETE FROM [ ONLY ] table_name [ * ] [ [ AS ] alias ]\n [ USING using_list ]\n [ WHERE condition | WHERE CURRENT OF cursor_name ]\n [ RETURNING * | output_expression [ [ AS ] output_name ] [, ...] ]\n" }, "DISCARD": { "description": "Description\nDISCARD releases internal resources associated with a", "synopsis": "\nDISCARD { ALL | PLANS | SEQUENCES | TEMPORARY | TEMP }\n" }, "DO": { "description": "Description\nDO executes an anonymous code block, or in other", "synopsis": "\nDO [ LANGUAGE lang_name ] code\n" }, "DROP AGGREGATE": { "description": "Description\nDROP AGGREGATE removes an existing", "synopsis": "\nDROP AGGREGATE [ IF EXISTS ] name ( aggregate_signature ) [ CASCADE | RESTRICT ]\n\nwhere aggregate_signature is:\n\n* |\n[ argmode ] [ argname ] argtype [ , ... ] |\n[ [ argmode ] [ argname ] argtype [ , ... ] ] ORDER BY [ argmode ] [ argname ] argtype [ , ... 
]\n" }, "DROP CAST": { "description": "Description\nDROP CAST removes a previously defined cast.", "synopsis": "\nDROP CAST [ IF EXISTS ] (source_type AS target_type) [ CASCADE | RESTRICT ]\n" }, "DROP COLLATION": { "description": "Description\nDROP COLLATION removes a previously defined collation.", "synopsis": "\nDROP COLLATION [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP CONVERSION": { "description": "Description\nDROP CONVERSION removes a previously defined conversion.", "synopsis": "\nDROP CONVERSION [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP DATABASE": { "description": "Description\nDROP DATABASE drops a database. It removes the", "synopsis": "\nDROP DATABASE [ IF EXISTS ] name\n" }, "DROP DOMAIN": { "description": "Description\nDROP DOMAIN removes a domain. Only the owner of", "synopsis": "\nDROP DOMAIN [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP EVENT TRIGGER": { "description": "Description\nDROP EVENT TRIGGER removes an existing event trigger.", "synopsis": "\nDROP EVENT TRIGGER [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP EXTENSION": { "description": "Description\nDROP EXTENSION removes extensions from the database.", "synopsis": "\nDROP EXTENSION [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP FOREIGN DATA WRAPPER": { "description": "Description\nDROP FOREIGN DATA WRAPPER removes an existing", "synopsis": "\nDROP FOREIGN DATA WRAPPER [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP FOREIGN TABLE": { "description": "Description\nDROP FOREIGN TABLE removes a foreign table.", "synopsis": "\nDROP FOREIGN TABLE [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP FUNCTION": { "description": "Description\nDROP FUNCTION removes the definition of an existing", "synopsis": "\nDROP FUNCTION [ IF EXISTS ] name ( [ [ argmode ] [ argname ] argtype [, ...] ] )\n [ CASCADE | RESTRICT ]\n" }, "DROP GROUP": { "description": "Description\nDROP GROUP is now an alias for", "synopsis": "\nDROP GROUP [ IF EXISTS ] name [, ...]\n" }, "DROP INDEX": { "description": "Description\nDROP INDEX drops an existing index from the database", "synopsis": "\nDROP INDEX [ CONCURRENTLY ] [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP LANGUAGE": { "description": "Description\nDROP LANGUAGE removes the definition of a", "synopsis": "\nDROP [ PROCEDURAL ] LANGUAGE [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP MATERIALIZED VIEW": { "description": "Description\nDROP MATERIALIZED VIEW drops an existing materialized", "synopsis": "\nDROP MATERIALIZED VIEW [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP OPCLASS": { "description": "Description\nDROP OPERATOR CLASS drops an existing operator class.", "synopsis": "\nDROP OPERATOR CLASS [ IF EXISTS ] name USING index_method [ CASCADE | RESTRICT ]\n" }, "DROP OPERATOR": { "description": "Description\nDROP OPERATOR drops an existing operator from", "synopsis": "\nDROP OPERATOR [ IF EXISTS ] name ( { left_type | NONE } , { right_type | NONE } ) [ CASCADE | RESTRICT ]\n" }, "DROP OPFAMILY": { "description": "Description\nDROP OPERATOR FAMILY drops an existing operator family.", "synopsis": "\nDROP OPERATOR FAMILY [ IF EXISTS ] name USING index_method [ CASCADE | RESTRICT ]\n" }, "DROP OWNED": { "description": "Description\nDROP OWNED drops all the objects within the current", "synopsis": "\nDROP OWNED BY { name | CURRENT_USER | SESSION_USER } [, ...] 
[ CASCADE | RESTRICT ]\n" }, "DROP POLICY": { "description": "Description\nDROP POLICY removes the specified policy from the table.", "synopsis": "\nDROP POLICY [ IF EXISTS ] name ON table_name\n" }, "DROP ROLE": { "description": "Description\nDROP ROLE removes the specified role(s).", "synopsis": "\nDROP ROLE [ IF EXISTS ] name [, ...]\n" }, "DROP RULE": { "description": "Description\nDROP RULE drops a rewrite rule.", "synopsis": "\nDROP RULE [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]\n" }, "DROP SCHEMA": { "description": "Description\nDROP SCHEMA removes schemas from the database.", "synopsis": "\nDROP SCHEMA [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP SEQUENCE": { "description": "Description\nDROP SEQUENCE removes sequence number", "synopsis": "\nDROP SEQUENCE [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP SERVER": { "description": "Description\nDROP SERVER removes an existing foreign server", "synopsis": "\nDROP SERVER [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP TABLE": { "description": "Description\nDROP TABLE removes tables from the database.", "synopsis": "\nDROP TABLE [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP TABLESPACE": { "description": "Description\nDROP TABLESPACE removes a tablespace from the system.", "synopsis": "\nDROP TABLESPACE [ IF EXISTS ] name\n" }, "DROP TRANSFORM": { "description": "Description\nDROP TRANSFORM removes a previously defined transform.", "synopsis": "\nDROP TRANSFORM [ IF EXISTS ] FOR type_name LANGUAGE lang_name\n" }, "DROP TRIGGER": { "description": "Description\nDROP TRIGGER removes an existing", "synopsis": "\nDROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]\n" }, "DROP TSCONFIG": { "description": "Description\nDROP TEXT SEARCH CONFIGURATION drops an existing text", "synopsis": "\nDROP TEXT SEARCH CONFIGURATION [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP TSDICTIONARY": { "description": "Description\nDROP TEXT SEARCH DICTIONARY drops an existing text", "synopsis": "\nDROP TEXT SEARCH DICTIONARY [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP TSPARSER": { "description": "Description\nDROP TEXT SEARCH PARSER drops an existing text search", "synopsis": "\nDROP TEXT SEARCH PARSER [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP TSTEMPLATE": { "description": "Description\nDROP TEXT SEARCH TEMPLATE drops an existing text search", "synopsis": "\nDROP TEXT SEARCH TEMPLATE [ IF EXISTS ] name [ CASCADE | RESTRICT ]\n" }, "DROP TYPE": { "description": "Description\nDROP TYPE removes a user-defined data type.", "synopsis": "\nDROP TYPE [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "DROP USER": { "description": "Description\nDROP USER is now an alias for", "synopsis": "\nDROP USER [ IF EXISTS ] name [, ...]\n" }, "DROP USER MAPPING": { "description": "Description\nDROP USER MAPPING removes an existing user", "synopsis": "\nDROP USER MAPPING [ IF EXISTS ] FOR { user_name | USER | CURRENT_USER | PUBLIC } SERVER server_name\n" }, "DROP VIEW": { "description": "Description\nDROP VIEW drops an existing view. To execute", "synopsis": "\nDROP VIEW [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]\n" }, "END": { "description": "Description\nEND commits the current transaction. All changes", "synopsis": "\nEND [ WORK | TRANSACTION ]\n" }, "EXECUTE": { "description": "Description\nEXECUTE is used to execute a previously prepared", "synopsis": "\nEXECUTE name [ ( parameter [, ...] 
) ]\n" }, "EXPLAIN": { "description": "Description\n This command displays the execution plan that the", "synopsis": "\nEXPLAIN [ ( option [, ...] ) ] statement\nEXPLAIN [ ANALYZE ] [ VERBOSE ] statement\nwhere option can be one of:\n\n ANALYZE [ boolean ]\n VERBOSE [ boolean ]\n COSTS [ boolean ]\n BUFFERS [ boolean ]\n TIMING [ boolean ]\n FORMAT { TEXT | XML | JSON | YAML }\n" }, "FETCH": { "description": "Description\nFETCH retrieves rows using a previously-created cursor.", "synopsis": "\nFETCH [ direction [ FROM | IN ] ] cursor_name\nwhere direction can be empty or one of:\n\n NEXT\n PRIOR\n FIRST\n LAST\n ABSOLUTE count\n RELATIVE count\ncount\n ALL\n FORWARD\n FORWARD count\n FORWARD ALL\n BACKWARD\n BACKWARD count\n BACKWARD ALL\n" }, "GRANT": { "description": "Description\n The GRANT command has two basic variants: one", "synopsis": "\nGRANT { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }\n [, ...] | ALL [ PRIVILEGES ] }\n ON { [ TABLE ] table_name [, ...]\n | ALL TABLES IN SCHEMA schema_name [, ...] }\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { { SELECT | INSERT | UPDATE | REFERENCES } ( column_name [, ...] )\n [, ...] | ALL [ PRIVILEGES ] ( column_name [, ...] ) }\n ON [ TABLE ] table_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { { USAGE | SELECT | UPDATE }\n [, ...] | ALL [ PRIVILEGES ] }\n ON { SEQUENCE sequence_name [, ...]\n | ALL SEQUENCES IN SCHEMA schema_name [, ...] }\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { { CREATE | CONNECT | TEMPORARY | TEMP } [, ...] | ALL [ PRIVILEGES ] }\n ON DATABASE database_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { USAGE | ALL [ PRIVILEGES ] }\n ON DOMAIN domain_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { USAGE | ALL [ PRIVILEGES ] }\n ON FOREIGN DATA WRAPPER fdw_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { USAGE | ALL [ PRIVILEGES ] }\n ON FOREIGN SERVER server_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { EXECUTE | ALL [ PRIVILEGES ] }\n ON { FUNCTION function_name ( [ [ argmode ] [ arg_name ] arg_type [, ...] ] ) [, ...]\n | ALL FUNCTIONS IN SCHEMA schema_name [, ...] }\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { USAGE | ALL [ PRIVILEGES ] }\n ON LANGUAGE lang_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { { SELECT | UPDATE } [, ...] | ALL [ PRIVILEGES ] }\n ON LARGE OBJECT loid [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { { CREATE | USAGE } [, ...] | ALL [ PRIVILEGES ] }\n ON SCHEMA schema_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { CREATE | ALL [ PRIVILEGES ] }\n ON TABLESPACE tablespace_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nGRANT { USAGE | ALL [ PRIVILEGES ] }\n ON TYPE type_name [, ...]\n TO role_specification [, ...] [ WITH GRANT OPTION ]\n\nwhere role_specification can be:\n\n [ GROUP ] role_name\n | PUBLIC\n | CURRENT_USER\n | SESSION_USER\n\nGRANT role_name [, ...] TO role_name [, ...] [ WITH ADMIN OPTION ]\n" }, "IMPORT FOREIGN SCHEMA": { "description": "Description\nIMPORT FOREIGN SCHEMA creates foreign tables that", "synopsis": "\nIMPORT FOREIGN SCHEMA remote_schema\n [ { LIMIT TO | EXCEPT } ( table_name [, ...] ) ]\n FROM SERVER server_name\n INTO local_schema\n [ OPTIONS ( option 'value' [, ... 
] ) ]\n" }, "INSERT": { "description": "Description\nINSERT inserts new rows into a table.", "synopsis": "\n[ WITH [ RECURSIVE ] with_query [, ...] ]\nINSERT INTO table_name [ AS alias ] [ ( column_name [, ...] ) ]\n { DEFAULT VALUES | VALUES ( { expression | DEFAULT } [, ...] ) [, ...] | query }\n [ ON CONFLICT [ conflict_target ] conflict_action ]\n [ RETURNING * | output_expression [ [ AS ] output_name ] [, ...] ]\n\nwhere conflict_target can be one of:\n\n ( { column_name_index | ( expression_index ) } [ COLLATE collation ] [ opclass ] [, ...] ) [ WHERE index_predicate ]\n ON CONSTRAINT constraint_name\nand conflict_action is one of:\n\n DO NOTHING\n DO UPDATE SET { column_name = { expression | DEFAULT } |\n ( column_name [, ...] ) = ( { expression | DEFAULT } [, ...] ) |\n ( column_name [, ...] ) = ( sub-SELECT )\n } [, ...]\n [ WHERE condition ]\n" }, "LISTEN": { "description": "Description\nLISTEN registers the current session as a", "synopsis": "\nLISTEN channel\n" }, "LOAD": { "description": "Description\n This command loads a shared library file into the PostgreSQL", "synopsis": "\nLOAD 'filename'\n" }, "LOCK": { "description": "Description\nLOCK TABLE obtains a table-level lock, waiting", "synopsis": "\nLOCK [ TABLE ] [ ONLY ] name [ * ] [, ...] [ IN lockmode MODE ] [ NOWAIT ]\n\nwhere lockmode is one of:\n\n ACCESS SHARE | ROW SHARE | ROW EXCLUSIVE | SHARE UPDATE EXCLUSIVE\n | SHARE | SHARE ROW EXCLUSIVE | EXCLUSIVE | ACCESS EXCLUSIVE\n" }, "MOVE": { "description": "Description\nMOVE repositions a cursor without retrieving any data.", "synopsis": "\nMOVE [ direction [ FROM | IN ] ] cursor_name\nwhere direction can be empty or one of:\n\n NEXT\n PRIOR\n FIRST\n LAST\n ABSOLUTE count\n RELATIVE count\ncount\n ALL\n FORWARD\n FORWARD count\n FORWARD ALL\n BACKWARD\n BACKWARD count\n BACKWARD ALL\n" }, "NOTIFY": { "description": "Description\n The NOTIFY command sends a notification event together", "synopsis": "\nNOTIFY channel [ , payload ]\n" }, "PGBENCH": { "description": "Description\npgbench is a simple program for running benchmark", "synopsis": "\nclient_id transaction_no time file_no time_epoch time_us schedule_lag\n" }, "PREPARE": { "description": "Description\nPREPARE creates a prepared statement. A prepared", "synopsis": "\nPREPARE name [ ( data_type [, ...] ) ] AS statement\n" }, "PREPARE TRANSACTION": { "description": "Description\nPREPARE TRANSACTION prepares the current transaction", "synopsis": "\nPREPARE TRANSACTION transaction_id\n" }, "REASSIGN OWNED": { "description": "Description\nREASSIGN OWNED instructs the system to change", "synopsis": "\nREASSIGN OWNED BY { old_role | CURRENT_USER | SESSION_USER } [, ...]\n TO { new_role | CURRENT_USER | SESSION_USER }\n" }, "REFRESH MATERIALIZED VIEW": { "description": "Description\nREFRESH MATERIALIZED VIEW completely replaces the", "synopsis": "\nREFRESH MATERIALIZED VIEW [ CONCURRENTLY ] name\n [ WITH [ NO ] DATA ]\n" }, "REINDEX": { "description": "Description\nREINDEX rebuilds an index using the data", "synopsis": "\nREINDEX [ ( { VERBOSE } [, ...] 
) ] { INDEX | TABLE | SCHEMA | DATABASE | SYSTEM } name\n" }, "RELEASE SAVEPOINT": { "description": "Description\nRELEASE SAVEPOINT destroys a savepoint previously defined", "synopsis": "\nRELEASE [ SAVEPOINT ] savepoint_name\n" }, "RESET": { "description": "Description\nRESET restores run-time parameters to their", "synopsis": "\nRESET configuration_parameter\nRESET ALL\n" }, "REVOKE": { "description": "Description\n The REVOKE command revokes previously granted", "synopsis": "\nREVOKE [ GRANT OPTION FOR ]\n { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }\n [, ...] | ALL [ PRIVILEGES ] }\n ON { [ TABLE ] table_name [, ...]\n | ALL TABLES IN SCHEMA schema_name [, ...] }\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { SELECT | INSERT | UPDATE | REFERENCES } ( column_name [, ...] )\n [, ...] | ALL [ PRIVILEGES ] ( column_name [, ...] ) }\n ON [ TABLE ] table_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { USAGE | SELECT | UPDATE }\n [, ...] | ALL [ PRIVILEGES ] }\n ON { SEQUENCE sequence_name [, ...]\n | ALL SEQUENCES IN SCHEMA schema_name [, ...] }\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { CREATE | CONNECT | TEMPORARY | TEMP } [, ...] | ALL [ PRIVILEGES ] }\n ON DATABASE database_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { USAGE | ALL [ PRIVILEGES ] }\n ON DOMAIN domain_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { USAGE | ALL [ PRIVILEGES ] }\n ON FOREIGN DATA WRAPPER fdw_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { USAGE | ALL [ PRIVILEGES ] }\n ON FOREIGN SERVER server_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { EXECUTE | ALL [ PRIVILEGES ] }\n ON { FUNCTION function_name ( [ [ argmode ] [ arg_name ] arg_type [, ...] ] ) [, ...]\n | ALL FUNCTIONS IN SCHEMA schema_name [, ...] }\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { USAGE | ALL [ PRIVILEGES ] }\n ON LANGUAGE lang_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { SELECT | UPDATE } [, ...] | ALL [ PRIVILEGES ] }\n ON LARGE OBJECT loid [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { { CREATE | USAGE } [, ...] | ALL [ PRIVILEGES ] }\n ON SCHEMA schema_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { CREATE | ALL [ PRIVILEGES ] }\n ON TABLESPACE tablespace_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ GRANT OPTION FOR ]\n { USAGE | ALL [ PRIVILEGES ] }\n ON TYPE type_name [, ...]\n FROM { [ GROUP ] role_name | PUBLIC } [, ...]\n [ CASCADE | RESTRICT ]\n\nREVOKE [ ADMIN OPTION FOR ]\n role_name [, ...] 
FROM role_name [, ...]\n [ CASCADE | RESTRICT ]\n" }, "ROLLBACK": { "description": "Description\nROLLBACK rolls back the current transaction and causes", "synopsis": "\nROLLBACK [ WORK | TRANSACTION ]\n" }, "ROLLBACK PREPARED": { "description": "Description\nROLLBACK PREPARED rolls back a transaction that is in", "synopsis": "\nROLLBACK PREPARED transaction_id\n" }, "ROLLBACK TO": { "description": "Description\n Roll back all commands that were executed after the savepoint was", "synopsis": "\nROLLBACK [ WORK | TRANSACTION ] TO [ SAVEPOINT ] savepoint_name\n" }, "SAVEPOINT": { "description": "Description\nSAVEPOINT establishes a new savepoint within", "synopsis": "\nSAVEPOINT savepoint_name\n" }, "SECURITY LABEL": { "description": "Description\nSECURITY LABEL applies a security label to a database", "synopsis": "\nSECURITY LABEL [ FOR provider ] ON\n{\n TABLE object_name |\n COLUMN table_name.column_name |\n AGGREGATE aggregate_name ( aggregate_signature ) |\n DATABASE object_name |\n DOMAIN object_name |\n EVENT TRIGGER object_name |\n FOREIGN TABLE object_name\n FUNCTION function_name ( [ [ argmode ] [ argname ] argtype [, ...] ] ) |\n LARGE OBJECT large_object_oid |\n MATERIALIZED VIEW object_name |\n [ PROCEDURAL ] LANGUAGE object_name |\n ROLE object_name |\n SCHEMA object_name |\n SEQUENCE object_name |\n TABLESPACE object_name |\n TYPE object_name |\n VIEW object_name\n} IS 'label'\n\nwhere aggregate_signature is:\n\n* |\n[ argmode ] [ argname ] argtype [ , ... ] |\n[ [ argmode ] [ argname ] argtype [ , ... ] ] ORDER BY [ argmode ] [ argname ] argtype [ , ... ]\n" }, "SELECT": { "description": "Description\nSELECT retrieves rows from zero or more tables.", "synopsis": "\n[ WITH [ RECURSIVE ] with_query [, ...] ]\nSELECT [ ALL | DISTINCT [ ON ( expression [, ...] ) ] ]\n [ * | expression [ [ AS ] output_name ] [, ...] ]\n [ FROM from_item [, ...] ]\n [ WHERE condition ]\n [ GROUP BY grouping_element [, ...] ]\n [ HAVING condition [, ...] ]\n [ WINDOW window_name AS ( window_definition ) [, ...] ]\n [ { UNION | INTERSECT | EXCEPT } [ ALL | DISTINCT ] select ]\n [ ORDER BY expression [ ASC | DESC | USING operator ] [ NULLS { FIRST | LAST } ] [, ...] ]\n [ LIMIT { count | ALL } ]\n [ OFFSET start [ ROW | ROWS ] ]\n [ FETCH { FIRST | NEXT } [ count ] { ROW | ROWS } ONLY ]\n [ FOR { UPDATE | NO KEY UPDATE | SHARE | KEY SHARE } [ OF table_name [, ...] ] [ NOWAIT | SKIP LOCKED ] [...] ]\n\nwhere from_item can be one of:\n\n [ ONLY ] table_name [ * ] [ [ AS ] alias [ ( column_alias [, ...] ) ] ]\n [ TABLESAMPLE sampling_method ( argument [, ...] ) [ REPEATABLE ( seed ) ] ]\n [ LATERAL ] ( select ) [ AS ] alias [ ( column_alias [, ...] ) ]\n with_query_name [ [ AS ] alias [ ( column_alias [, ...] ) ] ]\n [ LATERAL ] function_name ( [ argument [, ...] ] )\n [ WITH ORDINALITY ] [ [ AS ] alias [ ( column_alias [, ...] ) ] ]\n [ LATERAL ] function_name ( [ argument [, ...] ] ) [ AS ] alias ( column_definition [, ...] )\n [ LATERAL ] function_name ( [ argument [, ...] ] ) AS ( column_definition [, ...] )\n [ LATERAL ] ROWS FROM( function_name ( [ argument [, ...] ] ) [ AS ( column_definition [, ...] ) ] [, ...] )\n [ WITH ORDINALITY ] [ [ AS ] alias [ ( column_alias [, ...] ) ] ]\n from_item [ NATURAL ] join_type from_item [ ON join_condition | USING ( join_column [, ...] ) ]\n\nand grouping_element can be one of:\n\n ( )\n expression\n ( expression [, ...] )\n ROLLUP ( { expression | ( expression [, ...] ) } [, ...] )\n CUBE ( { expression | ( expression [, ...] ) } [, ...] 
)\n GROUPING SETS ( grouping_element [, ...] )\n\nand with_query is:\nwith_query_name [ ( column_name [, ...] ) ] AS ( select | values | insert | update | delete )\n\nTABLE [ ONLY ] table_name [ * ]\n" }, "SELECT INTO": { "description": "Description\nSELECT INTO creates a new table and fills it", "synopsis": "\n[ WITH [ RECURSIVE ] with_query [, ...] ]\nSELECT [ ALL | DISTINCT [ ON ( expression [, ...] ) ] ]\n * | expression [ [ AS ] output_name ] [, ...]\n INTO [ TEMPORARY | TEMP | UNLOGGED ] [ TABLE ] new_table\n [ FROM from_item [, ...] ]\n [ WHERE condition ]\n [ GROUP BY expression [, ...] ]\n [ HAVING condition [, ...] ]\n [ WINDOW window_name AS ( window_definition ) [, ...] ]\n [ { UNION | INTERSECT | EXCEPT } [ ALL | DISTINCT ] select ]\n [ ORDER BY expression [ ASC | DESC | USING operator ] [ NULLS { FIRST | LAST } ] [, ...] ]\n [ LIMIT { count | ALL } ]\n [ OFFSET start [ ROW | ROWS ] ]\n [ FETCH { FIRST | NEXT } [ count ] { ROW | ROWS } ONLY ]\n [ FOR { UPDATE | SHARE } [ OF table_name [, ...] ] [ NOWAIT ] [...] ]\n" }, "SET": { "description": "Description\n The SET command changes run-time configuration", "synopsis": "\nSET [ SESSION | LOCAL ] configuration_parameter { TO | = } { value | 'value' | DEFAULT }\nSET [ SESSION | LOCAL ] TIME ZONE { timezone | LOCAL | DEFAULT }\n" }, "SET CONSTRAINTS": { "description": "Description\nSET CONSTRAINTS sets the behavior of constraint", "synopsis": "\nSET CONSTRAINTS { ALL | name [, ...] } { DEFERRED | IMMEDIATE }\n" }, "SET ROLE": { "description": "Description\n This command sets the current user", "synopsis": "\nSET [ SESSION | LOCAL ] ROLE role_name\nSET [ SESSION | LOCAL ] ROLE NONE\nRESET ROLE\n" }, "SET SESSION AUTH": { "description": "Description\n This command sets the session user identifier and the current user", "synopsis": "\nSET [ SESSION | LOCAL ] SESSION AUTHORIZATION user_name\nSET [ SESSION | LOCAL ] SESSION AUTHORIZATION DEFAULT\nRESET SESSION AUTHORIZATION\n" }, "SET TRANSACTION": { "description": "Description\n The SET TRANSACTION command sets the", "synopsis": "\nSET TRANSACTION transaction_mode [, ...]\nSET TRANSACTION SNAPSHOT snapshot_id\nSET SESSION CHARACTERISTICS AS TRANSACTION transaction_mode [, ...]\n\nwhere transaction_mode is one of:\n\n ISOLATION LEVEL { SERIALIZABLE | REPEATABLE READ | READ COMMITTED | READ UNCOMMITTED }\n READ WRITE | READ ONLY\n [ NOT ] DEFERRABLE\n" }, "SHOW": { "description": "Description\nSHOW will display the current setting of", "synopsis": "\nSHOW name\nSHOW ALL\n" }, "START TRANSACTION": { "description": "Description\n This command begins a new transaction block. If the isolation level,", "synopsis": "\nSTART TRANSACTION [ transaction_mode [, ...] ]\n\nwhere transaction_mode is one of:\n\n ISOLATION LEVEL { SERIALIZABLE | REPEATABLE READ | READ COMMITTED | READ UNCOMMITTED }\n READ WRITE | READ ONLY\n [ NOT ] DEFERRABLE\n" }, "TRUNCATE": { "description": "Description\nTRUNCATE quickly removes all rows from a set of", "synopsis": "\nTRUNCATE [ TABLE ] [ ONLY ] name [ * ] [, ... ]\n [ RESTART IDENTITY | CONTINUE IDENTITY ] [ CASCADE | RESTRICT ]\n" }, "UNLISTEN": { "description": "Description\nUNLISTEN is used to remove an existing", "synopsis": "\nUNLISTEN { channel | * }\n" }, "UPDATE": { "description": "Description\nUPDATE changes the values of the specified", "synopsis": "\n[ WITH [ RECURSIVE ] with_query [, ...] ]\nUPDATE [ ONLY ] table_name [ * ] [ [ AS ] alias ]\n SET { column_name = { expression | DEFAULT } |\n ( column_name [, ...] ) = ( { expression | DEFAULT } [, ...] 
) |\n ( column_name [, ...] ) = ( sub-SELECT )\n } [, ...]\n [ FROM from_list ]\n [ WHERE condition | WHERE CURRENT OF cursor_name ]\n [ RETURNING * | output_expression [ [ AS ] output_name ] [, ...] ]\n" }, "VACUUM": { "description": "Description\nVACUUM reclaims storage occupied by dead tuples.", "synopsis": "\nVACUUM [ ( { FULL | FREEZE | VERBOSE | ANALYZE } [, ...] ) ] [ table_name [ (column_name [, ...] ) ] ]\nVACUUM [ FULL ] [ FREEZE ] [ VERBOSE ] [ table_name ]\nVACUUM [ FULL ] [ FREEZE ] [ VERBOSE ] ANALYZE [ table_name [ (column_name [, ...] ) ] ]\n" }, "VALUES": { "description": "Description\nVALUES computes a row value or set of row values", "synopsis": "\nVALUES ( expression [, ...] ) [, ...]\n [ ORDER BY sort_expression [ ASC | DESC | USING operator ] [, ...] ]\n [ LIMIT { count | ALL } ]\n [ OFFSET start [ ROW | ROWS ] ]\n [ FETCH { FIRST | NEXT } [ count ] { ROW | ROWS } ONLY ]\n" } } pgspecial-1.9.0/pgspecial/iocommands.py0000644000076500000240000001535313165740766020415 0ustar irinastaff00000000000000from contextlib import contextmanager import re import sys import logging import click import io import shlex import sqlparse import psycopg2 from os.path import expanduser from .namedqueries import NamedQueries from . import export from .main import special_command _logger = logging.getLogger(__name__) @export def editor_command(command): """ Is this an external editor command? :param command: string """ # It is possible to have `\e filename` or `SELECT * FROM \e`. So we check # for both conditions. return command.strip().endswith('\\e') or command.strip().startswith('\\e ') @export def get_filename(sql): if sql.strip().startswith('\\e'): command, _, filename = sql.partition(' ') return filename.strip() or None @export def get_watch_command(command): match = re.match("(.*?)[\s]*\\\\watch (\d+);?$", command) if match: groups = match.groups() return groups[0], int(groups[1]) return None, None @export def get_editor_query(sql): """Get the query part of an editor command.""" sql = sql.strip() # The reason we can't simply do .strip('\e') is that it strips characters, # not a substring. So it'll strip "e" in the end of the sql also! # Ex: "select * from style\e" -> "select * from styl". pattern = re.compile('(^\\\e|\\\e$)') while pattern.search(sql): sql = pattern.sub('', sql) return sql @export def open_external_editor(filename=None, sql=None): """ Open external editor, wait for the user to type in his query, return the query. :return: list with one tuple, query as first element. """ message = None filename = filename.strip().split(' ', 1)[0] if filename else None sql = sql or '' MARKER = '# Type your query above this line.\n' # Populate the editor buffer with the partial sql (if available) and a # placeholder comment. query = click.edit(u'{sql}\n\n{marker}'.format(sql=sql, marker=MARKER), filename=filename, extension='.sql') if filename: try: query = read_from_file(filename) except IOError: message = 'Error reading file: %s.' % filename if query is not None: query = query.split(MARKER, 1)[0].rstrip('\n') else: # Don't return None for the caller to deal with. # Empty string is ok. 
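        # (Editorial note, illustrative addition; not in the upstream source.)
        # click.edit() reports an unmodified buffer as None, so query can be
        # None at this point; falling back to the original sql below keeps the
        # caller's query buffer intact instead of handing None back.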
query = sql return (query, message) def read_from_file(path): with io.open(expanduser(path), encoding='utf-8') as f: contents = f.read() return contents @contextmanager def _paused_thread(): try: thread = psycopg2.extensions.get_wait_callback() psycopg2.extensions.set_wait_callback(None) yield finally: psycopg2.extensions.set_wait_callback(thread) def _index_of_file_name(tokenlist): for (idx, token) in reversed(list(enumerate(tokenlist[:-2]))): if token.is_keyword and token.value.upper() in ('TO', 'FROM'): return idx + 2 return None @special_command('\\copy', '\\copy [tablename] to/from [filename]', 'Copy data between a file and a table.') def copy(cur, pattern, verbose): """Copies table data to/from files""" # Replace the specified file destination with STDIN or STDOUT parsed = sqlparse.parse(pattern) tokenlist = parsed[0].tokens idx = _index_of_file_name(tokenlist) file_name = tokenlist[idx].value before_file_name = ''.join(t.value for t in tokenlist[:idx]) after_file_name = ''.join(t.value for t in tokenlist[idx+1:]) direction = tokenlist[idx-2].value.upper() replacement_file_name = 'STDIN' if direction == 'FROM' else 'STDOUT' query = u'{0} {1} {2}'.format(before_file_name, replacement_file_name, after_file_name) open_mode = 'r' if direction == 'FROM' else 'w' if file_name.startswith("'") and file_name.endswith("'"): file = io.open(expanduser(file_name.strip("'")), mode=open_mode, encoding='utf-8') elif 'stdin' in file_name.lower(): file = sys.stdin elif 'stdout' in file_name.lower(): file = sys.stdout else: raise Exception('Enclose filename in single quotes') with _paused_thread(): cur.copy_expert('copy ' + query, file) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, cur.statusmessage)] else: return [(None, None, None, cur.statusmessage)] @special_command('\\n', '\\n[+] [name] [param1 param2 ...]', 'List or execute named queries.') def execute_named_query(cur, pattern, **_): """Returns (title, rows, headers, status)""" if pattern == '': return list_named_queries(True) params = shlex.split(pattern) pattern = params.pop(0) query = NamedQueries.instance.get(pattern) title = '> {}'.format(query) if query is None: message = "No named query: {}".format(pattern) return [(None, None, None, message)] try: cur.execute(query, params) except (IndexError, TypeError): raise Exception("Bad arguments") if cur.description: headers = [x[0] for x in cur.description] return [(title, cur, headers, cur.statusmessage)] else: return [(title, None, None, cur.statusmessage)] def list_named_queries(verbose): """List of all named queries. Returns (title, rows, headers, status)""" if not verbose: rows = [[r] for r in NamedQueries.instance.list()] headers = ["Name"] else: headers = ["Name", "Query"] rows = [[r, NamedQueries.instance.get(r)] for r in NamedQueries.instance.list()] if not rows: status = NamedQueries.instance.usage else: status = '' return [('', rows, headers, status)] @special_command('\\ns', '\\ns name query', 'Save a named query.') def save_named_query(pattern, **_): """Save a new named query. Returns (title, rows, headers, status)""" usage = 'Syntax: \\ns name query.\n\n' + NamedQueries.instance.usage if not pattern: return [(None, None, None, usage)] name, _, query = pattern.partition(' ') # If either name or query is missing then print the usage and complain. 
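    # (Illustrative note, not in the upstream source.) "\ns simple select 1"
    # arrives here as pattern="simple select 1", so the partition above yields
    # name="simple" and query="select 1"; a bare "\ns simple" leaves query
    # empty and is rejected by the check below.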
if (not name) or (not query): return [(None, None, None, usage + 'Err: Both name and query are required.')] NamedQueries.instance.save(name, query) return [(None, None, None, "Saved.")] @special_command('\\nd', '\\nd [name]', 'Delete a named query.') def delete_named_query(pattern, **_): """Delete an existing named query. """ usage = 'Syntax: \\nd name.\n\n' + NamedQueries.instance.usage if not pattern: return [(None, None, None, usage)] status = NamedQueries.instance.delete(pattern) return [(None, None, None, status)] pgspecial-1.9.0/pgspecial/main.py0000644000076500000240000002122613144170154017165 0ustar irinastaff00000000000000import os import logging from collections import namedtuple from . import export from .help.commands import helpcommands log = logging.getLogger(__name__) NO_QUERY = 0 PARSED_QUERY = 1 RAW_QUERY = 2 PAGER_ALWAYS = 2 PAGER_LONG_OUTPUT = 1 PAGER_OFF = 0 PAGER_MSG = { PAGER_OFF: "Pager usage is off.", PAGER_LONG_OUTPUT: "Pager is used for long output.", PAGER_ALWAYS: "Pager is always used." } SpecialCommand = namedtuple('SpecialCommand', ['handler', 'syntax', 'description', 'arg_type', 'hidden', 'case_sensitive']) @export class CommandNotFound(Exception): pass @export class PGSpecial(object): # Default static commands that don't rely on PGSpecial state are registered # via the special_command decorator and stored in default_commands default_commands = {} def __init__(self): self.timing_enabled = True self.commands = self.default_commands.copy() self.timing_enabled = False self.expanded_output = False self.auto_expand = False self.pager_config = PAGER_ALWAYS self.pager = os.environ.get('PAGER', '') self.register(self.show_help, '\\?', '\\?', 'Show Commands.', arg_type=PARSED_QUERY) self.register(self.toggle_expanded_output, '\\x', '\\x', 'Toggle expanded output.', arg_type=PARSED_QUERY) self.register(self.call_pset, '\\pset', '\\pset [key] [value]', 'A limited version of traditional \pset', arg_type=PARSED_QUERY) self.register(self.show_command_help, '\\h', '\\h', 'Show SQL syntax and help.', arg_type=PARSED_QUERY) self.register(self.toggle_timing, '\\timing', '\\timing', 'Toggle timing of commands.', arg_type=NO_QUERY) self.register(self.set_pager, '\\pager', '\\pager [command]', 'Set PAGER. 
Print the query results via PAGER.', arg_type=PARSED_QUERY) def register(self, *args, **kwargs): register_special_command(*args, command_dict=self.commands, **kwargs) def execute(self, cur, sql): commands = self.commands command, verbose, pattern = parse_special_command(sql) if (command not in commands) and (command.lower() not in commands): raise CommandNotFound try: special_cmd = commands[command] except KeyError: special_cmd = commands[command.lower()] if special_cmd.case_sensitive: raise CommandNotFound('Command not found: %s' % command) if special_cmd.arg_type == NO_QUERY: return special_cmd.handler() elif special_cmd.arg_type == PARSED_QUERY: return special_cmd.handler(cur=cur, pattern=pattern, verbose=verbose) elif special_cmd.arg_type == RAW_QUERY: return special_cmd.handler(cur=cur, query=sql) def show_help(self, pattern, **_): if pattern.strip(): return self.show_command_help(pattern) headers = ['Command', 'Description'] result = [] for _, value in sorted(self.commands.items()): if not value.hidden: result.append((value.syntax, value.description)) return [(None, result, headers, None)] def show_command_help_listing(self): table = chunks(sorted(helpcommands.keys()), 6) return [(None, table, [], None)] def show_command_help(self, pattern, **_): command = pattern.strip().upper() message = "" if not command: return self.show_command_help_listing() if command in helpcommands: helpcommand = helpcommands[command] if "description" in helpcommand: message += helpcommand["description"] if "synopsis" in helpcommand: message += "\nSyntax:\n" message += helpcommand["synopsis"] else: message = "No help available for \"%s\"" % pattern message += "\nTry \h with no arguments to see available help." return [(None, None, None, message)] def toggle_expanded_output(self, pattern, **_): flag = pattern.strip() if flag == "auto": self.auto_expand = True self.expanded_output = False return [(None, None, None, u"Expanded display is used automatically.")] elif flag == "off": self.expanded_output = False elif flag == "on": self.expanded_output = True else: self.expanded_output = not (self.expanded_output or self.auto_expand) self.auto_expand = self.expanded_output message = u"Expanded display is " message += u"on." if self.expanded_output else u"off." return [(None, None, None, message)] def toggle_timing(self): self.timing_enabled = not self.timing_enabled message = "Timing is " message += "on." if self.timing_enabled else "off." return [(None, None, None, message)] def call_pset(self, pattern, **_): pattern = pattern.split(' ', 2) val = pattern[1] if len(pattern) > 1 else '' key = pattern[0] if hasattr(self, 'pset_' + key): return getattr(self, 'pset_' + key)(val) else: return [(None, None, None, "'%s' is currently not supported by pset" % key)] def pset_pager(self, value): if value == 'always': self.pager_config = PAGER_ALWAYS elif value == 'off': self.pager_config = PAGER_OFF elif value == 'on': self.pager_config = PAGER_LONG_OUTPUT elif self.pager_config == PAGER_LONG_OUTPUT: self.pager_config = PAGER_OFF else: self.pager_config = PAGER_LONG_OUTPUT return [(None, None, None, '%s' % PAGER_MSG[self.pager_config])] def set_pager(self, pattern, **_): if not pattern: if not self.pager: os.environ.pop('PAGER', None) msg = 'Pager reset to system default.' else: os.environ['PAGER'] = self.pager msg = 'Reset pager back to default. Default: %s' % self.pager else: os.environ['PAGER'] = pattern msg = 'PAGER set to %s.' 
% pattern return [(None, None, None, msg)] @export def content_exceeds_width(row, width): # Account for 3 characters between each column separator_space = (len(row)*3) # Add 2 columns for a bit of buffer line_len = sum([len(x) for x in row]) + separator_space + 2 return line_len > width @export def parse_special_command(sql): command, _, arg = sql.partition(' ') verbose = '+' in command command = command.strip().replace('+', '') return (command, verbose, arg.strip()) def special_command(command, syntax, description, arg_type=PARSED_QUERY, hidden=False, case_sensitive=True, aliases=()): """A decorator used internally for static special commands""" def wrapper(wrapped): register_special_command(wrapped, command, syntax, description, arg_type, hidden, case_sensitive, aliases, command_dict=PGSpecial.default_commands) return wrapped return wrapper def register_special_command(handler, command, syntax, description, arg_type=PARSED_QUERY, hidden=False, case_sensitive=True, aliases=(), command_dict=None): cmd = command.lower() if not case_sensitive else command command_dict[cmd] = SpecialCommand(handler, syntax, description, arg_type, hidden, case_sensitive) for alias in aliases: cmd = alias.lower() if not case_sensitive else alias command_dict[cmd] = SpecialCommand(handler, syntax, description, arg_type, case_sensitive=case_sensitive, hidden=True) def chunks(l, n): n = max(1, n) return [l[i:i + n] for i in range(0, len(l), n)] @special_command('\\e', '\\e [file]', 'Edit the query with external editor.', arg_type=NO_QUERY) def doc_only(): raise RuntimeError @special_command('\\ef', '\\ef [funcname [line]]', 'Edit the contents of the query buffer.', arg_type=NO_QUERY, hidden=True) @special_command('\\ev', '\\ev [viewname [line]]', 'Edit the contents of the query buffer.', arg_type=NO_QUERY, hidden=True) @special_command('\\do', '\\do[S] [pattern]', 'List operators.', arg_type=NO_QUERY, hidden=True) @special_command('\\dp', '\\dp [pattern]', 'List table, view, and sequence access privileges.', arg_type=NO_QUERY, hidden=True) @special_command('\\z', '\\z [pattern]', 'Same as \\dp.', arg_type=NO_QUERY, hidden=True) def place_holder(): raise NotImplementedError pgspecial-1.9.0/pgspecial/namedqueries.py0000644000076500000240000000316112732064265020730 0ustar irinastaff00000000000000# -*- coding: utf-8 -*- class NamedQueries(object): section_name = 'named queries' usage = u'''Named Queries are a way to save frequently used queries with a short name. Think of them as favorites. Examples: # Save a new named query. > \\ns simple select * from abc where a is not Null; # List all named queries. > \\n +--------+----------------------------------------+ | Name | Query | |--------+----------------------------------------| | simple | SELECT * FROM xyzb where a is not null | +--------+----------------------------------------+ # Run a named query. > \\n simple +-----+ | a | |-----| | 50 | +-----+ # Delete a named query. > \\nd simple simple: Deleted ''' # Class-level variable, for convenience to use as a singleton. 
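    # Typically assigned once by the host application, as the README shows:
    #   NamedQueries.instance = NamedQueries.from_config(ConfigObj('~/.config_file_name'))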
instance = None def __init__(self, config): self.config = config @classmethod def from_config(cls, config): return NamedQueries(config) def list(self): return self.config.get(self.section_name, []) def get(self, name): return self.config.get(self.section_name, {}).get(name, None) def save(self, name, query): if self.section_name not in self.config: self.config[self.section_name] = {} self.config[self.section_name][name] = query self.config.write() def delete(self, name): try: del self.config[self.section_name][name] except KeyError: return '%s: Not Found.' % name self.config.write() return '%s: Deleted' % name pgspecial-1.9.0/pgspecial.egg-info/0000755000076500000240000000000013174677543017377 5ustar irinastaff00000000000000pgspecial-1.9.0/pgspecial.egg-info/dependency_links.txt0000644000076500000240000000000113174677543023445 0ustar irinastaff00000000000000 pgspecial-1.9.0/pgspecial.egg-info/PKG-INFO0000644000076500000240000000665213174677543020505 0ustar irinastaff00000000000000Metadata-Version: 1.1 Name: pgspecial Version: 1.9.0 Summary: Meta-commands handler for Postgres Database. Home-page: http://pgcli.com Author: Pgcli Core Team Author-email: pgcli-dev@googlegroups.com License: LICENSE.txt Description: Meta-commands for Postgres -------------------------- |BuildStatus| |PyPI| This package provides an API to execute meta-commands (AKA "special", or "backslash commands") on PostgreSQL. Quick Start ----------- This is a python package. It can be installed with: :: $ pip install pgspecial Usage ----- Once this library is included into your project, you will most likely use the following imports: .. code-block:: python from pgspecial.main import PGSpecial from pgspecial.namedqueries import NamedQueries Then you will create and use an instance of PGSpecial: .. code-block:: python pgspecial = PGSpecial() for result in pgspecial.execute(cur, sql): # Do something If you want to import named queries from an existing config file, it is convenient to initialize and keep around the class variable in ``NamedQueries``: .. code-block:: python from configobj import ConfigObj NamedQueries.instance = NamedQueries.from_config( ConfigObj('~/.config_file_name')) Contributions: -------------- If you're interested in contributing to this project, first of all I would like to extend my heartfelt gratitude. I've written a small doc to describe how to get this running in a development setup. https://github.com/dbcli/pgspecial/blob/master/DEVELOP.rst Please feel free to reach out to me if you need help. My email: amjith.r@gmail.com, Twitter: `@amjithr `_ Projects using it: ------------------ This module is being used by pgcli_: A REPL for Postgres. If you find this module useful and include it in your project, I'll be happy to know about it and list it here. .. |BuildStatus| image:: https://api.travis-ci.org/dbcli/pgspecial.svg?branch=master :target: https://travis-ci.org/dbcli/pgspecial .. |PyPI| image:: https://badge.fury.io/py/pgspecial.svg :target: https://pypi.python.org/pypi/pgspecial/ :alt: Latest Version .. 
_pgcli: https://github.com/dbcli/pgcli Platform: UNKNOWN Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: Unix Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: SQL Classifier: Topic :: Database Classifier: Topic :: Database :: Front-Ends Classifier: Topic :: Software Development Classifier: Topic :: Software Development :: Libraries :: Python Modules pgspecial-1.9.0/pgspecial.egg-info/requires.txt0000644000076500000240000000003713174677543021777 0ustar irinastaff00000000000000click >= 4.1 sqlparse >= 0.1.19pgspecial-1.9.0/pgspecial.egg-info/SOURCES.txt0000644000076500000240000000135313174677543021265 0ustar irinastaff00000000000000License.txt MANIFEST.in README.rst setup.py pgspecial/__init__.py pgspecial/dbcommands.py pgspecial/iocommands.py pgspecial/main.py pgspecial/namedqueries.py pgspecial.egg-info/PKG-INFO pgspecial.egg-info/SOURCES.txt pgspecial.egg-info/dependency_links.txt pgspecial.egg-info/requires.txt pgspecial.egg-info/top_level.txt pgspecial/help/__init__.py pgspecial/help/commands.py tests/conftest.py tests/dbutils.py tests/dbutils.pyc tests/pytest.ini tests/test_specials.py tests/.cache/v/cache/lastfailed tests/__pycache__/conftest.cpython-27-PYTEST.pyc tests/__pycache__/conftest.cpython-33-PYTEST.pyc tests/__pycache__/dbutils.cpython-33.pyc tests/__pycache__/test_specials.cpython-27-PYTEST.pyc tests/__pycache__/test_specials.cpython-33-PYTEST.pycpgspecial-1.9.0/pgspecial.egg-info/top_level.txt0000644000076500000240000000001213174677543022122 0ustar irinastaff00000000000000pgspecial pgspecial-1.9.0/PKG-INFO0000644000076500000240000000665213174677543015044 0ustar irinastaff00000000000000Metadata-Version: 1.1 Name: pgspecial Version: 1.9.0 Summary: Meta-commands handler for Postgres Database. Home-page: http://pgcli.com Author: Pgcli Core Team Author-email: pgcli-dev@googlegroups.com License: LICENSE.txt Description: Meta-commands for Postgres -------------------------- |BuildStatus| |PyPI| This package provides an API to execute meta-commands (AKA "special", or "backslash commands") on PostgreSQL. Quick Start ----------- This is a python package. It can be installed with: :: $ pip install pgspecial Usage ----- Once this library is included into your project, you will most likely use the following imports: .. code-block:: python from pgspecial.main import PGSpecial from pgspecial.namedqueries import NamedQueries Then you will create and use an instance of PGSpecial: .. code-block:: python pgspecial = PGSpecial() for result in pgspecial.execute(cur, sql): # Do something If you want to import named queries from an existing config file, it is convenient to initialize and keep around the class variable in ``NamedQueries``: .. code-block:: python from configobj import ConfigObj NamedQueries.instance = NamedQueries.from_config( ConfigObj('~/.config_file_name')) Contributions: -------------- If you're interested in contributing to this project, first of all I would like to extend my heartfelt gratitude. I've written a small doc to describe how to get this running in a development setup. https://github.com/dbcli/pgspecial/blob/master/DEVELOP.rst Please feel free to reach out to me if you need help. 
My email: amjith.r@gmail.com, Twitter: `@amjithr `_ Projects using it: ------------------ This module is being used by pgcli_: A REPL for Postgres. If you find this module useful and include it in your project, I'll be happy to know about it and list it here. .. |BuildStatus| image:: https://api.travis-ci.org/dbcli/pgspecial.svg?branch=master :target: https://travis-ci.org/dbcli/pgspecial .. |PyPI| image:: https://badge.fury.io/py/pgspecial.svg :target: https://pypi.python.org/pypi/pgspecial/ :alt: Latest Version .. _pgcli: https://github.com/dbcli/pgcli Platform: UNKNOWN Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: Unix Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: SQL Classifier: Topic :: Database Classifier: Topic :: Database :: Front-Ends Classifier: Topic :: Software Development Classifier: Topic :: Software Development :: Libraries :: Python Modules pgspecial-1.9.0/README.rst0000644000076500000240000000370113144170154015405 0ustar irinastaff00000000000000Meta-commands for Postgres -------------------------- |BuildStatus| |PyPI| This package provides an API to execute meta-commands (AKA "special", or "backslash commands") on PostgreSQL. Quick Start ----------- This is a python package. It can be installed with: :: $ pip install pgspecial Usage ----- Once this library is included into your project, you will most likely use the following imports: .. code-block:: python from pgspecial.main import PGSpecial from pgspecial.namedqueries import NamedQueries Then you will create and use an instance of PGSpecial: .. code-block:: python pgspecial = PGSpecial() for result in pgspecial.execute(cur, sql): # Do something If you want to import named queries from an existing config file, it is convenient to initialize and keep around the class variable in ``NamedQueries``: .. code-block:: python from configobj import ConfigObj NamedQueries.instance = NamedQueries.from_config( ConfigObj('~/.config_file_name')) Contributions: -------------- If you're interested in contributing to this project, first of all I would like to extend my heartfelt gratitude. I've written a small doc to describe how to get this running in a development setup. https://github.com/dbcli/pgspecial/blob/master/DEVELOP.rst Please feel free to reach out to me if you need help. My email: amjith.r@gmail.com, Twitter: `@amjithr `_ Projects using it: ------------------ This module is being used by pgcli_: A REPL for Postgres. If you find this module useful and include it in your project, I'll be happy to know about it and list it here. .. |BuildStatus| image:: https://api.travis-ci.org/dbcli/pgspecial.svg?branch=master :target: https://travis-ci.org/dbcli/pgspecial .. |PyPI| image:: https://badge.fury.io/py/pgspecial.svg :target: https://pypi.python.org/pypi/pgspecial/ :alt: Latest Version .. 
_pgcli: https://github.com/dbcli/pgcli pgspecial-1.9.0/setup.cfg0000644000076500000240000000007313174677543015557 0ustar irinastaff00000000000000[egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 pgspecial-1.9.0/setup.py0000644000076500000240000000273413174677334015454 0ustar irinastaff00000000000000import re import ast from setuptools import setup, find_packages _version_re = re.compile(r'__version__\s+=\s+(.*)') with open('pgspecial/__init__.py', 'rb') as f: version = str(ast.literal_eval(_version_re.search( f.read().decode('utf-8')).group(1))) description = 'Meta-commands handler for Postgres Database.' setup( name='pgspecial', author='Pgcli Core Team', author_email='pgcli-dev@googlegroups.com', version=version, license='LICENSE.txt', url='http://pgcli.com', packages=find_packages(), description=description, long_description=open('README.rst').read(), install_requires=[ 'click >= 4.1', 'sqlparse >= 0.1.19', ], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: SQL', 'Topic :: Database', 'Topic :: Database :: Front-Ends', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', ], ) pgspecial-1.9.0/tests/0000755000076500000240000000000013174677543015100 5ustar irinastaff00000000000000
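A minimal end-to-end sketch of driving ``PGSpecial`` from a ``psycopg2`` cursor, following the usage shown in the README above; the connection parameters and the ``~/.pgspecialrc`` config path are illustrative assumptions, not anything shipped with the package.

.. code-block:: python

    import psycopg2
    from configobj import ConfigObj

    from pgspecial.main import PGSpecial
    from pgspecial.namedqueries import NamedQueries

    # Hypothetical connection parameters -- adjust for your environment.
    conn = psycopg2.connect(host='localhost', dbname='postgres', user='postgres')
    conn.autocommit = True

    # Optional: make named queries (\n, \ns, \nd) available from a config file.
    NamedQueries.instance = NamedQueries.from_config(ConfigObj('~/.pgspecialrc'))

    pgspecial = PGSpecial()

    with conn.cursor() as cur:
        # Each handler returns a list of (title, rows_or_cursor, headers, status) tuples.
        for title, rows, headers, status in pgspecial.execute(cur, '\\dt'):
            if title:
                print(title)
            if headers:
                print('\t'.join(headers))
            for row in rows or []:
                print('\t'.join(str(col) for col in row))
            if status:
                print(status)

The loop mirrors how the special-command handlers package their output, so the same consumer code works whether a command returns materialized rows, a live cursor, or only a status message.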
@py_builtinsulocalsu_should_repr_global_nameuAssertionErroru_format_explanationuNone(uexecutorutmpdirufilepathuinfileucontentsu @py_assert2u @py_assert4u @py_assert6u @py_assert9u @py_assert8u @py_format11u @py_format13u @py_assert0u @py_format4u @py_format6((u1/Users/irina/src/pgspecial/tests/test_specials.pyutest_slash_copy_to_csv2s:     Ó l  lutest_slash_copy_to_csvc Csü|jdƒ}|dj|ƒƒ|dj|ƒƒ|jƒ}|jdƒ|jƒ}|d}d}||k}|sêtjd|fd||fƒitj|ƒd 6tj|ƒd 6} di| d 6} ttj | ƒƒ‚nd}}}dS(Nutbl1.csvu/\copy (SELECT 22, 'elephant') TO '{0}' WITH csvu\copy tbl1 FROM '{0}' WITH csvu!SELECT * FROM tbl1 WHERE id1 = 22iuelephantu==u%(py1)s == %(py4)supy4upy1uuassert %(py6)supy6(u==(u%(py1)s == %(py4)suassert %(py6)s( ujoinuformatucursoruexecuteufetchoneu @pytest_aru_call_reprcompareu _saferepruAssertionErroru_format_explanationuNone( uexecutoru connectionutmpdirufilepathucururowu @py_assert0u @py_assert3u @py_assert2u @py_format5u @py_format7((u1/Users/irina/src/pgspecial/tests/test_specials.pyutest_slash_copy_from_csv>s       Eutest_slash_copy_from_csvc Cs|dƒ}d}d g}dg}d}||||g}||k}|s tjd|fd||fƒidtjƒkstj|ƒrŸtj|ƒndd6dtjƒksÇtj|ƒrÖtj|ƒndd 6}di|d 6} ttj| ƒƒ‚nd}dS(Nu \sf func1uiCREATE OR REPLACE FUNCTION public.func1() RETURNS integer LANGUAGE sql AS $function$select 1$function$ usourceu==u%(py0)s == %(py2)suexpectedupy2uresultsupy0uuassert %(py4)supy4(uiCREATE OR REPLACE FUNCTION public.func1() RETURNS integer LANGUAGE sql AS $function$select 1$function$ (u==(u%(py0)s == %(py2)suassert %(py4)s( uNoneu @pytest_aru_call_reprcompareu @py_builtinsulocalsu_should_repr_global_nameu _saferepruAssertionErroru_format_explanation( uexecutoruresultsutitleurowsuheadersustatusuexpectedu @py_assert1u @py_format3u @py_format5((u1/Users/irina/src/pgspecial/tests/test_specials.pyu test_slash_sfJs    “u test_slash_sfcCs´y|dƒWn3tk rC}zd}t|ƒ}||k}|s#tjd|fd||fƒitj|ƒd6dtjƒks™tj|ƒr¨tj|ƒndd6dtjƒksÐtjtƒrßtjtƒndd 6tj|ƒd 6}di|d 6}ttj |ƒƒ‚nd}}}WYdd}~XnmXds°tj dƒdidtjƒks~tjdƒrtjdƒndd6}ttj |ƒƒ‚ndS(Nu\sf non_existingu non_existinguinu0%(py1)s in %(py6)s {%(py6)s = %(py3)s(%(py4)s) }upy6ueupy4ustrupy3upy1uuassert %(py8)supy8uExpected an exceptionu >assert %(py0)suFalseupy0(uin(u0%(py1)s in %(py6)s {%(py6)s = %(py3)s(%(py4)s) }uassert %(py8)sF( u Exceptionustru @pytest_aru_call_reprcompareu _saferepru @py_builtinsulocalsu_should_repr_global_nameuAssertionErroru_format_explanationuNoneuFalseu_format_assertmsg(uexecutorueu @py_assert0u @py_assert5u @py_assert2u @py_format7u @py_format9u @py_format1((u1/Users/irina/src/pgspecial/tests/test_specials.pyutest_slash_sf_unknownYs ³!Nutest_slash_sf_unknownc Cs|dƒ}d}d g}dg}d}||||g}||k}|s tjd|fd||fƒidtjƒkstj|ƒrŸtj|ƒndd6dtjƒksÇtj|ƒrÖtj|ƒndd 6}di|d 6} ttj| ƒƒ‚nd}dS(Nu \sf func1()uiCREATE OR REPLACE FUNCTION public.func1() RETURNS integer LANGUAGE sql AS $function$select 1$function$ usourceu==u%(py0)s == %(py2)suexpectedupy2uresultsupy0uuassert %(py4)supy4(uiCREATE OR REPLACE FUNCTION public.func1() RETURNS integer LANGUAGE sql AS $function$select 1$function$ (u==(u%(py0)s == %(py2)suassert %(py4)s( uNoneu @pytest_aru_call_reprcompareu @py_builtinsulocalsu_should_repr_global_nameu _saferepruAssertionErroru_format_explanation( uexecutoruresultsutitleurowsuheadersustatusuexpectedu @py_assert1u @py_format3u @py_format5((u1/Users/irina/src/pgspecial/tests/test_specials.pyutest_slash_sf_parenscs    “utest_slash_sf_parensc Cs|dƒ}d}d g}dg}d}||||g}||k}|s tjd|fd||fƒidtjƒkstj|ƒrŸtj|ƒndd6dtjƒksÇtj|ƒrÖtj|ƒndd 6}di|d 6} ttj| ƒƒ‚nd}dS(Nu\sf+ schema1.s1_func1u CREATE OR REPLACE FUNCTION schema1.s1_func1() RETURNS integer LANGUAGE sql 1 
pgspecial-1.9.0/tests/conftest.py0000644000076500000240000000157612732064265017276 0ustar irinastaff00000000000000import pytest from dbutils import (create_db, db_connection, setup_db, teardown_db) from pgspecial.main import PGSpecial @pytest.yield_fixture(scope='module') def connection(): create_db('_test_db') connection = db_connection('_test_db') setup_db(connection) yield connection teardown_db(connection) connection.close() @pytest.fixture def cursor(connection): with connection.cursor() as cur: return cur @pytest.fixture def executor(connection): cur = connection.cursor() pgspecial = PGSpecial() def query_runner(sql): results = [] for title, rows, headers, status in pgspecial.execute(cur=cur, sql=sql): if rows: results.extend((title, list(rows), headers, status)) else: results.extend((title, None, headers, status)) return results return query_runner
pgspecial-1.9.0/tests/dbutils.py0000644000076500000240000000473013174656601017114 0ustar irinastaff00000000000000import pytest import psycopg2 import psycopg2.extras # TODO: should this somehow be divined from the environment?
POSTGRES_USER, POSTGRES_HOST = 'postgres', 'localhost' def db_connection(dbname=None): conn = psycopg2.connect(user=POSTGRES_USER, host=POSTGRES_HOST, database=dbname) conn.autocommit = True return conn try: conn = db_connection() CAN_CONNECT_TO_DB = True SERVER_VERSION = conn.server_version except: CAN_CONNECT_TO_DB = False SERVER_VERSION = 0 dbtest = pytest.mark.skipif( not CAN_CONNECT_TO_DB, reason="Need a postgres instance at localhost accessible by user " "'%s'" % POSTGRES_USER) def create_db(dbname): with db_connection().cursor() as cur: try: cur.execute('''CREATE DATABASE _test_db''') except: pass def setup_db(conn): with conn.cursor() as cur: # schemas cur.execute('create schema schema1') cur.execute('create schema schema2') # tables cur.execute('create table tbl1(id1 integer, txt1 text, CONSTRAINT id_text PRIMARY KEY(id1, txt1))') cur.execute('create table tbl2(id2 serial, txt2 text)') cur.execute('create table schema1.s1_tbl1(id1 integer, txt1 text)') cur.execute('create table tbl3(c3 circle, exclude using gist (c3 with &&))') cur.execute('create table "Inh1"(value1 integer) inherits (tbl1)') cur.execute('create table inh2(value2 integer) inherits (tbl1, tbl2)') # views cur.execute('create view vw1 as select * from tbl1') cur.execute('''create view schema1.s1_vw1 as select * from schema1.s1_tbl1''') # materialized views cur.execute('create materialized view mvw1 as select * from tbl1') cur.execute('''create materialized view schema1.s1_mvw1 as select * from schema1.s1_tbl1''') # datatype cur.execute('create type foo AS (a int, b text)') # functions cur.execute('''create function func1() returns int language sql as $$select 1$$''') cur.execute('''create function schema1.s1_func1() returns int language sql as $$select 2$$''') def teardown_db(conn): with conn.cursor() as cur: cur.execute(''' DROP SCHEMA public CASCADE; CREATE SCHEMA public; DROP SCHEMA IF EXISTS schema1 CASCADE; DROP SCHEMA IF EXISTS schema2 CASCADE''')
pgspecial-1.9.0/tests/pytest.ini0000644000076500000240000000006012600416376017111 0ustar irinastaff00000000000000[pytest] addopts=--capture=sys --showlocals -rxs
pgspecial-1.9.0/tests/test_specials.py0000644000076500000240000003760413174656601020316 0ustar irinastaff00000000000000 #!/usr/bin/python # -*- coding: utf-8 -*- from dbutils import dbtest, POSTGRES_USER import itertools objects_listing_headers = ['Schema', 'Name', 'Type', 'Owner', 'Size', 'Description'] @dbtest def test_slash_d(executor): results = executor('\d') title = None rows = [('public', 'Inh1', 'table', POSTGRES_USER), ('public', 'inh2', 'table', POSTGRES_USER), ('public', 'mvw1', 'materialized view', POSTGRES_USER), ('public', 'tbl1', 'table', POSTGRES_USER), ('public', 'tbl2', 'table', POSTGRES_USER), ('public', 'tbl2_id2_seq', 'sequence', POSTGRES_USER), ('public', 'tbl3', 'table', POSTGRES_USER), ('public', 'vw1', 'view', POSTGRES_USER)] headers = objects_listing_headers[:-2] status = 'SELECT 8' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_d_verbose(executor): results = executor('\d+') title = None rows = [('public', 'Inh1', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'inh2', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'mvw1', 'materialized view', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl1', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl2', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl2_id2_seq', 'sequence', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl3', 'table', POSTGRES_USER, '0 bytes', None), ('public', 'vw1', 'view', POSTGRES_USER, '0 bytes', None)] headers = objects_listing_headers status = 'SELECT 8' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_d_table_1(executor): results = executor('\d tbl1') title = None rows = [['id1', 'integer', ' not null'], ['txt1', 'text', ' not null'], ] headers = ['Column', 'Type', 'Modifiers'] status = ('Indexes:\n    "id_text" PRIMARY KEY, btree (id1, txt1)\n' 'Number of child tables: 2 (Use \\d+ to list them.)\n') expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_d_table_2(executor): results = executor('\d tbl2') title = None rows = [['id2', 'integer', " not null default nextval('tbl2_id2_seq'::regclass)"], ['txt2', 'text', ''], ] headers = ['Column', 'Type', 'Modifiers'] status = ('Number of child tables: 1 (Use \\d+ to list them.)\n') expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_d_table_verbose_1(executor): results = executor('\d+ tbl1') title = None rows = [['id1', 'integer', ' not null', 'plain', None, None], ['txt1', 'text', ' not null', 'extended', None, None], ] headers = ['Column', 'Type', 'Modifiers', 'Storage', 'Stats target', 'Description'] status = ('Indexes:\n    "id_text" PRIMARY KEY, btree (id1, txt1)\n' 'Child tables: "Inh1",\n' '              inh2\n' 'Has OIDs: no\n') expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_d_table_verbose_2(executor): results = executor('\d+ tbl2') title = None rows = [['id2',
'integer', " not null default nextval('tbl2_id2_seq'::regclass)", 'plain', None, None], ['txt2', 'text', '', 'extended', None, None], ] headers = ['Column', 'Type', 'Modifiers', 'Storage', 'Stats target', 'Description'] status = ('Child tables: inh2\n' 'Has OIDs: no\n') expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_d_table_with_exclusion(executor): results = executor('\d tbl3') title = None rows = [['c3', 'circle', '']] headers = ['Column', 'Type', 'Modifiers'] status = 'Indexes:\n "tbl3_c3_excl" EXCLUDE USING gist (c3 WITH &&)\n' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_dn(executor): """List all schemas.""" results = executor('\dn') title = None rows = [('public', POSTGRES_USER), ('schema1', POSTGRES_USER), ('schema2', POSTGRES_USER)] headers = ['Name', 'Owner'] status = 'SELECT 3' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_dt(executor): """List all tables in public schema.""" results = executor('\dt') title = None rows = [('public', 'Inh1', 'table', POSTGRES_USER), ('public', 'inh2', 'table', POSTGRES_USER), ('public', 'tbl1', 'table', POSTGRES_USER), ('public', 'tbl2', 'table', POSTGRES_USER), ('public', 'tbl3', 'table', POSTGRES_USER)] headers = objects_listing_headers[:-2] status = 'SELECT 5' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_dt_verbose(executor): """List all tables in public schema in verbose mode.""" results = executor('\dt+') title = None rows = [('public', 'Inh1', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'inh2', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl1', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl2', 'table', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl3', 'table', POSTGRES_USER, '0 bytes', None)] headers = objects_listing_headers status = 'SELECT 5' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_dv(executor): """List all views in public schema.""" results = executor('\dv') title = None row = [('public', 'vw1', 'view', POSTGRES_USER)] headers = objects_listing_headers[:-2] status = 'SELECT 1' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_dv_verbose(executor): """List all views in s1 schema in verbose mode.""" results = executor('\dv+ schema1.*') title = None row = [('schema1', 's1_vw1', 'view', POSTGRES_USER, '0 bytes', None)] headers = objects_listing_headers status = 'SELECT 1' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_dm(executor): """List all materialized views in schema1.""" results = executor('\dm schema1.*') title = None row = [('schema1', 's1_mvw1', 'materialized view', POSTGRES_USER)] headers = objects_listing_headers[:-2] status = 'SELECT 1' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_dm_verbose(executor): """List all materialized views in public schema in verbose mode.""" results = executor('\dm+') title = None row = [('public', 'mvw1', 'materialized view', POSTGRES_USER, '8192 bytes', None)] headers = objects_listing_headers status = 'SELECT 1' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_ds(executor): """List all sequences in public schema.""" results = executor('\ds') title = None row = [('public', 'tbl2_id2_seq', 'sequence', POSTGRES_USER)] headers = objects_listing_headers[:-2] 
status = 'SELECT 1' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_ds_verbose(executor): """List all sequences in public schema in verbose mode.""" results = executor('\ds+') title = None row = [('public', 'tbl2_id2_seq', 'sequence', POSTGRES_USER, '8192 bytes', None)] headers = objects_listing_headers status = 'SELECT 1' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_di(executor): """List all indexes in public schema.""" results = executor('\di') title = None row = [('public', 'id_text', 'index', POSTGRES_USER), ('public', 'tbl3_c3_excl', 'index', POSTGRES_USER)] headers = objects_listing_headers[:-2] status = 'SELECT 2' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_di_verbose(executor): """List all indexes in public schema in verbose mode.""" results = executor('\di+') title = None row = [('public', 'id_text', 'index', POSTGRES_USER, '8192 bytes', None), ('public', 'tbl3_c3_excl', 'index', POSTGRES_USER, '8192 bytes', None)] headers = objects_listing_headers status = 'SELECT 2' expected = [title, row, headers, status] assert results == expected @dbtest def test_slash_dT(executor): """List all datatypes.""" results = executor('\dT') title = None rows = [('public', 'foo', None)] headers = ['Schema', 'Name', 'Description'] status = 'SELECT 1' expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_db(executor): """List all tablespaces.""" title, rows, header, status = executor('\db') assert title is None assert header == ['Name', 'Owner', 'Location'] assert 'pg_default' in rows[0] @dbtest def test_slash_db_name(executor): """List tablespace by name.""" title, rows, header, status = executor('\db pg_default') assert title is None assert header == ['Name', 'Owner', 'Location'] assert 'pg_default' in rows[0] assert status == 'SELECT 1' @dbtest def test_slash_df(executor): results = executor('\df') title = None rows = [('public', 'func1', 'integer', '', 'normal')] headers = ['Schema', 'Name', 'Result data type', 'Argument data types', 'Type'] status = 'SELECT 1' expected = [title, rows, headers, status] assert results == expected help_rows = [['ABORT', 'ALTER AGGREGATE', 'ALTER COLLATION', 'ALTER CONVERSION', 'ALTER DATABASE', 'ALTER DEFAULT PRIVILEGES'], ['ALTER DOMAIN', 'ALTER EVENT TRIGGER', 'ALTER EXTENSION', 'ALTER FOREIGN DATA WRAPPER', 'ALTER FOREIGN TABLE', 'ALTER FUNCTION'], ['ALTER GROUP', 'ALTER INDEX', 'ALTER LANGUAGE', 'ALTER LARGE OBJECT', 'ALTER MATERIALIZED VIEW', 'ALTER OPCLASS'], ['ALTER OPERATOR', 'ALTER OPFAMILY', 'ALTER POLICY', 'ALTER ROLE', 'ALTER RULE', 'ALTER SCHEMA'], ['ALTER SEQUENCE', 'ALTER SERVER', 'ALTER SYSTEM', 'ALTER TABLE', 'ALTER TABLESPACE', 'ALTER TRIGGER'], ['ALTER TSCONFIG', 'ALTER TSDICTIONARY', 'ALTER TSPARSER', 'ALTER TSTEMPLATE', 'ALTER TYPE', 'ALTER USER'], ['ALTER USER MAPPING', 'ALTER VIEW', 'ANALYZE', 'BEGIN', 'CHECKPOINT', 'CLOSE'], ['CLUSTER', 'COMMENT', 'COMMIT', 'COMMIT PREPARED', 'COPY', 'CREATE AGGREGATE'], ['CREATE CAST', 'CREATE COLLATION', 'CREATE CONVERSION', 'CREATE DATABASE', 'CREATE DOMAIN', 'CREATE EVENT TRIGGER'], ['CREATE EXTENSION', 'CREATE FOREIGN DATA WRAPPER', 'CREATE FOREIGN TABLE', 'CREATE FUNCTION', 'CREATE GROUP', 'CREATE INDEX'], ['CREATE LANGUAGE', 'CREATE MATERIALIZED VIEW', 'CREATE OPCLASS', 'CREATE OPERATOR', 'CREATE OPFAMILY', 'CREATE POLICY'], ['CREATE ROLE', 'CREATE RULE', 'CREATE SCHEMA', 'CREATE SEQUENCE', 'CREATE SERVER', 'CREATE TABLE'], ['CREATE 
TABLE AS', 'CREATE TABLESPACE', 'CREATE TRANSFORM', 'CREATE TRIGGER', 'CREATE TSCONFIG', 'CREATE TSDICTIONARY'], ['CREATE TSPARSER', 'CREATE TSTEMPLATE', 'CREATE TYPE', 'CREATE USER', 'CREATE USER MAPPING', 'CREATE VIEW'], ['DEALLOCATE', 'DECLARE', 'DELETE', 'DISCARD', 'DO', 'DROP AGGREGATE'], ['DROP CAST', 'DROP COLLATION', 'DROP CONVERSION', 'DROP DATABASE', 'DROP DOMAIN', 'DROP EVENT TRIGGER'], ['DROP EXTENSION', 'DROP FOREIGN DATA WRAPPER', 'DROP FOREIGN TABLE', 'DROP FUNCTION', 'DROP GROUP', 'DROP INDEX'], ['DROP LANGUAGE', 'DROP MATERIALIZED VIEW', 'DROP OPCLASS', 'DROP OPERATOR', 'DROP OPFAMILY', 'DROP OWNED'], ['DROP POLICY', 'DROP ROLE', 'DROP RULE', 'DROP SCHEMA', 'DROP SEQUENCE', 'DROP SERVER'], ['DROP TABLE', 'DROP TABLESPACE', 'DROP TRANSFORM', 'DROP TRIGGER', 'DROP TSCONFIG', 'DROP TSDICTIONARY'], ['DROP TSPARSER', 'DROP TSTEMPLATE', 'DROP TYPE', 'DROP USER', 'DROP USER MAPPING', 'DROP VIEW'], ['END', 'EXECUTE', 'EXPLAIN', 'FETCH', 'GRANT', 'IMPORT FOREIGN SCHEMA'], ['INSERT', 'LISTEN', 'LOAD', 'LOCK', 'MOVE', 'NOTIFY'], ['PGBENCH', 'PREPARE', 'PREPARE TRANSACTION', 'REASSIGN OWNED', 'REFRESH MATERIALIZED VIEW', 'REINDEX'], ['RELEASE SAVEPOINT', 'RESET', 'REVOKE', 'ROLLBACK', 'ROLLBACK PREPARED', 'ROLLBACK TO'], ['SAVEPOINT', 'SECURITY LABEL', 'SELECT', 'SELECT INTO', 'SET', 'SET CONSTRAINTS'], ['SET ROLE', 'SET SESSION AUTH', 'SET TRANSACTION', 'SHOW', 'START TRANSACTION', 'TRUNCATE'], ['UNLISTEN', 'UPDATE', 'VACUUM', 'VALUES']] @dbtest def test_slash_h(executor): """List all commands.""" results = executor('\h') expected = [None, help_rows, [], None] assert results == expected @dbtest def test_slash_h_command(executor): """Check help is returned for all commands""" for command in itertools.chain(*help_rows): results = executor('\h %s' % command) assert results[3].startswith('Description\n') assert 'Syntax' in results[3] @dbtest def test_slash_h_alias(executor): """\? is properly aliased to \h""" h_results = executor('\h SELECT') results = executor('\? 
SELECT') assert results[3] == h_results[3] @dbtest def test_slash_copy_to_tsv(executor, tmpdir): filepath = tmpdir.join('pycons.tsv') executor(u"\copy (SELECT 'Montréal', 'Portland', 'Cleveland') TO '{0}' " .format(filepath)) infile = filepath.open(encoding='utf-8') contents = infile.read() assert len(contents.splitlines()) == 1 assert u'Montréal' in contents @dbtest def test_slash_copy_to_stdout(executor, capsys): executor(u"\copy (SELECT 'Montréal', 'Portland', 'Cleveland') TO stdout") (out, err) = capsys.readouterr() assert out == u'Montréal\tPortland\tCleveland\n' @dbtest def test_slash_copy_to_csv(executor, tmpdir): filepath = tmpdir.join('pycons.tsv') executor(u"\copy (SELECT 'Montréal', 'Portland', 'Cleveland') TO '{0}' WITH csv" .format(filepath)) infile = filepath.open(encoding='utf-8') contents = infile.read() assert len(contents.splitlines()) == 1 assert u'Montréal' in contents assert u',' in contents @dbtest def test_slash_copy_from_csv(executor, connection, tmpdir): filepath = tmpdir.join('tbl1.csv') executor("\copy (SELECT 22, 'elephant') TO '{0}' WITH csv" .format(filepath)) executor("\copy tbl1 FROM '{0}' WITH csv".format(filepath)) cur = connection.cursor() cur.execute("SELECT * FROM tbl1 WHERE id1 = 22") row = cur.fetchone() assert row[1] == 'elephant' @dbtest def test_slash_sf(executor): results = executor('\sf func1') title = None rows = [('CREATE OR REPLACE FUNCTION public.func1()\n' ' RETURNS integer\n' ' LANGUAGE sql\n' 'AS $function$select 1$function$\n',), ] headers = ['source'] status = None expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_sf_unknown(executor): try: executor('\sf non_existing') except Exception as e: assert 'non_existing' in str(e) else: assert False, "Expected an exception" @dbtest def test_slash_sf_parens(executor): results = executor('\sf func1()') title = None rows = [('CREATE OR REPLACE FUNCTION public.func1()\n' ' RETURNS integer\n' ' LANGUAGE sql\n' 'AS $function$select 1$function$\n',), ] headers = ['source'] status = None expected = [title, rows, headers, status] assert results == expected @dbtest def test_slash_sf_verbose(executor): results = executor('\sf+ schema1.s1_func1') title = None rows = [(' CREATE OR REPLACE FUNCTION schema1.s1_func1()\n' ' RETURNS integer\n' ' LANGUAGE sql\n' '1 AS $function$select 2$function$\n',), ] headers = ['source'] status = None expected = [title, rows, headers, status] assert results == expected