diff --git a/changes/181.a.canada.bugfix b/changes/181.a.canada.bugfix
new file mode 100644
index 00000000000..2292abd112a
--- /dev/null
+++ b/changes/181.a.canada.bugfix
@@ -0,0 +1 @@
+Fixed an issue with the binding of the `table-toggle-more` JS CKAN module.
diff --git a/changes/181.b.canada.changes b/changes/181.b.canada.changes
new file mode 100644
index 00000000000..6ce59a81d83
--- /dev/null
+++ b/changes/181.b.canada.changes
@@ -0,0 +1 @@
+Markdown no longer renders heading and image tags for accessibility reasons.
diff --git a/changes/181.canada.feature b/changes/181.canada.feature
new file mode 100644
index 00000000000..cc8a128b868
--- /dev/null
+++ b/changes/181.canada.feature
@@ -0,0 +1 @@
+DataStore now supports foreign key constraints. Supply `foreign_keys` dictionary to the `datastore_create` action method.
diff --git a/changes/183.canada.bugfix b/changes/183.canada.bugfix
new file mode 100644
index 00000000000..34912e19fca
--- /dev/null
+++ b/changes/183.canada.bugfix
@@ -0,0 +1 @@
+Display accented characters properly in Excel for filtered results downloaded using datatablesview.
diff --git a/ckan/lib/helpers.py b/ckan/lib/helpers.py
index b39c85529a7..bc00ab5d9b0 100644
--- a/ckan/lib/helpers.py
+++ b/ckan/lib/helpers.py
@@ -61,9 +61,11 @@
DEFAULT_FACET_NAMES = u'organization groups tags res_format license_id'
+# (canada fork only): disallow a11y controlling tags
+# h1, h2, h3 (reason: non-semantic tags)
+# img (reason: alt text)
MARKDOWN_TAGS = set([
- 'del', 'dd', 'dl', 'dt', 'h1', 'h2',
- 'h3', 'img', 'kbd', 'p', 'pre', 's',
+ 'del', 'dd', 'dl', 'dt', 'kbd', 'p', 'pre', 's',
'sup', 'sub', 'strike', 'br', 'hr'
]).union(ALLOWED_TAGS)
diff --git a/ckan/public/base/javascript/modules/table-toggle-more.js b/ckan/public/base/javascript/modules/table-toggle-more.js
index 7a5c49c396a..1245b47dacb 100644
--- a/ckan/public/base/javascript/modules/table-toggle-more.js
+++ b/ckan/public/base/javascript/modules/table-toggle-more.js
@@ -4,7 +4,11 @@
this.ckan.module('table-toggle-more', function($) {
return {
/* options object can be extended using data-module-* attributes */
- options: {},
+ options: {
+ // (canada fork only): jquery issues fix
+ showLabel: null,
+ hideLabel: null
+ },
/* Initialises the module setting up elements and event listeners.
*
@@ -18,12 +22,21 @@ this.ckan.module('table-toggle-more', function($) {
if (rows) {
// How much is the colspan?
var cols = $('thead tr th', this.el).length;
+ // (canada fork only): jquery issues fix
+ let showLabel = this._('Show more');
+ if( this.options.showLabel != null ){
+ showLabel = this.options.showLabel;
+ }
+ let hideLabel = this._('Hide');
+ if( this.options.hideLabel != null ){
+ hideLabel = this.options.hideLabel;
+ }
var template_more = [
'
',
'| ',
'',
- '' + this._('Show more') + '',
- '' + this._('Hide') + '',
+ '' + showLabel + '',
+ '' + hideLabel + '',
'',
' | ',
'
'
@@ -35,11 +48,41 @@ this.ckan.module('table-toggle-more', function($) {
''
].join('\n');
- var seperator = $(template_seperator).insertAfter($('.toggle-more:last-child', this.el));
+ var seperator = $(template_seperator).insertAfter($('.toggle-more:last-child', this.el));
$(template_more).insertAfter(seperator);
-
- $('.show-more', this.el).on('click', this._onShowMore);
- $('.show-less', this.el).on('click', this._onShowLess);
+ // (canada fork only): jquery issues fix
+ function _bindButtons(_moduleElement){
+ $('.show-more', _moduleElement).off('click.Show');
+ $('.show-more', _moduleElement).on('click.Show', function(_event){
+ _event.preventDefault();
+ _moduleElement.removeClass('table-toggle-more').addClass('table-toggle-less');
+ });
+ $('.show-more', _moduleElement).off('keyup.Show');
+ $('.show-more', _moduleElement).on('keyup.Show', function(_event){
+ let keyCode = _event.keyCode ? _event.keyCode : _event.which;
+ // enter key required for a11y
+ if( keyCode == 13 ){
+ _event.preventDefault();
+ _moduleElement.removeClass('table-toggle-more').addClass('table-toggle-less');
+ }
+ });
+ $('.show-less', _moduleElement).off('click.Hide');
+ $('.show-less', _moduleElement).on('click.Hide', function(_event){
+ _event.preventDefault();
+ _moduleElement.removeClass('table-toggle-less').addClass('table-toggle-more');
+ });
+ $('.show-less', _moduleElement).off('keyup.Hide');
+ $('.show-less', _moduleElement).on('keyup.Hide', function(_event){
+ let keyCode = _event.keyCode ? _event.keyCode : _event.which;
+ // enter key required for a11y
+ if( keyCode == 13 ){
+ _event.preventDefault();
+ _moduleElement.removeClass('table-toggle-less').addClass('table-toggle-more');
+ }
+ });
+ }
+ _bindButtons(this.el);
+ setTimeout(_bindButtons, 500, this.el);
}
},
diff --git a/ckanext/datastore/backend/postgres.py b/ckanext/datastore/backend/postgres.py
index c2171d86fbe..bd407291c3d 100644
--- a/ckanext/datastore/backend/postgres.py
+++ b/ckanext/datastore/backend/postgres.py
@@ -28,8 +28,11 @@
from psycopg2.extras import register_default_json, register_composite
import distutils.version
+# (canada fork only): foreign keys
+# TODO: upstream contrib!!
from sqlalchemy.exc import (ProgrammingError, IntegrityError,
- DBAPIError, DataError, DatabaseError)
+ DBAPIError, DataError, DatabaseError,
+ InternalError)
import ckan.model as model
import ckan.plugins as plugins
@@ -59,6 +62,11 @@
'permission_denied': '42501',
'duplicate_table': '42P07',
'duplicate_alias': '42712',
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ 'no_unique_constraint': '42830',
+ 'row_referenced_constraint': '23503',
+ 'table_referenced_constraint': '2BP01'
}
_DATE_FORMATS = ['%Y-%m-%d',
@@ -317,6 +325,50 @@ def _get_fields(connection, resource_id):
return fields
+# (canada fork only): foreign keys
+# TODO: upstream contrib!!
+def _get_foreign_constraints(connection, resource_id, return_constraint_names=False):
+ u'''
+    Return a dict of {foreign_table: {primary_column: foreign_column}}, or a list of constraint names if return_constraint_names is True.
+ '''
+ # p.conrelid: The table this constraint is on
+ # p.conindid: The index supporting this constraint
+ # p.confrelid: The referenced table
+ # p.confupdtype: Foreign key update action code (#TODO: support `cascade`??)
+ # p.confdeltype: Foreign key deletion action code (#TODO: support `cascade`??)
+    # p.conkey: List of the constrained columns (int2, pg_attribute.attnum {The number of the column})
+    # p.confkey: List of the referenced columns (int2, pg_attribute.attnum {The number of the column})
+ #FIXME: this is just super broken.
+ foreign_constraints = {} if not return_constraint_names else []
+ foreign_constraints_sql = sqlalchemy.text(u'''
+ SELECT
+ p.conname as constraint_name,
+ c1.relname as foreign_table,
+ co1.column_name as foreign_column,
+ co2.column_name as primary_column
+ FROM pg_constraint p
+ LEFT JOIN pg_class c1 ON c1.oid = p.confrelid
+ LEFT JOIN pg_class c2 ON c2.oid = p.conrelid
+ LEFT JOIN information_schema.columns co1 ON
+ (SELECT relname FROM pg_class WHERE oid = p.confrelid) = co1.table_name
+ AND co1.ordinal_position = ANY(p.confkey)
+ LEFT JOIN information_schema.columns co2 ON
+ (SELECT relname FROM pg_class WHERE oid = p.conrelid) = co2.table_name
+ AND co2.ordinal_position = ANY(p.conkey)
+ WHERE p.contype = 'f'
+ AND c2.relname = '{0}'
+ ORDER BY p.oid;
+ '''.format(resource_id))
+ foreign_constraints_results = connection.execute(foreign_constraints_sql)
+ for result in foreign_constraints_results.fetchall():
+ if return_constraint_names:
+ foreign_constraints.append(result.constraint_name)
+ continue
+ foreign_constraints[result.foreign_table] = {}
+ foreign_constraints[result.foreign_table][result.primary_column] = result.foreign_column
+ return foreign_constraints
+
+
def _cache_types(connection):
if not _pg_types:
results = connection.execute(
@@ -582,6 +634,15 @@ def _generate_index_name(resource_id, field):
value = (resource_id + field).encode('utf-8')
return hashlib.sha1(value).hexdigest()
+# (canada fork only): foreign keys
+# TODO: upstream contrib!!
+def _generate_constraint_name(foreign_table, primary_fields, foreign_fields):
+ value = '{0}_{1}_{2}'.format(
+ '-'.join(k for k in primary_fields),
+ foreign_table,
+ '-'.join(v for v in foreign_fields)
+ ).encode('utf-8')
+ return 'fk_%s' % hashlib.sha1(value).hexdigest()
def _get_fts_index_method():
method = config.get('ckan.datastore.default_fts_index_method')
@@ -934,6 +995,32 @@ def create_table(context, data_dict, plugin_data):
sql_fields = u", ".join([u'{0} {1}'.format(
identifier(f['id']), f['type']) for f in fields])
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ foreign_keys = data_dict.get('foreign_keys')
+ foreign_fields = []
+ if foreign_keys:
+ for f_table_name, column_name_map in foreign_keys.items():
+ constraint_name = _generate_constraint_name(f_table_name, column_name_map.keys(), column_name_map.values())
+ log.debug('Trying to build foreign key constraint {0} for column(s) {1}({2}) on foreign column(s) {3}({4})'.format(
+ identifier(constraint_name),
+ identifier(data_dict['resource_id']),
+ ','.join(identifier(k) for k in column_name_map.keys()),
+ identifier(f_table_name),
+ ','.join(identifier(v) for v in column_name_map.values())
+ ))
+ #TODO: allow for deletion and update enum??
+        # might be able to do this just for upstream. for now, we force restrict on both delete and update.
+ # a = no action, r = restrict, c = cascade, n = set null, d = set default
+ foreign_fields.append('CONSTRAINT {0} FOREIGN KEY ({1}) REFERENCES {2}({3}) ON DELETE RESTRICT ON UPDATE RESTRICT'.format(
+ identifier(constraint_name),
+ ','.join(identifier(k) for k in column_name_map.keys()),
+ identifier(f_table_name),
+ ','.join(identifier(v) for v in column_name_map.values())
+ ))
+ if foreign_fields:
+ sql_fields += ", " + ", ".join(foreign_fields)
+
sql_string = u'CREATE TABLE {0} ({1});'.format(
identifier(data_dict['resource_id']),
sql_fields
@@ -976,6 +1063,11 @@ def alter_table(context, data_dict, plugin_data):
supplied_fields = data_dict.get('fields', [])
current_fields = _get_fields(
context['connection'], data_dict['resource_id'])
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ current_foreign_constraints = _get_foreign_constraints(
+ context['connection'], data_dict['resource_id'],
+ return_constraint_names=True)
if not supplied_fields:
supplied_fields = current_fields
check_fields(context, supplied_fields)
@@ -1025,6 +1117,42 @@ def alter_table(context, data_dict, plugin_data):
identifier(f['id']),
f['type']))
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ foreign_keys = data_dict.get('foreign_keys')
+ if foreign_keys is not None:
+ constraints = set()
+ for f_table_name, column_name_map in foreign_keys.items():
+ constraint_name = _generate_constraint_name(f_table_name, column_name_map.keys(), column_name_map.values())
+ constraints.add(constraint_name)
+ if constraint_name in current_foreign_constraints:
+ log.debug('Foreign key constraint {0} already exists, skipping...'.format(constraint_name))
+ continue
+ log.debug('Trying to build foreign key constraint {0} for column(s) {1}({2}) on foreign column(s) {3}({4})'.format(
+ identifier(constraint_name),
+ identifier(data_dict['resource_id']),
+ ','.join(identifier(k) for k in column_name_map.keys()),
+ identifier(f_table_name),
+ ','.join(identifier(v) for v in column_name_map.values())
+ ))
+ #TODO: allow for deletion and update enum??
+        # might be able to do this just for upstream. for now, we force restrict on both delete and update.
+ # a = no action, r = restrict, c = cascade, n = set null, d = set default
+ alter_sql.append('ALTER TABLE {0} ADD CONSTRAINT {1} FOREIGN KEY ({2}) REFERENCES {3}({4}) ON DELETE RESTRICT ON UPDATE RESTRICT;'.format(
+ identifier(data_dict['resource_id']),
+ identifier(constraint_name),
+ ','.join(identifier(k) for k in column_name_map.keys()),
+ identifier(f_table_name),
+ ','.join(identifier(v) for v in column_name_map.values())
+ ))
+ #TODO: test when upstream contrib.
+ remove_foreign_constraints = set(current_foreign_constraints) - constraints
+ for constraint_name in remove_foreign_constraints:
+ alter_sql.append('ALTER TABLE {0} DROP CONSTRAINT {1};'.format(
+ identifier(data_dict['resource_id']),
+ identifier(constraint_name)
+ ))
+
if plugin_data or any('info' in f for f in supplied_fields):
raw_field_info, _old = _get_raw_field_info(
context['connection'],
@@ -1537,21 +1665,28 @@ def upsert(context, data_dict):
Any error results in total failure! For now pass back the actual error.
Should be transactional.
'''
- backend = DatastorePostgresqlBackend.get_active_backend()
- engine = backend._get_write_engine()
- context['connection'] = engine.connect()
+ # (canada fork only): use contextual connection for deferring commits
+ #TODO: upstream contrib??
+ connection = context.get('connection', None)
+ trans = None
+ if not connection:
+ backend = DatastorePostgresqlBackend.get_active_backend()
+ engine = backend._get_write_engine()
+ connection = engine.connect()
+ trans = connection.begin()
timeout = context.get('query_timeout', _TIMEOUT)
- trans = context['connection'].begin()
try:
# check if table already existes
- context['connection'].execute(
+ connection.execute(
u'SET LOCAL statement_timeout TO {0}'.format(timeout))
- upsert_data(context, data_dict)
- if data_dict.get(u'dry_run', False):
- trans.rollback()
- else:
- trans.commit()
+ # (canada fork only): use contextual connection for deferring commits
+ upsert_data(dict(context, connection=connection), data_dict)
+ if trans: # (canada fork only): use contextual connection for deferring commits
+ if data_dict.get(u'dry_run', False):
+ trans.rollback()
+ else:
+ trans.commit()
return _unrename_json_field(data_dict)
except IntegrityError as e:
if e.orig.pgcode == _PG_ERR_CODE['unique_violation']:
@@ -1577,10 +1712,14 @@ def upsert(context, data_dict):
})
raise
except Exception as e:
- trans.rollback()
+ # (canada fork only): use contextual connection for deferring commits
+ if trans:
+ trans.rollback()
raise
finally:
- context['connection'].close()
+ # (canada fork only): use contextual connection for deferring commits
+ if not context.get('defer_commit') and not context.get('connection'):
+ connection.close()
def search(context, data_dict):
@@ -1912,6 +2051,8 @@ def delete(self, context, data_dict):
try:
# check if table exists
if 'filters' not in data_dict:
+ # (canada fork only): change CASCADE -> RESTRICT???
+ # TODO: discuss this for deleting foreign keys...
context['connection'].execute(
u'DROP TABLE "{0}" CASCADE'.format(
data_dict['resource_id'])
@@ -1921,6 +2062,22 @@ def delete(self, context, data_dict):
trans.commit()
return _unrename_json_field(data_dict)
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ #FIXME: flagged issue, referencing someone else's table will limit their table from the constraints.
+ except (IntegrityError, ProgrammingError, InternalError) as e:
+ trans.rollback()
+ if e.orig.pgcode == _PG_ERR_CODE['row_referenced_constraint'] or e.orig.pgcode == _PG_ERR_CODE['table_referenced_constraint']:
+ # FIXME: better error message??
+ raise ValidationError({
+ 'foreign_constraints': ['Cannot delete records or table'
+ ' because of a reference to another table.'],
+ 'info': {
+ 'orig': str(e.orig),
+ 'pgcode': e.orig.pgcode
+ }
+ })
+ raise
except Exception:
trans.rollback()
raise
@@ -1989,6 +2146,20 @@ def create(self, context, data_dict, plugin_data):
}
})
raise
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ except ProgrammingError as e:
+ if e.orig.pgcode == _PG_ERR_CODE['no_unique_constraint']:
+ # FIXME: better error message??
+ raise ValidationError({
+ 'foreign_constraints': ['Cannot insert records or create index'
+ ' because of a foreign constraint'],
+ 'info': {
+ 'orig': str(e.orig),
+ 'pgcode': e.orig.pgcode
+ }
+ })
+ raise
except DataError as e:
raise ValidationError({
'data': e.message,
@@ -2111,6 +2282,11 @@ def resource_fields(self, id):
aliases.append(alias[0])
info['meta']['aliases'] = aliases
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
+ #FIXME: fix later, multiple column foreign keys and ordered dict/lists?
+ #info['foreign_keys'] = _get_foreign_constraints(engine, id)
+
# get the data dictionary for the resource
with engine.connect() as conn:
data_dictionary = _result_fields(
@@ -2119,6 +2295,8 @@ def resource_fields(self, id):
None
)
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
schema_sql = sqlalchemy.text(u'''
SELECT
f.attname AS column_name,
@@ -2133,7 +2311,11 @@ def resource_fields(self, id):
WHEN p.contype = 'u' THEN True
WHEN p.contype = 'p' THEN True
ELSE False
- END AS uniquekey
+ END AS uniquekey,
+ CASE
+ WHEN p.contype = 'f' THEN True
+ ELSE False
+ END AS foreignkeys
FROM pg_attribute f
JOIN pg_class c ON c.oid = f.attrelid
JOIN pg_type t ON t.oid = f.atttypid
@@ -2153,11 +2335,14 @@ def resource_fields(self, id):
colname = row.column_name
if colname.startswith('_'): # Skip internal rows
continue
+ # (canada fork only): foreign keys
+ # TODO: upstream contrib!!
colinfo = {'native_type': row.native_type,
'notnull': row.notnull,
'index_name': row.index_name,
'is_index': row.is_index,
- 'uniquekey': row.uniquekey}
+ 'uniquekey': row.uniquekey,
+ 'foreignkeys': row.foreignkeys}
schemainfo[colname] = colinfo
for field in data_dictionary:
@@ -2214,7 +2399,10 @@ def create_function(name, arguments, rettype, definition, or_replace):
# validator OneOf checks for safety of argmode
argmode=a['argmode'] if 'argmode' in a else '',
argname=identifier(a['argname']),
- argtype=identifier(a['argtype']))
+ # (canada fork only): support literal type boolean
+ # TODO: upstream contrib!! make better with constant list of psql literals.
+ # FIXME: psql has some literals that do not work in double quotes.
+ argtype=identifier(a['argtype']) if a['argtype'] != 'boolean' else a['argtype'])
for a in arguments),
rettype=identifier(rettype),
definition=literal_string(definition))
diff --git a/ckanext/datastore/logic/action.py b/ckanext/datastore/logic/action.py
index f69101fbf70..320455352d9 100644
--- a/ckanext/datastore/logic/action.py
+++ b/ckanext/datastore/logic/action.py
@@ -65,6 +65,9 @@ def datastore_create(context, data_dict):
:type records: list of dictionaries
:param primary_key: fields that represent a unique key (optional)
:type primary_key: list or comma separated string
+ :param foreign_keys: tables and fields that represent foreign keys (optional)
+    :type foreign_keys: dict mapping foreign table names to column maps of
+        local column to foreign column, e.g. {'table_name': {'field_id': 'foreign_field_id'}}
:param indexes: indexes on table (optional)
:type indexes: list or comma separated string
:param triggers: trigger functions to apply to this table on update/insert.
@@ -346,6 +349,9 @@ def datastore_upsert(context, data_dict):
return result
+# (canada fork only): allow GET request
+# TODO: upstream contrib??
+@logic.side_effect_free
def datastore_info(context, data_dict):
'''
Returns detailed metadata about a resource.
@@ -376,6 +382,7 @@ def datastore_info(context, data_dict):
- is_index
- notnull
- uniquekey
+        - foreignkeys
'''
backend = DatastoreBackend.get_active_backend()
diff --git a/ckanext/datastore/logic/schema.py b/ckanext/datastore/logic/schema.py
index fa696c2535b..54825a049bd 100644
--- a/ckanext/datastore/logic/schema.py
+++ b/ckanext/datastore/logic/schema.py
@@ -118,6 +118,9 @@ def datastore_create_schema():
'info': [ignore_missing],
},
'primary_key': [ignore_missing, list_of_strings_or_string],
+ # (canada fork only): foreign_keys
+ # TODO: upstream contrib!!
+ 'foreign_keys': [ignore_missing, dict_only],
'indexes': [ignore_missing, list_of_strings_or_string],
'triggers': {
'when': [
diff --git a/ckanext/datatablesview/blueprint.py b/ckanext/datatablesview/blueprint.py
index faa9021738f..160fdebd2f9 100644
--- a/ckanext/datatablesview/blueprint.py
+++ b/ckanext/datatablesview/blueprint.py
@@ -206,6 +206,7 @@ def filtered_download(resource_view_id):
u'filters': json.dumps(filters),
u'format': request.form[u'format'],
u'fields': u','.join(cols),
+ u'bom': True,
}))