def _update_indexes()

in Allura/allura/command/show_models.py [0:0]


    def _update_indexes(self, collection, indexes):
        """Reconcile *collection*'s mongo indexes with the declared *indexes*.

        :param collection: a pymongo ``Collection``
        :param indexes: iterable of Ming-style index objects exposing
            ``index_spec``, ``index_options``, ``fields`` and ``unique``

        Side effects: creates missing indexes, drops & recreates indexes whose
        unique flag changed, de-dupes offending records when
        ``--delete-duplicate-key-records`` was given, and drops obsolete
        indexes when ``--clean`` was given (otherwise just logs them).
        """
        # Partition declared indexes by uniqueness, keyed by index spec.
        # index_spec is a list of (field, direction) pairs; convert to a
        # tuple so it's hashable for use as a dict key / set member.
        uindexes = {
            tuple(i.index_spec): i
            for i in indexes
            if i.unique}
        indexes = {
            tuple(i.index_spec): i
            for i in indexes
            if not i.unique}
        prev_indexes = {}       # existing non-unique, not declared non-unique
        prev_uindexes = {}      # existing unique, not declared unique
        unique_flag_drop = {}   # existing unique, now declared non-unique
        unique_flag_add = {}    # existing non-unique, now declared unique
        try:
            existing_indexes = collection.index_information().items()
        except OperationFailure:
            # exception is raised if db or collection doesn't exist yet;
            # use a list so it iterates like .items() above (pairs), not a dict
            existing_indexes = []
        for iname, fields in existing_indexes:
            if iname == '_id_':
                # the implicit _id index can never be changed or dropped
                continue
            keys = tuple(fields['key'])
            if fields.get('unique'):
                if keys in indexes:
                    unique_flag_drop[iname] = keys
                else:
                    prev_uindexes[iname] = keys
            else:
                if keys in uindexes:
                    unique_flag_add[iname] = keys
                else:
                    prev_indexes[iname] = keys

        # Convert indexes whose uniqueness changed by dropping & recreating.
        for iname, keys in unique_flag_drop.items():
            self._recreate_index(collection, iname, list(keys), unique=False)
        for iname, keys in unique_flag_add.items():
            self._recreate_index(collection, iname, list(keys), unique=True)

        # Ensure all declared unique indexes exist.
        for keys, idx in uindexes.items():
            base.log.info('...... ensure %s:%s', collection.name, idx)
            while True:  # loop in case de-duping takes multiple attempts
                index_options = idx.index_options.copy()
                if idx.fields == ('_id',):
                    # as of mongo 3.4 _id fields can't have these options set
                    # _id is always non-sparse and unique anyway.
                    # pop() rather than del: tolerate an option never being set
                    index_options.pop('sparse', None)
                    index_options.pop('unique', None)
                try:
                    collection.create_index(idx.index_spec, **index_options)
                    break
                except DuplicateKeyError as err:
                    if self.options.delete_dupes:
                        base.log.warning('Found dupe key(%s), eliminating dupes', err)
                        self._remove_dupes(collection, idx.index_spec, index_options)
                        # fall through: retry create_index on the next iteration
                    else:
                        print('Error creating unique index.  Run with --delete-duplicate-key-records if you want to delete records that violate this index', file=sys.stderr)
                        raise
        # Ensure all declared non-unique indexes exist (built in the background).
        for keys, idx in indexes.items():
            base.log.info('...... ensure %s:%s', collection.name, idx)
            collection.create_index(idx.index_spec, background=True, **idx.index_options)
        # Drop (or report) indexes that exist but are no longer declared.
        for iname, keys in prev_indexes.items():
            if keys not in indexes:
                self._drop_obsolete_index(collection, iname)
        for iname, keys in prev_uindexes.items():
            if keys not in uindexes:
                self._drop_obsolete_index(collection, iname)

    def _drop_obsolete_index(self, collection, iname):
        """Drop index *iname* when --clean was given; otherwise just log it."""
        if self.options.clean:
            base.log.info('...... drop index %s:%s', collection.name, iname)
            collection.drop_index(iname)
        else:
            base.log.info('...... potentially unneeded index, could be removed by running with --clean %s:%s',
                          collection.name, iname)