in ees_sharepoint/deletion_sync_command.py [0:0]
def deindexing_items(self, collection, ids, key):
    """Fetches the IDs of deleted items from the SharePoint server and
    invokes the delete documents API for those IDs to remove them from
    Workplace Search."""
    logger = self.logger
    delete_ids_items = ids["delete_keys"][collection].get(key)
    logger.info(f"Deindexing {key}...")
    if delete_ids_items:
        delete_site = []
        global_ids_items = ids["global_keys"][collection][key]
        # Walk every site -> list -> item recorded for deletion and check
        # whether the item still exists on the SharePoint server.
        for site_url, item_details in delete_ids_items.items():
            delete_list = []
            for list_id, items in item_details.items():
                doc = []
                for item_id in items:
                    url = f"{site_url}/_api/web/lists(guid'{list_id}')/items"
                    resp = self.sharepoint_client.get(
                        url, f"?$filter= GUID eq '{item_id}'", "deindex")
                    if resp:
                        response = resp.json()
                        result = response.get('d', {}).get('results')
                        # The item is gone from SharePoint if the lookup
                        # returns 404 or an empty result set.
                        if resp.status_code == requests.codes['not_found'] or result == []:
                            doc.append(item_id)
                # Remove the missing items from Workplace Search in batches.
                if doc:
                    for chunk in split_list_into_buckets(doc, BATCH_SIZE):
                        self.workplace_search_custom_client.delete_documents(
                            document_ids=chunk)
                # Drop the deleted items from the global keys so they are not
                # tracked in future syncs.
                updated_items = global_ids_items[site_url].get(list_id)
                if updated_items is None:
                    continue
                for updated_item_id in doc:
                    if updated_item_id in updated_items:
                        updated_items.remove(updated_item_id)
                if updated_items == []:
                    delete_list.append(list_id)
            # Remove lists that no longer contain any tracked items.
            for list_id in delete_list:
                global_ids_items[site_url].pop(list_id)
            if global_ids_items[site_url] == {}:
                delete_site.append(site_url)
        # Remove sites that no longer contain any tracked lists.
        for site_url in delete_site:
            global_ids_items.pop(site_url)
    else:
        logger.info(f"No {key} found to be deleted for collection: {collection}")
    return ids
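
# Illustrative sketch (an assumption inferred from how this method walks the
# mapping, not part of the connector itself): ``ids`` appears to be a nested
# mapping of collection -> object type -> site URL -> list GUID -> item GUIDs,
# mirrored under "delete_keys" (candidates for removal) and "global_keys"
# (documents currently tracked). With hypothetical names, a value might look like:
#
#   {
#       "delete_keys": {
#           "collection1": {
#               "list_items": {
#                   "http://sharepoint-host/sites/site1": {
#                       "a1b2c3d4-...": ["item-guid-1", "item-guid-2"],
#                   }
#               }
#           }
#       },
#       "global_keys": {
#           "collection1": {
#               "list_items": {
#                   "http://sharepoint-host/sites/site1": {
#                       "a1b2c3d4-...": ["item-guid-1", "item-guid-2", "item-guid-3"],
#                   }
#               }
#           }
#       },
#   }
#
# A hypothetical call would then be:
#   ids = command.deindexing_items("collection1", ids, "list_items")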