Merge pull request #153 from marblestation/fix_reindex_docsPending
Bugfix: new key for docsPending (used in reindex script)
marblestation authored Mar 2, 2021
2 parents b2f6495 + 1f55cea commit 9d7c455
Showing 2 changed files with 27 additions and 19 deletions.
29 changes: 16 additions & 13 deletions adsmp/tests/test_reindex.py

Large diffs are not rendered by default.

17 changes: 11 additions & 6 deletions scripts/reindex.py
@@ -41,6 +41,11 @@ def get_solr_url(path):
 update_url = get_solr_url('/collection2/update')
 mbean_url = get_solr_url('/collection2/admin/mbeans?stats=true&wt=json')
 
+def assert_different(dirname1, dirname2):
+    assert dirname1 != dirname2
+
+def assert_same(dirname1, dirname2):
+    assert dirname1 == dirname2
 
 def run():
     # it is important that we do not run multiple times
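
The hunk above pulls the two inline dataDir comparisons out into module-level helpers, presumably so the updated adsmp/tests/test_reindex.py (diff not rendered above) can stub them. A minimal sketch of the seam this creates, assuming scripts/reindex.py is importable as reindex:

    from unittest import mock

    import reindex  # assumed import name for scripts/reindex.py

    with mock.patch.object(reindex, 'assert_different'), \
         mock.patch.object(reindex, 'assert_same'):
        # Inside this block both dataDir sanity checks are no-ops, so
        # run() can be driven against canned core-status fixtures
        # without a live Solr.
        pass
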
@@ -60,12 +65,12 @@ def run():
         if set(cores['status'].keys()) != set(['collection1', 'collection2']):
             raise Exception('we dont have both cores available')
 
-        assert cores['status']['collection2']['dataDir'] != cores['status']['collection1']['dataDir']
+        assert_different(cores['status']['collection2']['dataDir'], cores['status']['collection1']['dataDir'])
 
         logger.info('We are starting the indexing into collection2; once finished; we will automatically activate the new core')
 
         logger.info('First, we will delete all documents from collection2')
-        r = r = requests.post(update_url, data={'commit': 'true', "delete":{"query":"*:*"}, 'waitSearcher': 'true'}, timeout=60*60)
+        r = requests.post(update_url, data={'commit': 'true', "delete":{"query":"*:*"}, 'waitSearcher': 'true'}, timeout=60*60)
         r.raise_for_status()
         logger.info('Done deleting all docs from collection2')
 
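
Besides switching to assert_different, this hunk also drops a doubled "r = r =" assignment (harmless, but confusing) from the delete call. For reference, a standalone sketch of the same wipe step written against Solr's JSON update API; this is a variant, not the script's exact form-encoded call, and update_url is the .../collection2/update endpoint built at the top of the script:

    import requests

    def wipe_collection(update_url):
        # Delete every document, commit, and wait for the new searcher
        # so subsequent reads observe the now-empty index.
        r = requests.post(
            update_url,
            params={'commit': 'true', 'waitSearcher': 'true'},
            json={'delete': {'query': '*:*'}},
            timeout=60 * 60,  # a wipe plus commit can be slow on large cores
        )
        r.raise_for_status()
        return r
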
@@ -139,17 +144,17 @@ def run():
 
         # verify the new core is loaded
         new_cores = requests.get(cores_url + '?wt=json').json()
-        assert cores['status']['collection2']['dataDir'] == new_cores['status']['collection1']['dataDir']
+        assert_same(cores['status']['collection2']['dataDir'], new_cores['status']['collection1']['dataDir'])
         logger.info('Verified the new collection is in place')
 
 
         logger.info('Deleting the lock; congratulations on your new solr collection!')
         os.remove(lockfile)
     except Exception as e:
-        logger.error('Failed; we will keep the process permanently locked: %s' % (e,))
+        logger.exception('Failed; we will keep the process permanently locked')
         data['last-exception'] = str(e)
         write_lockfile(lockfile, data)
-        raise
+        sys.exit(1)
 
 
 def execute(command, **kwargs):
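
Two behavior changes ride along in this hunk: logger.exception replaces logger.error so the full traceback lands in the log rather than just str(e), and the bare raise becomes sys.exit(1) so the script still exits nonzero without the interpreter printing the same traceback a second time. A minimal sketch of the resulting pattern (do_reindex is a hypothetical stand-in for the body of run()):

    import logging
    import sys

    logger = logging.getLogger('reindex')

    def main():
        try:
            do_reindex()  # hypothetical stand-in for the body of run()
        except Exception:
            # logger.exception() logs at ERROR level and appends the
            # full traceback automatically.
            logger.exception('Failed; we will keep the process permanently locked')
            # Nonzero exit for the shell, without re-raising.
            sys.exit(1)
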
@@ -208,7 +213,7 @@ def monitor_solr_writes():
         beans = r.json()[u'solr-mbeans']
         for bean in beans:
             if type(bean) is dict and 'updateHandler' in bean:
-                current_docs_pending = bean['updateHandler']['stats']['docsPending']
+                current_docs_pending = bean['updateHandler']['stats']['UPDATE.updateHandler.docsPending']
                 if current_docs_pending == previous_docs_pending:
                     consecutive_match_count += 1
                 else:
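
This last hunk is the bugfix the commit title refers to: newer Solr releases report the mbeans statistics under flattened metric names, so the pending-documents counter moved from stats['docsPending'] to stats['UPDATE.updateHandler.docsPending']. A hedged sketch of the same lookup as a standalone helper, where mbean_url is the .../admin/mbeans?stats=true&wt=json endpoint defined at the top of the script:

    import requests

    def get_docs_pending(mbean_url):
        # 'solr-mbeans' alternates category names (strings) with stat
        # dicts; find the one carrying the update handler and read the
        # flattened metric key this commit switches to.
        beans = requests.get(mbean_url, timeout=60).json()['solr-mbeans']
        for bean in beans:
            if isinstance(bean, dict) and 'updateHandler' in bean:
                return bean['updateHandler']['stats']['UPDATE.updateHandler.docsPending']
        return None  # no update-handler stats found
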
