Merge pull request #455 from qld-gov-au/develop
Develop to master - fix Solr replication
ThrawnCA authored Aug 7, 2024
2 parents 3623906 + 2b06233 commit bdf59a3
Showing 3 changed files with 23 additions and 13 deletions.
20 changes: 9 additions & 11 deletions files/default/solr-sync.sh
@@ -7,7 +7,7 @@ set -x
 BACKUP_NAME="$CORE_NAME-$(date +'%Y-%m-%dT%H:%M')"
 SNAPSHOT_NAME="snapshot.$BACKUP_NAME"
 LOCAL_SNAPSHOT="$LOCAL_DIR/$SNAPSHOT_NAME"
-SYNC_SNAPSHOT="$SYNC_DIR/$SNAPSHOT_NAME"
+SYNC_SNAPSHOT="$SYNC_DIR/${SNAPSHOT_NAME}.tgz"
 MINUTE=$(date +%M)
 
 function set_dns_primary () {
@@ -52,18 +52,18 @@ function export_snapshot () {
     if [ "$REPLICATION_STATUS" != "0" ]; then
         return $REPLICATION_STATUS
     fi
-    sudo -u solr sh -c "$LUCENE_CHECK $LOCAL_SNAPSHOT && rsync -a --delete $LOCAL_SNAPSHOT/ $SYNC_SNAPSHOT/" || return 1
+    sh -c "$LUCENE_CHECK $LOCAL_SNAPSHOT && sudo -u solr tar --force-local --exclude=write.lock -czf $SYNC_SNAPSHOT -C $LOCAL_SNAPSHOT ." || return 1
 }
 
 function import_snapshot () {
     # Give the master time to update the sync copy
     for i in $(eval echo "{1..40}"); do
-        if [ -f "$SYNC_SNAPSHOT/write.lock" ]; then
-            sudo -u solr rm -r $LOCAL_DIR/snapshot.$CORE_NAME-*
-            sudo -u solr rsync -a --delete "$SYNC_SNAPSHOT/" "$LOCAL_SNAPSHOT/" || exit 1
-            rm $LOCAL_SNAPSHOT/write.lock
-            curl "$HOST/$CORE_NAME/replication?command=restore&location=$LOCAL_DIR&name=$BACKUP_NAME"
-            return 1
+        if [ -f "$SYNC_SNAPSHOT" ]; then
+            sudo service solr stop
+            sudo -u solr mkdir $LOCAL_DIR/index
+            rm $LOCAL_DIR/index/* && sudo -u solr tar -xzf "$SYNC_SNAPSHOT" -C $LOCAL_DIR/index || exit 1
+            sudo systemctl start solr
+            return 0
         else
             sleep 5
         fi
@@ -100,9 +100,7 @@ if (/usr/local/bin/pick-solr-master.sh); then
 
     # Hourly backup to S3
     if [ "$MINUTE" = "00" ]; then
-        cd "$LOCAL_DIR"
-        tar --force-local -czf "$SNAPSHOT_NAME.tgz" "$SNAPSHOT_NAME"
-        aws s3 mv "$SNAPSHOT_NAME.tgz" "s3://$BUCKET/solr_backup/$CORE_NAME/" --expires $(date -d '30 days' --iso-8601=seconds)
+        aws s3 cp "$SYNC_SNAPSHOT" "s3://$BUCKET/solr_backup/$CORE_NAME/" --expires $(date -d '30 days' --iso-8601=seconds)
     fi
 else
     # make traffic come to this instance only as a backup option
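In short, this change switches the master/replica sync from copying a raw snapshot directory (and calling Solr's replication restore command) to shipping a single compressed tarball: the primary verifies the snapshot with the Lucene index check and packs it onto the sync directory, and the replica stops Solr, swaps the extracted files into its index directory, and starts Solr again. A minimal standalone sketch of that flow, assuming LUCENE_CHECK, LOCAL_SNAPSHOT, SYNC_SNAPSHOT and LOCAL_DIR are set as in the script above (quoting and the -p/-f flags are added here for safety; the committed code is exactly as shown in the diff):

    # Primary: verify the snapshot, then pack it into one .tgz on the shared
    # sync directory, leaving out the transient write.lock file.
    sh -c "$LUCENE_CHECK $LOCAL_SNAPSHOT && sudo -u solr tar --exclude=write.lock -czf $SYNC_SNAPSHOT -C $LOCAL_SNAPSHOT ."

    # Replica: once the tarball appears, replace the live index while Solr is down.
    if [ -f "$SYNC_SNAPSHOT" ]; then
        sudo service solr stop
        sudo -u solr mkdir -p "$LOCAL_DIR/index"
        rm -f "$LOCAL_DIR/index"/* && sudo -u solr tar -xzf "$SYNC_SNAPSHOT" -C "$LOCAL_DIR/index"
        sudo systemctl start solr
    fi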
7 changes: 7 additions & 0 deletions recipes/ckanbatch-configure.rb
@@ -60,6 +60,13 @@
   group "root"
 end
 
+file "/etc/cron.daily/prune-health-checks" do
+  content "/usr/local/bin/pick-job-server.sh && find /data -maxdepth 1 -name '*-healthcheck_*' -mmin '+60' -execdir rm '{}' ';' >/dev/null 2>&1\n"
+  mode "0755"
+  owner "root"
+  group "root"
+end
+
 file "/etc/cron.d/ckan-worker" do
   content "*/5 * * * * root /usr/local/bin/pick-job-server.sh && /usr/local/bin/ckan-monitor-job-queue.sh >/dev/null 2>&1\n"
   mode '0644'
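The ckanbatch change adds a daily cleanup of stale health-check artifacts. The recipe writes a one-line script into /etc/cron.daily, so run-parts executes it once a day on every instance, while pick-job-server.sh gates it so the prune only runs on the instance that script selects. An annotated standalone equivalent is sketched below (the shebang is added for the sketch; the rendered file contains just the single command):

    #!/bin/sh
    # Only act on the instance selected as the job server, then delete
    # health-check files at the top level of /data (-maxdepth 1) that were
    # last modified more than 60 minutes ago (-mmin '+60').
    /usr/local/bin/pick-job-server.sh && \
        find /data -maxdepth 1 -name '*-healthcheck_*' -mmin '+60' \
             -execdir rm '{}' ';' >/dev/null 2>&1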
9 changes: 7 additions & 2 deletions recipes/solr-deploy.rb
@@ -267,10 +267,15 @@
   action [:stop]
 end
 bash "Copy latest index from EFS" do
+  user account_name
   code <<-EOS
     rsync -a --delete #{efs_data_dir}/ #{real_data_dir}/
-    LATEST_INDEX=`ls -dtr #{efs_data_dir}/data/#{core_name}/data/snapshot.* |tail -1`
-    rsync $LATEST_INDEX/ #{real_data_dir}/data/#{core_name}/data/index/
+    CORE_DATA="#{real_data_dir}/data/#{core_name}/data"
+    LATEST_INDEX=`ls -dtr $CORE_DATA/snapshot.* |tail -1`
+    if (echo "$LATEST_INDEX" |grep "[.]tgz$" >/dev/null 2>&1); then
+      mkdir -p "$CORE_DATA/index"
+      rm -f $CORE_DATA/index/*; tar -xzf "$LATEST_INDEX" -C $CORE_DATA/index
+    fi
   EOS
   only_if { ::File.directory? efs_data_dir }
 end
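Finally, the solr-deploy bootstrap is updated to understand the new snapshot format: the copy from EFS now runs as the configured service account (user account_name), and when the newest snapshot in the core's data directory is a .tgz archive it is unpacked into the index directory in place of the old per-directory rsync. A sketch of that restore branch with the Chef interpolation replaced by an illustrative path (the real value comes from #{real_data_dir}/data/#{core_name}/data):

    # CORE_DATA stands in for the Chef-interpolated core data directory;
    # the path below is only an example.
    CORE_DATA="/var/solr/data/ckan/data"
    LATEST_INDEX=$(ls -dtr "$CORE_DATA"/snapshot.* | tail -1)
    if echo "$LATEST_INDEX" | grep -q '[.]tgz$'; then
        mkdir -p "$CORE_DATA/index"
        rm -f "$CORE_DATA"/index/*
        tar -xzf "$LATEST_INDEX" -C "$CORE_DATA/index"
    fi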