Add sync script
This commit is contained in:
parent
1d1f866e66
commit
2f952bd9ee
2 changed files with 206 additions and 1 deletions
|
@ -3,4 +3,5 @@
|
|||
A collection of useful scripts that I use everyday
|
||||
|
||||
- getStarred.sh - Get starred articles from Miniflux and copy to clipboard. [Notes](https://www.alexhyett.com/notes/getting-starred-items-from-miniflux/)
|
||||
- unstar.sh - Unstar all starred articles on Miniflux. [Notes](https://www.alexhyett.com/notes/getting-starred-items-from-miniflux/)
|
||||
- unstar.sh - Unstar all starred articles on Miniflux. [Notes](https://www.alexhyett.com/notes/getting-starred-items-from-miniflux/)
|
||||
- syncBlog.sh - The script I use to sync my notes and articles from Obsidian to my website. Converting the links. [Notes](https://www.alexhyett.com/notes/syncing-my-obsidian-notes-with-my-blog/)
|
204
syncBlog.sh
Executable file
204
syncBlog.sh
Executable file
|
@ -0,0 +1,204 @@
|
|||
#!/bin/bash
set -Eeuo pipefail

### USAGE ###
#./syncBlog.sh --obsidian-dir "/path/to/obsidian" --blog-dir "/path/to/blog"
###

# Resolve the directory this script lives in (the .last_run marker is kept here).
SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)

# Fallback locations used when the flags above are not supplied.
DEFAULT_OBSIDIAN_DIR="/Users/alex/Notes"
DEFAULT_BLOG_DIR="/Users/alex/Projects/alexhyett-blog"
# Parse command line arguments.
# Supported flags (both optional):
#   --obsidian-dir <path>  root of the Obsidian vault
#   --blog-dir <path>      root of the blog repository
while [[ $# -gt 0 ]]; do
    case "$1" in
        --obsidian-dir)
            # Fail with a clear message instead of set -u's cryptic
            # "unbound variable" when the value is missing.
            [[ $# -ge 2 ]] || { echo "Missing value for $1" >&2; exit 1; }
            OBSIDIAN_DIR="$2"
            shift 2
            ;;
        --blog-dir)
            [[ $# -ge 2 ]] || { echo "Missing value for $1" >&2; exit 1; }
            BLOG_DIR="$2"
            shift 2
            ;;
        *)
            # Diagnostics belong on stderr, not stdout.
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
||||
# Set directory paths, falling back to the defaults when not provided.
: "${OBSIDIAN_DIR:=$DEFAULT_OBSIDIAN_DIR}"
: "${BLOG_DIR:=$DEFAULT_BLOG_DIR}"

# Derived locations inside the vault, plus the last-run marker file.
WEBSITE_DIR="$OBSIDIAN_DIR/6. WEBSITE/"
NOTES_DIR="$OBSIDIAN_DIR/5. GARDEN/"
LAST_RUN_FILE="$SCRIPT_DIR/.last_run"

# Report which directories this run will use.
printf 'Using Obsidian directory: %s\n' "$OBSIDIAN_DIR"
printf 'Using Blog directory: %s\n' "$BLOG_DIR"
# Print a message prefixed with the current timestamp.
# Arguments: $1 - message text
# Outputs:   "<YYYY-MM-DD HH:MM:SS> <message>" on stdout
log() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '%s %s\n' "$stamp" "$1"
}
|
||||
# Emit the timestamp of the previous successful run.
# Globals:  LAST_RUN_FILE (read)
# Outputs:  the stored ISO-8601 timestamp, or the Unix epoch when the
#           marker file does not exist yet (first run).
get_last_run_date() {
    if [[ ! -f "$LAST_RUN_FILE" ]]; then
        echo "1970-01-01T00:00:00"
        return 0
    fi
    cat "$LAST_RUN_FILE"
}
||||
|
||||
# Record the current time in the marker file read by get_last_run_date.
# Globals: LAST_RUN_FILE (written)
update_last_run_date() {
    local now
    now=$(date +"%Y-%m-%dT%H:%M:%S")
    printf '%s\n' "$now" > "$LAST_RUN_FILE"
}
||||
|
||||
# Decide whether a note needs re-processing.
# Arguments: $1 - path to a markdown file with optional "updated:" frontmatter
# Returns:   0 when the file's "updated:" date is newer than the last run,
#            or when the file has no "updated:" date at all; 1 otherwise.
is_file_updated() {
    local file=$1
    local last_run updated

    # Declaration split from assignment: 'local x=$(cmd)' would mask a
    # failing command's exit status under 'set -e'.
    last_run=$(get_last_run_date)
    updated=$(sed -n 's/^updated: //p' "$file")

    if [[ -z "$updated" ]]; then
        return 0 # If no updated date, assume it needs processing
    fi

    # ISO-8601 timestamps compare correctly as plain strings.
    if [[ "$updated" > "$last_run" ]]; then
        return 0 # File is newer
    else
        return 1 # File is older
    fi
}
||||
|
||||
# Copy markdown files changed since the last run, preserving the directory
# structure below the source directory.
# Arguments: $1 - source directory, $2 - destination directory
# Outputs:   number of files copied, on stdout
copy_updated_files() {
    local source_dir=$1
    local dest_dir=$2
    local file_count=0
    local file rel

    while IFS= read -r -d '' file; do
        if is_file_updated "$file"; then
            # Quote the pattern so glob characters in the path stay literal.
            rel=${file#"$source_dir"}
            mkdir -p "$(dirname "$dest_dir/$rel")"
            cp "$file" "$dest_dir/$rel"
            # Not ((file_count++)): a post-increment from 0 evaluates to 0,
            # returning status 1 and aborting the script under 'set -e'.
            file_count=$((file_count + 1))
        fi
    done < <(find "$source_dir" -type f -name "*.md" -print0)

    echo "$file_count"
}
||||
|
||||
##### SCRIPT START #####

# Quote the path: unquoted $BLOG_DIR would word-split on spaces.
mkdir -p "$BLOG_DIR/_staging"

# Copy updated website files to staging
files=$(copy_updated_files "$WEBSITE_DIR" "$BLOG_DIR/_staging")
if [[ "$files" != "0" ]]; then
    log "Transferred $files updated website files to staging"
fi

# Copy updated notes files to staging
files=$(copy_updated_files "$NOTES_DIR" "$BLOG_DIR/_staging/notes")
if [[ "$files" != "0" ]]; then
    log "Transferred $files updated notes files to staging"
fi
||||
|
||||
# Print the value of the "permalink:" key from a note's YAML frontmatter.
# Arguments: $1 - path to the note file
# Outputs:   the permalink on stdout (empty when the key is absent)
extract_permalink() {
    local note_file=$1
    # sed -n exits 0 even when nothing matches. The original 'grep | sed'
    # pipeline returned 1 for notes without a permalink, which — inside the
    # caller's command substitution under 'set -Eeuo pipefail' — killed the
    # whole script and made the caller's "No permalink found" branch
    # unreachable.
    sed -n 's/^permalink: //p' "$note_file"
}
||||
|
||||
# Convert Obsidian [[wiki links]] in a staged note into markdown links,
# using permalinks from matching notes in the reference directory.
# Rewrites the file in place only when at least one link was converted.
# Arguments: $1 - staged note file to process
#            $2 - directory containing reference notes to resolve links against
# Globals:   BLOG_DIR (read, for log-friendly relative paths)
process_notes() {
    local note_file=$1
    # Path with the staging prefix stripped, used only for log messages.
    local debug_file=${note_file/$BLOG_DIR\/_staging/""}
    local reference_notes_dir=$2
    local temp_file
    local modified=false

    # Declaration split from assignment so a mktemp failure is not masked.
    temp_file=$(mktemp)

    # Read through each line in the note file
    # ('|| [[ -n $line ]]' also handles a missing trailing newline).
    while IFS= read -r line || [[ -n "$line" ]]; do
        # Matches [[target]] or [[target|display]]
        link_regex='\[\[([^|[]+)(\|([^[]+))?\]\]'

        # Replace links in the line
        new_line=""
        rest_of_line="$line"

        while [[ $rest_of_line =~ $link_regex ]]; do
            full_match="${BASH_REMATCH[0]}"
            file_name="${BASH_REMATCH[1]}"
            display_name="${BASH_REMATCH[3]:-$file_name}"
            before="${rest_of_line%%"$full_match"*}"
            after="${rest_of_line#*"$full_match"}"

            # Find the first matching reference note (case-insensitive).
            # '-print -quit' replaces 'find | head -n 1', which could fail
            # under 'pipefail' when head exits before find finishes.
            # (The stray debug 'echo $note_full_path' that polluted stdout
            # has been removed.)
            note_full_path=$(find "$reference_notes_dir" -type f -iname "$file_name.md" -print -quit)

            replacement="$full_match" # Default replacement is the original link

            # Extract the permalink if the reference note file exists
            if [[ -n $note_full_path ]]; then
                permalink=$(extract_permalink "$note_full_path")
                if [[ -n $permalink ]]; then
                    log "$debug_file: Found permalink: $permalink"
                    replacement="[$display_name]($permalink)"
                    modified=true
                else
                    log "$debug_file: No permalink found for: $note_full_path"
                fi
            else
                log "$debug_file: No matching file found for: $file_name"
            fi

            new_line+="$before$replacement"
            rest_of_line="$after"
        done

        new_line+="$rest_of_line"

        # Write the processed line to the temp_file
        echo "$new_line" >> "$temp_file"
    done < "$note_file"

    # If modifications were made, overwrite the original file; otherwise, delete the temp_file
    if $modified; then
        mv "$temp_file" "$note_file"
    else
        rm "$temp_file"
    fi
}
||||
|
||||
log "Starting note processing"

# Recursively find and process each Markdown note in the staging directory.
# (The loop body sets no variables needed afterwards, so the pipeline's
# subshell is harmless here.)
find "$BLOG_DIR/_staging" -type f -name "*.md" | while IFS= read -r note_file; do
    process_notes "$note_file" "$BLOG_DIR/content"
done

log "Finished note processing"

# Sync converted files to the blog directory
output=$(rsync -zarvh --stats "$BLOG_DIR/_staging/" "$BLOG_DIR/content/" | grep -A 1 'files transferred')
files=$(echo "$output" | awk '/files transferred/{print $NF}')

if [[ "$files" != "0" ]]; then
    log "Transferred $files files to blog"
fi

# Delete the _staging dir. Quote the path and use :? so an empty BLOG_DIR
# cannot expand to 'rm -rf /_staging' (or, unquoted and word-split,
# something far worse).
rm -rf -- "${BLOG_DIR:?}/_staging"

# Update the last run date
update_last_run_date

log "Script completed successfully"
Loading…
Reference in a new issue