@jskuse
Created May 17, 2014 11:19
A Ruby class for game developers and others who want to use Dropbox to synchronise large assets with a small amount of safety. It keeps a cache of file hashes to detect conflicts and prompts before overwriting anything. Useful for integrating into Rake tasks (see the sketch after the class).
require "json"
require "digest/md5"
require "fileutils"
class DirectoryCommit
def initialize(working_path, sync_path, ignore = nil)
@hash_file = '.filehashes-' + working_path.gsub(/[^\w]/, '')
@working_path = working_path
@sync_path = sync_path
@ignore = ignore
end
def push
return DirectorySync.sync(@working_path, @sync_path, @hash_file, @ignore)
end
def pull
return DirectorySync.sync(@sync_path, @working_path, @hash_file, @ignore)
end
def status
return DirectorySync.changes(@working_path, @hash_file, @ignore)
end
  def self.changes(path, hash_file, ignore = nil)
    path = add_trailing_slash(path)
    ignore = Regexp.union(ignore) if ignore
    synced_file_hashes = load_hashes(hash_file)
    path_hashes = get_hashes(path, ignore)
    get_changes(synced_file_hashes, path_hashes)
  end

  def self.sync(source, destination, hash_file, ignore = nil)
    source = add_trailing_slash(source)
    destination = add_trailing_slash(destination)
    ignore = Regexp.union(ignore) if ignore
    synced_file_hashes = load_hashes(hash_file)
    updated_hashes = merge(source, destination, synced_file_hashes, ignore)
    store_hashes(hash_file, updated_hashes)
    updated_hashes
  end
  # Note: `private` has no effect on `def self.` class methods, so the helpers
  # below remain callable from the instance methods above; the keyword is kept
  # as a marker of intent.
  private

  def self.add_trailing_slash(path)
    # return a copy rather than mutating the caller's string in place
    path.end_with?('/') ? path : path + '/'
  end
  def self.get_hashes(directory, ignore = nil)
    hashes = {}
    Dir.glob(directory + '**/*').each do |file|
      next if File.directory?(file)
      next if ignore && file.match(ignore)
      hashes[file.sub(directory, '')] = Digest::MD5.file(file).hexdigest
    end
    hashes
  end
  def self.store_hashes(file, hashes)
    File.open(file, 'w') { |f| f.write(JSON.pretty_generate(hashes)) }
  end
  def self.load_hashes(file)
    JSON.parse(File.read(file))
  rescue
    # a missing or unreadable hash file just means nothing has been synced yet
    {}
  end
  def self.pretty_print_changes(changes)
    if changes.empty?
      puts 'No changes.'
    else
      changes.each do |file, change_data|
        puts "#{change_data[:change]}: #{file}"
      end
    end
  end
  def self.get_changes(old_hashes, new_hashes)
    changes = {}
    # start off assuming that everything has potentially been deleted
    deleted_file_hashes = old_hashes.clone
    new_hashes.each do |file, new_hash|
      # this file still exists, so remove it from the potentially deleted list
      deleted_file_hashes.delete(file)
      old_hash = old_hashes[file]
      # no change
      next if new_hash == old_hash
      # added if there was no hash for this file previously, modified otherwise
      change = old_hash ? :modified : :added
      changes[file] = { :change => change, :old_hash => old_hash, :new_hash => new_hash }
    end
    deleted_file_hashes.each do |deleted_file, deleted_hash|
      changes[deleted_file] = { :change => :deleted, :old_hash => deleted_hash, :new_hash => nil }
    end
    changes
  end
  def self.print_no_changes
    puts "Nothing to merge."
  end
  def self.merge(source, destination, synced_file_hashes, ignore = nil, dry_run = false)
    updated_synced_file_hashes = synced_file_hashes.clone
    source_hashes = get_hashes(source, ignore)
    source_changes = get_changes(synced_file_hashes, source_hashes)
    # if there are changes, we also need a list of what has changed at the other end
    unless source_changes.empty?
      destination_hashes = get_hashes(destination, ignore)
      destination_changes = get_changes(synced_file_hashes, destination_hashes)
    end
    file_change_count = 0
    source_changes.each do |file, change_data|
      source_file = source + file
      destination_file = destination + file
      # both the source and the destination have changed: a likely conflict...
      if destination_changes.has_key?(file)
        destination_change_data = destination_changes[file]
        # ...unless the hashes are the same, in which case something odd is going on
        # (e.g. the user has been copying files around manually)
        if change_data[:new_hash] == destination_change_data[:new_hash]
          updated_synced_file_hashes[file] = change_data[:new_hash] unless dry_run
          next
        end
        # ask the user what they want to do about the conflict
        input = nil
        loop do
          puts "File '#{file}' is conflicted. Trying to overwrite a version that has been '#{destination_change_data[:change]}' with one that has been '#{change_data[:change]}'.\n(s)kip or (o)verwrite?"
          input = STDIN.gets.chomp
          break if input == 's' || input == 'o'
        end
        next if input == 's'
      end
      # finally, either copy the file over or delete it as required
      case change_data[:change]
      when :added, :modified
        puts "Copy: #{source_file} -> #{destination_file}"
        file_change_count += 1
        unless dry_run
          FileUtils.mkdir_p(File.dirname(destination_file))
          FileUtils.cp(source_file, destination_file)
          updated_synced_file_hashes[file] = change_data[:new_hash]
        end
      when :deleted
        puts "Delete: #{destination_file}"
        file_change_count += 1
        unless dry_run
          FileUtils.rm(destination_file)
          updated_synced_file_hashes.delete(file)
        end
      end
    end
    print_no_changes if file_change_count == 0
    updated_synced_file_hashes
  end
end
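
For Rake integration, something along the following lines could work. This is a minimal sketch, assuming the class above is saved as directory_commit.rb; the asset directory, Dropbox path, and ignore pattern are placeholders rather than anything defined in the gist.

# Rakefile (sketch) -- paths and the ignore pattern are illustrative placeholders
require_relative "directory_commit"

ASSETS = DirectoryCommit.new("assets", "/Users/me/Dropbox/project-assets", [/\.DS_Store$/])

namespace :assets do
  desc "Show local asset changes since the last sync"
  task :status do
    ASSETS.status.each { |file, data| puts "#{data[:change]}: #{file}" }
  end

  desc "Copy local asset changes into the Dropbox folder"
  task :push do
    ASSETS.push
  end

  desc "Copy asset changes from the Dropbox folder into the working directory"
  task :pull do
    ASSETS.pull
  end
end

Note that push and pull prompt on STDIN when a conflict is detected, so these tasks are meant to be run from an interactive terminal.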