Recent changes to this wiki:

Get rid of mysterious "_why_"
diff --git a/doc/git-annex.mdwn b/doc/git-annex.mdwn
index 140a5aa..dfd0b6b 100644
--- a/doc/git-annex.mdwn
+++ b/doc/git-annex.mdwn
@@ -31,7 +31,7 @@ content from the key-value store.
 # EXAMPLES
 
 	# git annex get video/hackity_hack_and_kaxxt.mov
-	get video/_why_hackity_hack_and_kaxxt.mov (not available)
+	get video/hackity_hack_and_kaxxt.mov (not available)
 	  I was unable to access these remotes: server
 	  Try making some of these repositories available:
 	  	5863d8c0-d9a9-11df-adb2-af51e6559a49  -- my home file server
diff --git a/doc/walkthrough/transferring_files:_When_things_go_wrong.mdwn b/doc/walkthrough/transferring_files:_When_things_go_wrong.mdwn
index cfb70aa..d3db8c3 100644
--- a/doc/walkthrough/transferring_files:_When_things_go_wrong.mdwn
+++ b/doc/walkthrough/transferring_files:_When_things_go_wrong.mdwn
@@ -5,7 +5,7 @@ or file server is not accessible, it will let you know what it needs to get
 it:
 
 	# git annex get video/hackity_hack_and_kaxxt.mov
-	get video/_why_hackity_hack_and_kaxxt.mov (not available)
+	get video/hackity_hack_and_kaxxt.mov (not available)
 	  Unable to access these remotes: usbdrive, server
 	  Try making some of these repositories available:
 	  	5863d8c0-d9a9-11df-adb2-af51e6559a49  -- my home file server

Added a comment
diff --git a/doc/bugs/ssh-options_seems_to_be_ignored/comment_2_77af42bd6017d9fa9d356014e62d8bcc._comment b/doc/bugs/ssh-options_seems_to_be_ignored/comment_2_77af42bd6017d9fa9d356014e62d8bcc._comment
new file mode 100644
index 0000000..7d4489c
--- /dev/null
+++ b/doc/bugs/ssh-options_seems_to_be_ignored/comment_2_77af42bd6017d9fa9d356014e62d8bcc._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://openid.stackexchange.com/user/3ee5cf54-f022-4a71-8666-3c2b5ee231dd"
+ nickname="Anthony DeRobertis"
+ subject="comment 2"
+ date="2014-12-19T21:14:30Z"
+ content="""
+It does manage to pass the ControlMaster, etc. options there, so could the ssh-options be added too? Or, if backwards compatibility is a worry, could another option be added for that? I guess it's possible someone would want different SSH options for bulk data transfer.
+
+It would be nice to be able to pass that `-i` option.
+"""]]

add news item for git-annex 5.20141219
diff --git a/doc/news/version_5.20141013.mdwn b/doc/news/version_5.20141013.mdwn
deleted file mode 100644
index d4eb62f..0000000
--- a/doc/news/version_5.20141013.mdwn
+++ /dev/null
@@ -1,7 +0,0 @@
-git-annex 5.20141013 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
-   * Adjust cabal file to support building w/o assistant on the hurd.
-   * Support building with yesod 1.4.
-   * S3: Fix embedcreds=yes handling for the Internet Archive.
-   * map: Handle .git prefixed remote repos. Closes: #[614759](http://bugs.debian.org/614759)
-   * repair: Prevent auto gc from happening when fetching from a remote."""]]
\ No newline at end of file
diff --git a/doc/news/version_5.20141219.mdwn b/doc/news/version_5.20141219.mdwn
new file mode 100644
index 0000000..74d228c
--- /dev/null
+++ b/doc/news/version_5.20141219.mdwn
@@ -0,0 +1,20 @@
+git-annex 5.20141219 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+   * Webapp: When adding a new box.com remote, use the new style chunking.
+     Thanks, Jon Ander Peñalba.
+   * External special remote protocol now includes commands for setting
+     and getting the urls associated with a key.
+   * Urls can now be claimed by remotes. This will allow creating,
+     for example, an external special remote that handles magnet: and
+     *.torrent urls.
+   * Use wget -q --show-progress for less verbose wget output,
+     when built with wget 1.16.
+   * Added bittorrent special remote.
+   * addurl behavior change: When downloading an url ending in .torrent,
+     it will download files from bittorrent, instead of the old behavior
+     of adding the torrent file to the repository.
+   * Added Recommends on aria2.
+   * When possible, build with the haskell torrent library for parsing
+     torrent files. As a fallback, can instead use btshowmetainfo from
+     bittornado | bittorrent.
+   * Fix build with -f-S3."""]]
\ No newline at end of file

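The url-related protocol commands mentioned in the news item are exchanged over the external special remote's stdin/stdout. A rough sketch of such an exchange, based on the external special remote protocol documentation (the key and urls below are hypothetical placeholders, and the `#` annotations are not part of the protocol):

    CLAIMURL magnet:?xt=urn:btih:EXAMPLE     # git-annex asks whether the remote handles this url
    CLAIMURL-SUCCESS                         # the remote claims it
    SETURLPRESENT SHA256E-s0--EXAMPLE magnet:?xt=urn:btih:EXAMPLE
                                             # the remote records an url for a key
    GETURLS SHA256E-s0--EXAMPLE magnet:      # the remote asks for recorded urls with a prefix
    VALUE magnet:?xt=urn:btih:EXAMPLE        # git-annex answers with one VALUE per url,
    VALUE                                    # ending the list with an empty VALUE
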
Fix build with -f-S3.
diff --git a/Remote/Helper/AWS.hs b/Remote/Helper/AWS.hs
index d27f2aa..17e1a29 100644
--- a/Remote/Helper/AWS.hs
+++ b/Remote/Helper/AWS.hs
@@ -5,21 +5,19 @@
  - Licensed under the GNU GPL version 3 or higher.
  -}
 
-{-# LANGUAGE OverloadedStrings, TupleSections #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TupleSections #-}
 
 module Remote.Helper.AWS where
 
 import Common.Annex
 import Creds
 
-import qualified Aws
-import qualified Aws.S3 as S3
 import qualified Data.Map as M
 import qualified Data.ByteString as B
 import qualified Data.Text as T
 import Data.Text.Encoding (encodeUtf8)
 import Data.Text (Text)
-import Data.IORef
 
 creds :: UUID -> CredPairStorage
 creds u = CredPairStorage
@@ -28,13 +26,6 @@ creds u = CredPairStorage
 	, credPairRemoteKey = Just "s3creds"
 	}
 
-genCredentials :: CredPair -> IO Aws.Credentials
-genCredentials (keyid, secret) = Aws.Credentials
-	<$> pure (encodeUtf8 (T.pack keyid))
-	<*> pure (encodeUtf8 (T.pack secret))
-	<*> newIORef []
-	<*> pure Nothing
-
 data Service = S3 | Glacier
 	deriving (Eq)
 
@@ -82,7 +73,3 @@ s3HostName r = encodeUtf8 $ T.concat ["s3-", r, ".amazonaws.com"]
 
 s3DefaultHost :: String
 s3DefaultHost = "s3.amazonaws.com"
-
-mkLocationConstraint :: Region -> S3.LocationConstraint
-mkLocationConstraint "US" = S3.locationUsClassic
-mkLocationConstraint r = r
diff --git a/Remote/S3.hs b/Remote/S3.hs
index f2ee884..1a6e410 100644
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -6,6 +6,7 @@
  -}
 
 {-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE CPP #-}
 
 module Remote.S3 (remote, iaHost, configIA, iaItemUrl) where
@@ -26,6 +27,7 @@ import Network.HTTP.Types
 import Control.Monad.Trans.Resource
 import Control.Monad.Catch
 import Data.Conduit
+import Data.IORef
 
 import Common.Annex
 import Types.Remote
@@ -308,7 +310,7 @@ genBucket c u = do
 				showAction $ "creating bucket in " ++ datacenter
 				void $ sendS3Handle h $
 					S3.PutBucket (bucket $ hinfo h) Nothing $
-						AWS.mkLocationConstraint $
+						mkLocationConstraint $
 							T.pack datacenter
 		writeUUIDFile c u h
 	
@@ -391,7 +393,7 @@ sendS3Handle' h = AWS.pureAws (hawscfg h) (hs3cfg h) (hmanager h)
 withS3Handle :: RemoteConfig -> UUID -> S3Info -> (S3Handle -> Annex a) -> Annex a
 withS3Handle c u info a = do
 	creds <- getRemoteCredPairFor "S3" c (AWS.creds u)
-	awscreds <- liftIO $ AWS.genCredentials $ fromMaybe nocreds creds
+	awscreds <- liftIO $ genCredentials $ fromMaybe nocreds creds
 	let awscfg = AWS.Configuration AWS.Timestamp awscreds (AWS.defaultLog AWS.Error)
 	bracketIO (newManager httpcfg) closeManager $ \mgr -> 
 		a $ S3Handle mgr awscfg s3cfg info
@@ -505,3 +507,14 @@ iaKeyUrl :: Remote -> Key -> URLString
 iaKeyUrl r k = "http://archive.org/download/" ++ b ++ "/" ++ getBucketObject (config r) k
   where
 	b = fromMaybe "" $ getBucketName $ config r
+
+genCredentials :: CredPair -> IO AWS.Credentials
+genCredentials (keyid, secret) = AWS.Credentials
+	<$> pure (T.encodeUtf8 (T.pack keyid))
+	<*> pure (T.encodeUtf8 (T.pack secret))
+	<*> newIORef []
+	<*> pure Nothing
+
+mkLocationConstraint :: AWS.Region -> S3.LocationConstraint
+mkLocationConstraint "US" = S3.locationUsClassic
+mkLocationConstraint r = r
diff --git a/debian/changelog b/debian/changelog
index ff421ae..3f7db39 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -17,6 +17,7 @@ git-annex (5.20141204) UNRELEASED; urgency=medium
   * When possible, build with the haskell torrent library for parsing
     torrent files. As a fallback, can instead use btshowmetainfo from
     bittornado | bittorrent.
+  * Fix build with -f-S3.
 
  -- Joey Hess <id@joeyh.name>  Fri, 05 Dec 2014 13:42:08 -0400
 
diff --git a/doc/bugs/Build_error_when_S3_is_disabled.mdwn b/doc/bugs/Build_error_when_S3_is_disabled.mdwn
index 9b72afe..2ae3865 100644
--- a/doc/bugs/Build_error_when_S3_is_disabled.mdwn
+++ b/doc/bugs/Build_error_when_S3_is_disabled.mdwn
@@ -35,3 +35,5 @@ I'm installing dependencies with cabal but have disabled S3 support
     cabal configure "${_features[@]}"
 
     make
+
+> [[fixed|done]] --[[Joey]]

Added a comment
diff --git a/doc/bugs/ssh-options_seems_to_be_ignored/comment_1_bd9062bd492a36f54883fa3635b1c35f._comment b/doc/bugs/ssh-options_seems_to_be_ignored/comment_1_bd9062bd492a36f54883fa3635b1c35f._comment
new file mode 100644
index 0000000..c46c10d
--- /dev/null
+++ b/doc/bugs/ssh-options_seems_to_be_ignored/comment_1_bd9062bd492a36f54883fa3635b1c35f._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ subject="comment 1"
+ date="2014-12-19T20:41:58Z"
+ content="""
+The ssh-options are used when git-annex is running git-annex-shell on the remote (eg, when transferring data). However, `git annex sync` does a `git pull` and `git push`, and so it doesn't pass on the ssh-options there.
+"""]]

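Until that changes, a possible workaround (plain ssh configuration, not a git-annex feature) is to attach the key to the host in `~/.ssh/config`, so both the `git pull`/`git push` and the git-annex-shell connections pick it up. For the remote in the report above, that might look like:

    # in ~/.ssh/config
    Host watt.home
        IdentityFile ~/.ssh/id_git_rsa
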
diff --git a/doc/forum/s3_server_side_encryption.mdwn b/doc/forum/s3_server_side_encryption.mdwn
new file mode 100644
index 0000000..f7b6492
--- /dev/null
+++ b/doc/forum/s3_server_side_encryption.mdwn
@@ -0,0 +1,9 @@
+AWS S3 offers a feature to enable server-side encryption of files.  
+If I understand correctly, this is enabled by sending a specific HTTP header with the request to upload the file in question.   
+So, this header needs to be set every time we want to upload a new file.  
+
+Is this feature already supported / being considered for future versions?
+
+If not, am I correct in assuming it would have to be implemented in https://github.com/joeyh/git-annex/blob/master/Remote/S3.hs ?
+
+Thank you

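For reference, AWS documents SSE-S3 as a single extra header on the upload request; git-annex does not currently send it, so supporting it would indeed mean extending the S3 remote, most likely in Remote/S3.hs as the post guesses:

    # request header AWS documents for server-side encryption with S3-managed keys
    x-amz-server-side-encryption: AES256
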
diff --git a/doc/forum/How_to_hide_broken_symlinks.mdwn b/doc/forum/How_to_hide_broken_symlinks.mdwn
new file mode 100644
index 0000000..7c92f32
--- /dev/null
+++ b/doc/forum/How_to_hide_broken_symlinks.mdwn
@@ -0,0 +1,45 @@
+This is a method for hiding broken links using git-annex views.
+
+Each annex will need its own name for this system to work. For this example I'll use "localdrive". After getting file content, run:
+
+    git-annex metadata --not --in=here --metadata in=localdrive . -s in-=localdrive
+    git-annex metadata --in=here --not --metadata in=localdrive . -s in+=localdrive
+    git-annex view /=*
+    git-annex vfilter in=localdrive
+
+Unused links will be hidden. Folder structures will remain the same.
+
+To switch back use:
+
+    git-annex vpop 2
+
+Because this is a lot to type, I've placed these in a bash script in the base folder (ignored with .gitignore so it isn't sent to other repos). The local repo name can be changed by editing THISREPO:
+
+    #!/bin/bash
+    
+    THISREPO='localdrive'
+    
+    git-annex metadata --not --in=here --metadata in=$THISREPO . -s in-=$THISREPO
+    git-annex metadata --in=here --not --metadata in=$THISREPO . -s in+=$THISREPO
+    git-annex view /=*
+    git-annex vfilter in=$THISREPO
+    
+    exit 0
+
+## Hiding Broken Links in Preferred Content Repos
+
+If you have a repo with preferred content settings, this can be shortened to a single script which can be run to "refresh" the view:
+
+    #!/bin/bash
+    
+    THISREPO='pcrepo'
+    
+    git-annex vpop 2
+    git-annex sync
+    git-annex get --auto
+    git-annex metadata --not --in=here --metadata in=$THISREPO . -s in-=$THISREPO
+    git-annex metadata --in=here --not --metadata in=$THISREPO . -s in+=$THISREPO
+    git-annex view /=*
+    git-annex vfilter in=$THISREPO
+    
+    exit 0

Added a comment: typical repos
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_8_884a0b9571544a95fad55cb5fc5963d8._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_8_884a0b9571544a95fad55cb5fc5963d8._comment
new file mode 100644
index 0000000..3032f16
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_8_884a0b9571544a95fad55cb5fc5963d8._comment
@@ -0,0 +1,15 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnx8kHW66N3BqmkVpgtXDlYMvr8TJ5VvfY"
+ nickname="Yaroslav"
+ subject="typical repos"
+ date="2014-12-19T14:05:22Z"
+ content="""
+Thanks for doing it and asking for detail!!!
+Repositories will vary quite a bit.  I am currently testing how big we could actually make them (see https://github.com/datalad/datalad/issues/17) ;)
+
+Meanwhile, here are a few samples available for git clone/testing:
+
+https://github.com/datalad/nih--videocast   a good collection of heavyish video files
+http://psydata.ovgu.de/forrest_gump/.git/   a good single dataset with probably a somewhat typical amount of data
+http://data.pymvpa.org/datasets/haxby2001/.git/  relatively small dataset with typical data sizes
+"""]]

Added a comment: Solved
diff --git a/doc/forum/Git_Annex_not_dropping_unused_content/comment_1_da47b6af512b19cba077499f41455189._comment b/doc/forum/Git_Annex_not_dropping_unused_content/comment_1_da47b6af512b19cba077499f41455189._comment
new file mode 100644
index 0000000..d21648c
--- /dev/null
+++ b/doc/forum/Git_Annex_not_dropping_unused_content/comment_1_da47b6af512b19cba077499f41455189._comment
@@ -0,0 +1,23 @@
+[[!comment format=mdwn
+ username="ghen1"
+ subject="Solved"
+ date="2014-12-19T12:56:55Z"
+ content="""
+I've figured it out:
+
+In the annex in which the view was made (in my case *sdrive*), the views must be deleted with single-quotes:
+
+    git branch -D 'views/(added=14_09);(tag=Shared)'
+    git branch -D 'views/(added=14_09);Images_=_'
+    git branch -D 'views/added=_'
+
+In the connected annexes, for which *sdrive* is a remote, this command removes the remote branches:
+
+    git fetch -p sdrive
+
+     x [deleted]         (none)     -> sdrive/views/(added=14_09);(tag=Shared)
+     x [deleted]         (none)     -> sdrive/views/(added=14_09);Images_=_
+     x [deleted]         (none)     -> sdrive/views/added=_
+
+After these steps *git annex* recognized old files as unused.
+"""]]

Added a comment
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_7_6202ae898f24b3e02bc343d0fd2ac35a._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_7_6202ae898f24b3e02bc343d0fd2ac35a._comment
new file mode 100644
index 0000000..127fe77
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_7_6202ae898f24b3e02bc343d0fd2ac35a._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="CandyAngel"
+ subject="comment 7"
+ date="2014-12-19T08:40:06Z"
+ content="""
+Okie dokie, I'll see what I can do.
+
+Can you give me an idea of the annex file properties (file size, count, files per directory, directory count, etc.), please?
+"""]]

diff --git a/doc/bugs/ssh-options_seems_to_be_ignored.mdwn b/doc/bugs/ssh-options_seems_to_be_ignored.mdwn
new file mode 100644
index 0000000..facf36d
--- /dev/null
+++ b/doc/bugs/ssh-options_seems_to_be_ignored.mdwn
@@ -0,0 +1,45 @@
+### Please describe the problem.
+The docs say I can set ssh options via `annex.ssh-options` or `remote.NAME.annex-ssh-options`. I tried, and the setting appears to be completely ignored.
+
+Apologies in advance if I've made a stupid typo.
+
+### What steps will reproduce the problem?
+
+I tried all of these:
+
+    git config --local --replace-all annex.ssh-options "-i ~/.ssh/id_git_rsa"
+    git config --local --replace-all remote.Watt.annex-ssh-options "-i ~/.ssh/id_git_rsa"
+
+`git config -l | grep Watt` confirms it took:
+
+    remote.Watt.url=ssh://watt.home/home/anthony/Music/
+    remote.Watt.fetch=+refs/heads/*:refs/remotes/Watt/*
+    remote.Watt.annex-uuid=e74b57e5-e78c-4f3d-bde6-4803a0c33837
+    remote.Watt.annex-ssh-options=-i ~/.ssh/id_git_rsa
+
+Then I ran `git annex sync Watt`, and was prompted for a password:
+
+    anthony@Forest:~/Music$ git annex sync Watt
+    commit  ok
+    pull Watt 
+    Password: 
+
+Running `ps ww $(pidof ssh)` shows that the `-i` option is missing:
+
+      PID TTY      STAT   TIME COMMAND
+    22188 pts/4    S+     0:00 ssh -S .git/annex/ssh/watt.home -o ControlMaster=auto -o ControlPersist=yes watt.home git-upload-pack '/home/anthony/Music/'
+
+
+### What version of git-annex are you using? On what operating system?
+
+Debian testing 5.20141125
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+
+# End of transcript or log.
+"""]]

Added a comment: +1
diff --git a/doc/bugs/__91__Android__93___5.0_needs_PIE_executables___40__git_annex_does_not_work_on_android_5.0__41__/comment_2_43bb5bb481f54764d355dfac536daba6._comment b/doc/bugs/__91__Android__93___5.0_needs_PIE_executables___40__git_annex_does_not_work_on_android_5.0__41__/comment_2_43bb5bb481f54764d355dfac536daba6._comment
new file mode 100644
index 0000000..cb171e6
--- /dev/null
+++ b/doc/bugs/__91__Android__93___5.0_needs_PIE_executables___40__git_annex_does_not_work_on_android_5.0__41__/comment_2_43bb5bb481f54764d355dfac536daba6._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="ztzg"
+ subject="+1"
+ date="2014-12-19T07:39:20Z"
+ content="""
+Same here.  Nexus 7 (2013), Android 5.0.1.  It looks like non-PIE executables are not coming back.
+"""]]

diff --git a/doc/bugs/git_annex_remotedaemon_fails_to_connect_after_several_LOSTNET_messages_in_a_row.mdwn b/doc/bugs/git_annex_remotedaemon_fails_to_connect_after_several_LOSTNET_messages_in_a_row.mdwn
new file mode 100644
index 0000000..e584064
--- /dev/null
+++ b/doc/bugs/git_annex_remotedaemon_fails_to_connect_after_several_LOSTNET_messages_in_a_row.mdwn
@@ -0,0 +1,73 @@
+### Please describe the problem.
+
+git annex remotedaemon fails to reconnect after receiving several LOSTNET messages and RESUME.
+
+I initially encountered the issue by noticing that the git annex assistant stops updating the local working copy after suspending/resuming my laptop. Later I found that the same issue happens when disconnecting from the network via NetworkManager and reconnecting.
+
+After some experiments with git annex remotedaemon, I found that it incorrectly handles several LOSTNET messages in a row. After RESUME, remotedaemon starts a new SSH process and sends DISCONNECTED right away. It also doesn't retry with backoff, as it does with a bad connection.
+
+I checked the sources and, with some logging, found that it sends DISCONNECTED because of some kind of queueing of LOSTNET and a switch to stopped mode right after RESUME. Unfortunately I don't know enough Haskell to fix it myself.
+
+### What steps will reproduce the problem?
+
+[[!format sh """
+% git annex remotedaemon --debug
+[2014-12-19 11:07:13 NOVT] read: git ["config","--null","--list"]
+[2014-12-19 11:07:13 NOVT] read: ssh ["-O","stop","-S","git@annex","-o","ControlMaster=auto","-o","ControlPersist=yes","localhost"]
+[2014-12-19 11:07:13 NOVT] chat: ssh ["-S",".git/annex/ssh/git@annex","-o","ControlMaster=auto","-o","ControlPersist=yes","-T","git@annex","git-annex-shell 'notifychanges' '/~/annex' --uuid 858bc960-d379-4694-a6b8-98eba5fbeb82"]
+CONNECTED ssh://git@annex/~/annex
+[2014-12-19 11:07:13 NOVT] chat: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","cat-file","--batch"]
+LOSTNET   
+[2014-12-19 11:07:21 NOVT] read: ssh ["-O","stop","-S","git@annex","-o","ControlMaster=auto","-o","ControlPersist=yes","localhost"]
+DISCONNECTED ssh://git@annex/~/annex
+LOSTNET
+RESUME
+[2014-12-19 11:07:27 NOVT] chat: ssh ["-S",".git/annex/ssh/git@annex","-o","ControlMaster=auto","-o","ControlPersist=yes","-T","git@annex","git-annex-shell 'notifychanges' '/~/annex' --uuid 858bc960-d379-4694-a6b8-98eba5fbeb82"]
+DISCONNECTED ssh://git@annex/~/annex
+"""]]
+
+### What version of git-annex are you using? On what operating system?
+
+git-annex 5.20141203 on Gentoo Linux
+
+### Please provide any additional information below.
+
+Here is the original assistant log:
+
+[[!format sh """
+
+[2014-12-19 11:14:47 NOVT] NetWatcher: detected network disconnection
+[2014-12-19 11:14:47 NOVT] RemoteControl: LOSTNET
+[2014-12-19 11:14:47 NOVT] RemoteControl: DISCONNECTED ssh://git@annex/~/annex
+[2014-12-19 11:14:47 NOVT] RemoteControl: fromList []
+[2014-12-19 11:14:49 NOVT] NetWatcher: detected network disconnection
+[2014-12-19 11:14:49 NOVT] RemoteControl: LOSTNET
+[2014-12-19 11:14:52 NOVT] NetWatcher: detected network disconnection
+[2014-12-19 11:14:52 NOVT] RemoteControl: LOSTNET
+[2014-12-19 11:14:52 NOVT] NetWatcher: detected network disconnection
+[2014-12-19 11:14:52 NOVT] RemoteControl: LOSTNET
+[2014-12-19 11:14:53 NOVT] NetWatcher: detected network connection
+[2014-12-19 11:14:53 NOVT] NetWatcher: Syncing with origin 
+[2014-12-19 11:14:53 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","symbolic-ref","HEAD"]
+[2014-12-19 11:14:53 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","show-ref","refs/heads/master"]
+[2014-12-19 11:14:53 NOVT] call: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","fetch","origin"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","show-ref","git-annex"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","show-ref","--hash","refs/heads/git-annex"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","log","refs/heads/git-annex..b85cddc187d388ab12d18fddbdef266565e81e43","-n1","--pretty=%H"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","log","refs/heads/git-annex..12f83429b96850f81ff8edfc0a8651bfa65a066b","-n1","--pretty=%H"]
+[2014-12-19 11:14:54 NOVT] call: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","show-ref","--verify","-q","refs/remotes/origin/master"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","log","refs/heads/master..refs/remotes/origin/master","-n1","--pretty=%H"]
+[2014-12-19 11:14:54 NOVT] call: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","show-ref","--verify","-q","refs/remotes/origin/synced/master"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","log","refs/heads/synced/master..refs/remotes/origin/synced/master","-n1","--pretty=%H"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","symbolic-ref","HEAD"]
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","show-ref","refs/heads/master"]
+[2014-12-19 11:14:54 NOVT] NetWatcher: pushing to [Remote { name ="origin" }]
+[2014-12-19 11:14:54 NOVT] call: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","push","origin","+git-annex:synced/git-annex","master:synced/master"]
+Everything up-to-date
+[2014-12-19 11:14:54 NOVT] read: git ["--git-dir=/home/user/annex/.git","--work-tree=/home/user/annex","push","origin","master"]
+[2014-12-19 11:14:54 NOVT] RemoteControl: RESUME
+[2014-12-19 11:14:54 NOVT] RemoteControl: DISCONNECTED ssh://git@annex/~/annex
+[2014-12-19 11:14:54 NOVT] RemoteControl: fromList []
+
+
+"""]]

Added a comment
diff --git a/doc/forum/Can_Not_Sync_to_Git_Repo/comment_13_45132d348807fbf8ed32198e110d2caa._comment b/doc/forum/Can_Not_Sync_to_Git_Repo/comment_13_45132d348807fbf8ed32198e110d2caa._comment
new file mode 100644
index 0000000..f30f290
--- /dev/null
+++ b/doc/forum/Can_Not_Sync_to_Git_Repo/comment_13_45132d348807fbf8ed32198e110d2caa._comment
@@ -0,0 +1,26 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlo7XnC4EU3u_9t80JUYXV7XPnUqBQ7mD4"
+ nickname="Ryan"
+ subject="comment 13"
+ date="2014-12-19T00:09:35Z"
+ content="""
+I'm having the same problem with my direct repo:
+
+git branch -a
+  git-annex
+* master
+  synced/git-annex
+  synced/master
+  remotes/cluster/git-annex
+  remotes/cluster/master
+  remotes/cluster/synced/master
+
+git annex version
+git-annex version: 5.20140818-g10bf03a
+build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+local repository version: 5
+supported repository version: 5
+upgrade supported from repository versions: 0 1 2 4
+"""]]

response
diff --git a/doc/tips/using_the_web_as_a_special_remote/comment_12_2ce018f181e039b5dd52e2b712f63eea._comment b/doc/tips/using_the_web_as_a_special_remote/comment_12_2ce018f181e039b5dd52e2b712f63eea._comment
new file mode 100644
index 0000000..7e3d8c9
--- /dev/null
+++ b/doc/tips/using_the_web_as_a_special_remote/comment_12_2ce018f181e039b5dd52e2b712f63eea._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 12"""
+ date="2014-12-18T19:59:54Z"
+ content="""
+You can see the url(s) of a file when you run `git annex whereis $file`
+"""]]

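The output looks along these lines (an illustrative sketch; the file and url are made up, and the exact formatting may differ slightly):

    $ git annex whereis video.mpg
    whereis video.mpg (1 copy)
        00000000-0000-0000-0000-000000000001 -- web

      web: http://example.com/video.mpg
    ok
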
note about trust and checking copies
diff --git a/doc/special_remotes/bittorrent.mdwn b/doc/special_remotes/bittorrent.mdwn
index 4821acc..b9bbff6 100644
--- a/doc/special_remotes/bittorrent.mdwn
+++ b/doc/special_remotes/bittorrent.mdwn
@@ -20,3 +20,10 @@ it cannot upload or remove content.
 
 Multi-file torrents are supported; to handle them, `git annex addurl`
 will add a directory containing all the files from the torrent.
+
+It's hard to say if a torrent is healthy enough to let a file be downloaded
+from it, and harder to predict if a torrent will stay healthy. So,
+git-annex takes a cautious approach and when dropping a file, won't
+treat this special remote as one of the required [[copies]]. It's probably
+a good idea to configure git-annex to fully distrust this remote, by
+running `git annex untrust bittorrent`.

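Applying that advice is a one-liner; a quick sketch (`somefile` is a hypothetical annexed file):

    git annex untrust bittorrent
    # dropping now only succeeds if enough copies exist on remotes other than bittorrent
    git annex drop somefile
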
Added a comment: How to get the URL of an added file?
diff --git a/doc/tips/using_the_web_as_a_special_remote/comment_11_9889828caa47aad88267d0ec35f2240d._comment b/doc/tips/using_the_web_as_a_special_remote/comment_11_9889828caa47aad88267d0ec35f2240d._comment
new file mode 100644
index 0000000..ccd27bf
--- /dev/null
+++ b/doc/tips/using_the_web_as_a_special_remote/comment_11_9889828caa47aad88267d0ec35f2240d._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="thnetos"
+ subject="How to get the URL of an added file?"
+ date="2014-12-18T18:56:57Z"
+ content="""
+Once a file has been added with either `addurl` or `importfeed`, how can I get the URL of the file or feed from git-annex?
+"""]]

design document
diff --git a/doc/todo/parallel_get.mdwn b/doc/todo/parallel_get.mdwn
new file mode 100644
index 0000000..119d078
--- /dev/null
+++ b/doc/todo/parallel_get.mdwn
@@ -0,0 +1,73 @@
+Wish: `git annex get [files] -jN` should run up to N downloads of files
+concurrently.
+
+This can already be done by just starting up N separate git-annex
+processes all trying to get the same files. They'll coordinate themselves
+to avoid downloading the same file twice.
+
+But, the output of concurrent git annex gets in a single terminal is a
+mess.
+
+It would be nice to have something similar to docker's output when fetching
+layers of an image. Something like:
+
+	get foo1 ok
+	get foo2 ok
+	get foo3 -> 5% 100 KiB/s
+	get foo4 -> 3% 90 KiB/s
+	get foo5 -> 20% 1 MiB/s
+
+Where the bottom N lines are progress displays for the downloads that are
+currently in progress. When a download finishes, it can scroll up the
+screen with "ok".
+
+	get foo1 ok
+	get foo2 ok
+	get foo5 ok
+	get foo3 -> 5% 100 KiB/s
+	get foo4 -> 3% 90 KiB/s
+	get foo6 -> 0% 110 Kib/S
+
+This display could perhaps be generalized for other concurrent actions.
+For example, drop:
+
+	drop foo1 ok
+	drop foo2 failed
+	  Not enough copies ...
+	drop foo3 -> (checking r1...)
+	drop foo4 -> (checking r2...)
+
+But, do get first.
+
+Pain points: 
+
+1. Currently, git-annex lets tools like rsync and wget display their own
+   progress. This makes sense for the single-file at a time get, because
+   rsync can display better output than just a percentage. (This is especially
+   the case with aria2c for torrents, which displays seeder/leecher info in
+   addition to percentage.)
+
+   But in multi-get mode, the progress display would be simplified. git-annex
+   can already get percent done information, either as reported by individual
+   backends, or by falling back to polling the file as it's downloaded.
+
+2. The mechanics of updating the screen for a multi-line progress output
+   require some terminal handling code. Using eg, curses, in a mode that
+   doesn't take over the whole screen display, but just moves the cursor
+   up to the line for the progress that needs updating and redraws that
+   line. Doing this portably is probably going to be a pain, especially
+   since I have no idea whether it can be done on Windows.
+
+   An alternative would be a display more like apt uses for concurrent
+   downloads, all on one line:
+
+	get foo1 ok
+	get foo2 ok
+	get [foo3 -> 5% 100 KiB/s] [foo4 -> 3% 90 KiB/s] [foo5 -> 20% 1 MiB/s]
+
+   The problem with that is it has to avoid scrolling off the right
+   side, so it probably has to truncate the line. Since filenames
+   are often longer than "fooN", it probably has to ellipsize the filename.
+   This approach is just not as flexible or nice in general.
+
+See also: [[parallel_possibilities]]

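Until a `-jN` option exists, the coordination described above already allows a crude form of parallelism; a sketch, assuming GNU xargs:

    # run 4 git-annex get processes over the files not present here;
    # the processes coordinate, so no file is downloaded twice
    git annex find --not --in=here --print0 | xargs -0 -n10 -P4 git annex get
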
When possible, build with the haskell torrent library for parsing torrent files.
diff --git a/BuildFlags.hs b/BuildFlags.hs
index 59a060c..f7c53a9 100644
--- a/BuildFlags.hs
+++ b/BuildFlags.hs
@@ -86,6 +86,11 @@ buildFlags = filter (not . null)
 #else
 #warning Building without CryptoHash.
 #endif
+#ifdef WITH_TORRENTParser
+	, "TorrentParser"
+#else
+#warning Building without haskell torrent library; will instead use btshowmetainfo to parse torrent files.
+#endif
 #ifdef WITH_EKG
 	, "EKG"
 #endif
diff --git a/Remote/BitTorrent.hs b/Remote/BitTorrent.hs
index 4cb579f..4e4b954 100644
--- a/Remote/BitTorrent.hs
+++ b/Remote/BitTorrent.hs
@@ -5,6 +5,8 @@
  - Licensed under the GNU GPL version 3 or higher.
  -}
 
+{-# LANGUAGE CPP #-}
+
 module Remote.BitTorrent (remote) where
 
 import Common.Annex
@@ -14,8 +16,6 @@ import qualified Git
 import qualified Git.Construct
 import Config.Cost
 import Logs.Web
-import Logs.Trust.Basic
-import Types.TrustLevel
 import Types.UrlContents
 import Types.CleanupActions
 import Types.Key
@@ -26,9 +26,13 @@ import Annex.Perms
 import Annex.UUID
 import qualified Annex.Url as Url
 
-import qualified Data.Map as M
 import Network.URI
 
+#ifdef WITH_TORRENTPARSER
+import Data.Torrent
+import qualified Data.ByteString.Lazy as B
+#endif
+
 remote :: RemoteType
 remote = RemoteType {
 	typename = "bittorrent",
@@ -106,7 +110,7 @@ dropKey k = do
  - implemented, it tells us nothing about the later state of the torrent.
  -}
 checkKey :: Key -> Annex Bool
-checkKey key = error "cannot reliably check torrent status"
+checkKey = error "cannot reliably check torrent status"
 
 getBitTorrentUrls :: Key -> Annex [URLString]
 getBitTorrentUrls key = filter supported <$> getUrls key
@@ -266,9 +270,16 @@ downloadTorrentContent k u dest filenum p = do
 
 checkDependencies :: Annex ()
 checkDependencies = do
-	missing <- liftIO $ filterM (not <$$> inPath) ["aria2c", "btshowmetainfo"]
+	missing <- liftIO $ filterM (not <$$> inPath) deps
 	unless (null missing) $
 		error $ "need to install additional software in order to download from bittorrent: " ++ unwords missing
+  where
+	deps =
+		[ "aria2c"
+#ifndef TORRENT
+		, "btshowmetainfo"
+#endif
+		]
 
 ariaParams :: [CommandParam] -> Annex [CommandParam]
 ariaParams ps = do
@@ -299,9 +310,8 @@ parseAriaProgress totalsize = go [] . reverse . split ['\r']
 
 	frompercent p = toBytesProcessed $ totalsize * p `div` 100
 
-{- It would be better to use http://hackage.haskell.org/package/torrent,
- - but that package won't currently build. I sent a patch fixing it
- - to its author and plan to upload in Jan 2015 if I don't hear back. -}
+{- Used only if the haskell torrent library is not available. -}
+#ifndef WITH_TORRENTPARSER
 btshowmetainfo :: FilePath -> String -> IO [String]
 btshowmetainfo torrent field = 
 	findfield [] . lines <$> readProcess "btshowmetainfo" [torrent]
@@ -319,12 +329,25 @@ btshowmetainfo torrent field =
 	multiline c [] = findfield c []
 
 	fieldkey = field ++ take (14 - length field) (repeat '.') ++ ": "
+#endif
 
 {- Examines the torrent file and gets the list of files in it,
  - and their sizes.
  -}
 torrentFileSizes :: FilePath -> IO [(FilePath, Integer)]
 torrentFileSizes torrent = do
+#ifdef WITH_TORRENTPARSER
+	let mkfile = joinPath . map (scrub . decodeBS)
+	b <- B.readFile torrent
+	return $ case readTorrent b of
+		Left e -> error $ "failed to parse torrent: " ++ e
+		Right t -> case tInfo t of
+			SingleFile { tLength = l, tName = f } ->
+				[ (mkfile [f], l) ]
+			MultiFile { tFiles = fs, tName = dir } ->
+				map (\tf -> (mkfile $ dir:filePath tf, fileLength tf)) fs
+  where
+#else
 	files <- getfield "files"
 	if null files
 		then do
@@ -334,13 +357,12 @@ torrentFileSizes torrent = do
 				((fn:[]), (Just sz:[])) -> return [(scrub fn, sz)]
 		 		_ -> parsefailed (show (fnl, szl))
 		else do
-			v <- btshowmetainfo torrent "directory name"
+			v <- getfield "directory name"
 			case v of
 				(d:[]) -> return $ map (splitsize d) files
 				_ -> parsefailed (show v)
   where
 	getfield = btshowmetainfo torrent
-
 	parsefailed s = error $ "failed to parse btshowmetainfo output for torrent file: " ++ show s
 
 	-- btshowmetainfo outputs a list of "filename (size)"
@@ -351,7 +373,7 @@ torrentFileSizes torrent = do
 				reverse l
 		fn = reverse $ drop 2 $
 			dropWhile (/= '(') $ dropWhile (== ')') $ reverse l
-
+#endif
 	-- a malicious torrent file might try to do directory traversal
 	scrub f = if isAbsolute f || any (== "..") (splitPath f)
 		then error "found unsafe filename in torrent!"
diff --git a/debian/changelog b/debian/changelog
index f1b8d32..ff421ae 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -13,7 +13,10 @@ git-annex (5.20141204) UNRELEASED; urgency=medium
   * addurl behavior change: When downloading an url ending in .torrent,
     it will download files from bittorrent, instead of the old behavior
     of adding the torrent file to the repository.
-  * Added Recommends on aria2 and bittornado | bittorrent.
+  * Added Recommends on aria2.
+  * When possible, build with the haskell torrent library for parsing
+    torrent files. As a fallback, can instead use btshowmetainfo from
+    bittornado | bittorrent.
 
  -- Joey Hess <id@joeyh.name>  Fri, 05 Dec 2014 13:42:08 -0400
 
diff --git a/doc/special_remotes/bittorrent.mdwn b/doc/special_remotes/bittorrent.mdwn
index 36fa1b8..4821acc 100644
--- a/doc/special_remotes/bittorrent.mdwn
+++ b/doc/special_remotes/bittorrent.mdwn
@@ -8,8 +8,10 @@ torrent and add it to the git annex repository.
 See [[tips/using_the_web_as_a_special_remote]] for usage examples.
 
 git-annex uses [aria2](http://aria2.sourceforge.net/) to download torrents.
-It also needs the `btshowmetainfo` program, from either
-bittornado or the original BitTorrent client.
+
+If git-annex is not built using the haskell torrent library to parse
+torrents, it also needs the `btshowmetainfo` program, from
+either bittornado or the original BitTorrent client.
 
 ## notes
 
diff --git a/git-annex.cabal b/git-annex.cabal
index 568374b..b8bca22 100644
--- a/git-annex.cabal
+++ b/git-annex.cabal
@@ -93,6 +93,9 @@ Flag CryptoHash
 Flag DesktopNotify
   Description: Enable desktop environment notifications
 
+Flag TorrentParser
+  Description: Use haskell torrent library to parse torrent files
+
 Flag EKG
   Description: Enable use of EKG to monitor git-annex as it runs (at http://localhost:4242/)
   Default: False
@@ -234,6 +237,10 @@ Executable git-annex
     Build-Depends: aeson
     CPP-Options: -DWITH_TAHOE
 
+  if flag(TorrentParser)
+    Build-Depends: torrent (>= 10000.0.0)
+    CPP-Options: -DWITH_TORRENTPARSER
+
   if flag(EKG)
     Build-Depends: ekg
     GHC-Options: -with-rtsopts=-T

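To build from source with the new flag enabled, something along these lines should work (a sketch; the flag name comes from the cabal stanza above):

    cabal install --only-dependencies
    cabal configure -fTorrentParser
    cabal build
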
diff --git a/doc/todo/inject_on_import.mdwn b/doc/todo/inject_on_import.mdwn
new file mode 100644
index 0000000..a4f4b1e
--- /dev/null
+++ b/doc/todo/inject_on_import.mdwn
@@ -0,0 +1,21 @@
+Would it be possible to add an `--inject` option to import?
+
+Say, for example, I have an annex on computer A which has a subset of files and a directory of files which are potential duplicates of files in the annex.
+
+I would like to do something like this:
+
+    mkdir ~/annex/import
+    cd ~/annex/import
+    git annex import --deduplicate --inject ~/directory/of/files
+
+This would do the same as `--deduplicate`, except if the file is not present in the annex, it would be injected. For example:
+
+The annex knows about A and B; A is present but B is not.
+$DIR contains A, B and C.
+
+A would be deleted from $DIR due to `--deduplicate`.
+B would be injected into the repo (making it present) due to `--inject`, then deleted from $DIR.
+C would be added to the annex, resulting in this:
+
+    $ ls ~/annex/import
+    C

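Until something like `--inject` exists, case B can be approximated by hand with `git annex reinject`, one file at a time (a sketch with hypothetical paths):

    # move the copy in the import directory into the annex,
    # supplying the content for the already-known but missing file B
    git annex reinject ~/directory/of/files/B ~/annex/somewhere/B
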
Added a comment: unavailable files
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_6_6f3b5d5a5781b3a570f46481dc2ebca2._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_6_6f3b5d5a5781b3a570f46481dc2ebca2._comment
new file mode 100644
index 0000000..f505b34
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_6_6f3b5d5a5781b3a570f46481dc2ebca2._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnx8kHW66N3BqmkVpgtXDlYMvr8TJ5VvfY"
+ nickname="Yaroslav"
+ subject="unavailable files"
+ date="2014-12-18T16:37:13Z"
+ content="""
+For my use cases the best would be if FUSE simply didn't return until the file becomes available. An option to return immediately with EBUSY/ENODATA could also be generally useful, but not in my case :-/
+I wonder if some timeout would kick in in certain use cases if it takes too long?
+"""]]

diff --git a/doc/forum/Git_Annex_not_dropping_unused_content.mdwn b/doc/forum/Git_Annex_not_dropping_unused_content.mdwn
new file mode 100644
index 0000000..ffdebcd
--- /dev/null
+++ b/doc/forum/Git_Annex_not_dropping_unused_content.mdwn
@@ -0,0 +1,9 @@
+I've deleted some files, but their content remains in the objects directory, and *git annex unused* does not list them.
+
+I've read in this post <http://git-annex.branchable.com/forum/git-annex_unused_not_dropping_deleted_files/> that if other branches contain the files, then it won't count them as unused. My repo appears to have a few branches left over from views I've used.
+
+    remotes/sdrive/views/(added=14_09);(tag=Shared)
+    remotes/sdrive/views/(added=14_09);Images_=_
+    remotes/sdrive/views/added=_
+
+How can I delete these? Is git annex going to create a new branch for every new view I create?

Added a comment
diff --git a/doc/forum/Recovery_after_freeze_while_importing_files/comment_2_e14f330de3e9ebfa9a99c32e65d59d11._comment b/doc/forum/Recovery_after_freeze_while_importing_files/comment_2_e14f330de3e9ebfa9a99c32e65d59d11._comment
new file mode 100644
index 0000000..3a897cc
--- /dev/null
+++ b/doc/forum/Recovery_after_freeze_while_importing_files/comment_2_e14f330de3e9ebfa9a99c32e65d59d11._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="ghen1"
+ subject="comment 2"
+ date="2014-12-18T11:28:35Z"
+ content="""
+I did, and everything seems fine.
+"""]]

Added a comment
diff --git a/doc/forum/repair_stuck_on_ls-tree_command/comment_10_791c50f8a2284b704e34cacf15637341._comment b/doc/forum/repair_stuck_on_ls-tree_command/comment_10_791c50f8a2284b704e34cacf15637341._comment
new file mode 100644
index 0000000..3058cba
--- /dev/null
+++ b/doc/forum/repair_stuck_on_ls-tree_command/comment_10_791c50f8a2284b704e34cacf15637341._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="vho"
+ subject="comment 10"
+ date="2014-12-18T10:27:07Z"
+ content="""
+This one should work:
+https://www.dropbox.com/s/t7b406wm6m3vm6c/photos2.tar.xz.gpg?dl=0
+
+    $ md5sum photos2.tar.xz.gpg
+    df2b9bde3d1ad23d9f3c4247d2f5b21a  photos2.tar.xz.gpg
+    $ sha1sum photos2.tar.xz.gpg
+    cac6f8c07a1d95fb48e3d9aa0f4699a2c77c00d598  photos2.tar.xz.gpg
+
+"""]]

Added a comment
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_5_b92045c91d92da7db794aed2c67dde0d._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_5_b92045c91d92da7db794aed2c67dde0d._comment
new file mode 100644
index 0000000..de3a2a8
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_5_b92045c91d92da7db794aed2c67dde0d._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="CandyAngel"
+ subject="comment 5"
+ date="2014-12-18T09:26:17Z"
+ content="""
+Having the lean view would be easy to implement either as an option you pass when mounting or something you can toggle by touching a file ($MNT/.config/lean/{on,off}).
+
+Regarding fetching of files, how would you like it to behave? My previous one would return EBUSY while downloading a file and ENODATA if it wasn't available and couldn't be fetched. I could, for example, make unavailable files appear as normal files (containing text regarding the download state) until they are available, then they become symlinks. What would work best for you?
+"""]]

Added a comment: failed upload
diff --git a/doc/forum/repair_stuck_on_ls-tree_command/comment_9_163cdd18380a13aaa13d68d516af1e30._comment b/doc/forum/repair_stuck_on_ls-tree_command/comment_9_163cdd18380a13aaa13d68d516af1e30._comment
new file mode 100644
index 0000000..f0d0207
--- /dev/null
+++ b/doc/forum/repair_stuck_on_ls-tree_command/comment_9_163cdd18380a13aaa13d68d516af1e30._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="vho"
+ subject="failed upload"
+ date="2014-12-18T09:18:14Z"
+ content="""
+It seems the upload failed at some point. I am going to share it somewhere else
+"""]]

update for torrents
diff --git a/doc/git-annex.mdwn b/doc/git-annex.mdwn
index 8a2633e..140a5aa 100644
--- a/doc/git-annex.mdwn
+++ b/doc/git-annex.mdwn
@@ -225,9 +225,12 @@ subdirectories).
   alternate locations from which the file can be downloaded. In this mode,
   addurl can be used both to add new files, or to add urls to existing files.
 
-  When quvi is installed, urls are automatically tested to see if they
+  When `quvi` is installed, urls are automatically tested to see if they
   point to a video hosting site, and the video is downloaded instead.
 
+  Urls to torrent files (including magnet links) will cause the content of
+  the torrent to be downloaded, using `aria2c`.
+
 * `rmurl file url`
 
   Record that the file is no longer available at the url.

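In practice that means addurl can now be pointed straight at a torrent (a sketch; the url is hypothetical and `aria2c` must be installed):

    git annex addurl http://example.com/example.torrent
    # or register the torrent's files without downloading their content yet
    git annex addurl --fast http://example.com/example.torrent
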
Added a comment: (recovery from race)
diff --git a/doc/forum/repair_stuck_on_ls-tree_command/comment_8_726c9a887b7df1833d7aef3bdce50517._comment b/doc/forum/repair_stuck_on_ls-tree_command/comment_8_726c9a887b7df1833d7aef3bdce50517._comment
new file mode 100644
index 0000000..f79b16f
--- /dev/null
+++ b/doc/forum/repair_stuck_on_ls-tree_command/comment_8_726c9a887b7df1833d7aef3bdce50517._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="vho"
+ subject="(recovery from race)"
+ date="2014-12-18T00:46:09Z"
+ content="""
+git show 7dc2be23ddb9bda2edc0f01d4bbec2fdb5216763 seems quite abnormal.
+"""]]

Added a comment: update
diff --git a/doc/forum/repair_stuck_on_ls-tree_command/comment_7_5d9e5fd148d5f9e918ad818e07009d69._comment b/doc/forum/repair_stuck_on_ls-tree_command/comment_7_5d9e5fd148d5f9e918ad818e07009d69._comment
new file mode 100644
index 0000000..b27b503
--- /dev/null
+++ b/doc/forum/repair_stuck_on_ls-tree_command/comment_7_5d9e5fd148d5f9e918ad818e07009d69._comment
@@ -0,0 +1,30 @@
+[[!comment format=mdwn
+ username="vho"
+ subject="update"
+ date="2014-12-18T00:43:22Z"
+ content="""
+Hi,
+
+I have uploaded the repository to:
+http://dl.free.fr/bczxhyOhy
+
+You can decrypt it with your gpg key. It weighs 200 MB.
+
+Moreover, when I run git annex whereis from another annex, on files present only in the defective annex, I get the following error:
+
+    $ git annex whereis IMG_4701.JPG
+    whereis IMG_4701.JPG (0 copies) failed
+    git-annex: whereis: 1 failed
+
+I guess the defective annex could not completely synchronize with the others.
+
+I also tried to look at the git tree with gitk --all.
+Well, there seem to be a lot of commits with the message "update (recovery from race)".
+I am forced to kill gitk because it starts lagging a lot.
+
+Hopefully you will be able to provide me with some hints on how to resolve this issue.
+
+Best regards
+-- 
+vho
+"""]]

diff --git a/doc/bugs/Direct_mode_sync_fails_to_transfer_a_10GB_file.mdwn b/doc/bugs/Direct_mode_sync_fails_to_transfer_a_10GB_file.mdwn
new file mode 100644
index 0000000..773d452
--- /dev/null
+++ b/doc/bugs/Direct_mode_sync_fails_to_transfer_a_10GB_file.mdwn
@@ -0,0 +1,58 @@
+### Please describe the problem.
+
+On Windows, a 10GB file of mine is successfully indexed (``git annex add``'ed), but ``git annex sync --content`` always fails with rsync, saying "recvkey: received key with wrong size". This is the largest file I've tested so far, and the only one that's failed.
+
+### What steps will reproduce the problem?
+
+1. Copy a 10GB file to a working copy (mine is ''PNG_Sequence.rar'', 10 361 629 980 bytes).
+2. Run ``git annex add``
+3. Run ``git annex sync --content``
+
+### What version of git-annex are you using? On what operating system?
+
+Windows 7 x64 with:
+
+    git-annex version: 5.20141128-g70f997e
+    build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV DNS Feed
+    s Quvi TDFA CryptoHash
+    key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SH
+    A256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+    remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar ho
+    ok external
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+commit  (Recording state in git...)
+ok
+pull origin
+ok
+copy Art/PlanetPioneers/PNG_Sequence.rar copy Art/PlanetPioneers/PNG_Sequence.ra
+r (checking origin...) (to origin...)
+  recvkey: received key with wrong size; discarding
+
+sent 39 bytes  received 12 bytes  102.00 bytes/sec
+rsync error: syntax or usage error (code 1) at /home/lapo/package/rsync-3.0.9-1/
+src/rsync-3.0.9/main.c(1052) [sender=3.0.9]total size is 10361629980  speedup is
+ 203169215.29
+  rsync failed -- run git annex again to resume file transfer
+failed
+pull origin
+ok
+push origin
+Counting objects: 24, done.
+Delta compression using up to 8 threads.
+Compressing objects: 100% (11/11), done.
+Writing objects: 100% (13/13), 1.06 KiB | 0 bytes/s, done.
+Total 13 (delta 7), reused 0 (delta 0)
+To ssh://gitannex@serv-gitannex:/home/gitannex/git-annex-test.git
+   f8f70de..41bec92  git-annex -> synced/git-annex
+   090ca15..e9e842b  annex/direct/master -> synced/master
+ok
+git-annex: sync: 1 failed
+
+# End of transcript or log.
+"""]]

diff --git a/doc/todo/Bittorrent-like_features.mdwn b/doc/todo/Bittorrent-like_features.mdwn
index 82e7d84..f1075a1 100644
--- a/doc/todo/Bittorrent-like_features.mdwn
+++ b/doc/todo/Bittorrent-like_features.mdwn
@@ -46,4 +46,4 @@ This way, a torrent would just become another source for a specific file. When w
 
 That way we avoid the implementation complexity of shoving a complete bittorrent client within the assistant. The `get` operation would block until the torrent is downloaded, i guess... --[[anarcat]]
 
-> This is now implemented. Including magnet link support, and multi-file torrent support. Leaving toto item open for the blue-sky stuff at top. --[[Joey]]
+> This is now [[implemented|special_remotes/bittorrent/]]. Including magnet link support, and multi-file torrent support. Leaving todo item open for the blue-sky stuff at top. --[[Joey]]

typo
diff --git a/doc/devblog/day_239-240__bittorrent_remote.mdwn b/doc/devblog/day_239-240__bittorrent_remote.mdwn
index 771a01f..3b46654 100644
--- a/doc/devblog/day_239-240__bittorrent_remote.mdwn
+++ b/doc/devblog/day_239-240__bittorrent_remote.mdwn
@@ -10,7 +10,7 @@ bittornado). I looked into using
 but that package is out of date and doesn't currently build. I've got a
 patch fixing that, but am waiting to hear back from the library's author.
 
-There is a bit of a behavior change here; while before `git annex adurl` of
+There is a bit of a behavior change here; while before `git annex addurl` of
 a torrent file would add the torrent file itself to the repository, it now will
 download and add the contents of the torrent. I think/hope this behavior
 change is ok..

update
diff --git a/doc/thanks.mdwn b/doc/thanks.mdwn
index 9c9f6ad..90c9948 100644
--- a/doc/thanks.mdwn
+++ b/doc/thanks.mdwn
@@ -14,7 +14,7 @@ git-annex development is partially supported by the
 [NSF](https://www.nsf.gov/awardsearch/showAward?AWD_ID=1429999) as a part of the
 [DataLad project](http://datalad.org/).
 
-Thanks also to Martin f. Krafft and John Byrnes.
+Thanks also to these folks for their support: martin f. krafft and John Byrnes.
 
 ## 2013-2014
 
@@ -134,7 +134,7 @@ Poobalasubramanian, Alexandre Garel, David Clark, Jeff Johnson,
 Mica Semrick, Paul Staab, Rémi Vanicat, Martin Holtschneider, Jan Ivar
 Beddari, Peter Simons, Thomas Koch, Justin Geibel, Guillaume DELVIT, Shanti
 Bouchez, Oliver Brandt, François Deppierraz, Chad Walstrom, Tim Mattison,
-Jakub Antoni Tyszko, Casa do Boneco, Florian Tham, martin f. krafft,
+Jakub Antoni Tyszko, Casa do Boneco, Florian Tham,
 and 30 anonymous bitcoin users
 
 With an especial thanks to the WikiMedia foundation,

note on behavior change
diff --git a/doc/devblog/day_239-240__bittorrent_remote.mdwn b/doc/devblog/day_239-240__bittorrent_remote.mdwn
index a5fa164..771a01f 100644
--- a/doc/devblog/day_239-240__bittorrent_remote.mdwn
+++ b/doc/devblog/day_239-240__bittorrent_remote.mdwn
@@ -9,3 +9,8 @@ bittornado). I looked into using
 <http://hackage.haskell.org/package/torrent> instead,
 but that package is out of date and doesn't currently build. I've got a
 patch fixing that, but am waiting to hear back from the library's author.
+
+There is a bit of a behavior change here; while before `git annex adurl` of
+a torrent file would add the torrent file itself to the repository, it now will
+download and add the contents of the torrent. I think/hope this behavior
+change is ok..

update
diff --git a/doc/thanks.mdwn b/doc/thanks.mdwn
index c4cefbb..9c9f6ad 100644
--- a/doc/thanks.mdwn
+++ b/doc/thanks.mdwn
@@ -14,6 +14,8 @@ git-annex development is partially supported by the
 [NSF](https://www.nsf.gov/awardsearch/showAward?AWD_ID=1429999) as a part of the
 [DataLad project](http://datalad.org/).
 
+Thanks also to Martin f. Krafft and John Byrnes.
+
 ## 2013-2014
 
 Continued git-annex development was [crowd funded](https://campaign.joeyh.name/)

devblog
diff --git a/doc/devblog/day_239-240__bittorrent_remote.mdwn b/doc/devblog/day_239-240__bittorrent_remote.mdwn
new file mode 100644
index 0000000..a5fa164
--- /dev/null
+++ b/doc/devblog/day_239-240__bittorrent_remote.mdwn
@@ -0,0 +1,11 @@
+Spent a couple days adding a [[bittorrent_special_remote|special_remotes/bittorrent]]
+to git-annex. This is better than the demo external torrent remote I made
+on Friday: It's built into git-annex; it supports magnet links; it even
+parses aria2c's output so the webapp can display progress bars.
+
+Besides needing `aria2` to download torrents, it also currently depends on
+the `btshowmetainfo` command from the original bittorrent client (or
+bittornado). I looked into using
+<http://hackage.haskell.org/package/torrent> instead,
+but that package is out of date and doesn't currently build. I've got a
+patch fixing that, but am waiting to hear back from the library's author.

make checkKey always return unknown
diff --git a/doc/special_remotes/external/git-annex-remote-torrent b/doc/special_remotes/external/git-annex-remote-torrent
index 4f99483..4df1f81 100755
--- a/doc/special_remotes/external/git-annex-remote-torrent
+++ b/doc/special_remotes/external/git-annex-remote-torrent
@@ -181,9 +181,9 @@ while read line; do
 		;;
 		CHECKPRESENT)
 			key="$2"
-			# Let's just assume that torrents are always present
+			# Let's just assume that torrents are never present
 			# for simplicity.
-			echo CHECKPRESENT-SUCCESS "$key"
+			echo CHECKPRESENT-UNKNOWN "$key" "cannot reliably check torrent status"
 		;;
 		REMOVE)
 			key="$2"

remove default untrusted hack for bittorrent
This is better handled by checkPresent always failing.
diff --git a/Remote/BitTorrent.hs b/Remote/BitTorrent.hs
index 082fa93..4cb579f 100644
--- a/Remote/BitTorrent.hs
+++ b/Remote/BitTorrent.hs
@@ -72,8 +72,7 @@ gen r _ c gc =
 		}
 
 downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Bool
-downloadKey key _file dest p = do
-	defaultUnTrusted
+downloadKey key _file dest p = 
 	get . map (torrentUrlNum . fst . getDownloader) =<< getBitTorrentUrls key
   where
 	get [] = do
@@ -109,11 +108,6 @@ dropKey k = do
 checkKey :: Key -> Annex Bool
 checkKey key = error "cannot reliably check torrent status"
 
--- Makes this remote UnTrusted, unless it already has a trust set.
-defaultUnTrusted :: Annex ()
-defaultUnTrusted = whenM (isNothing . M.lookup bitTorrentUUID <$> trustMapRaw) $
-	trustSet bitTorrentUUID UnTrusted
-
 getBitTorrentUrls :: Key -> Annex [URLString]
 getBitTorrentUrls key = filter supported <$> getUrls key
   where
diff --git a/doc/special_remotes/bittorrent.mdwn b/doc/special_remotes/bittorrent.mdwn
index 7fe8ebd..36fa1b8 100644
--- a/doc/special_remotes/bittorrent.mdwn
+++ b/doc/special_remotes/bittorrent.mdwn
@@ -16,8 +16,5 @@ bittornado or the original BitTorrent client.
 Currently git-annex only supports downloading content from a torrent; 
 it cannot upload or remove content.
 
-Torrent swarms tend to come and go, so git-annex defaults to *not*
-trusting the bittorrent special remote.
-
 Multi-file torrents are supported; to handle them, `git annex addurl`
 will add a directory containing all the files from the torrent.

update
diff --git a/doc/todo/Bittorrent-like_features.mdwn b/doc/todo/Bittorrent-like_features.mdwn
index 1b4dcb3..82e7d84 100644
--- a/doc/todo/Bittorrent-like_features.mdwn
+++ b/doc/todo/Bittorrent-like_features.mdwn
@@ -46,4 +46,4 @@ This way, a torrent would just become another source for a specific file. When w
 
 That way we avoid the implementation complexity of shoving a complete bittorrent client within the assistant. The `get` operation would block until the torrent is downloaded, i guess... --[[anarcat]]
 
-This is now somewhat implemented, see [[devblog/day_238__extending_addurl_further/]] for details.
+> This is now implemented. Including magnet link support, and multi-file torrent support. Leaving toto item open for the blue-sky stuff at top. --[[Joey]]

addurl with #n doesn't work, remove from docs
Doesn't really seem worth making it work; addurl --fast can be used to
get a tree of files in the torrent and then the user can rm the ones they
don't want.
diff --git a/doc/special_remotes/bittorrent.mdwn b/doc/special_remotes/bittorrent.mdwn
index c5ef91a..7fe8ebd 100644
--- a/doc/special_remotes/bittorrent.mdwn
+++ b/doc/special_remotes/bittorrent.mdwn
@@ -20,6 +20,4 @@ Torrent swarms tend to come and go, so git-annex defaults to *not*
 trusting the bittorrent special remote.
 
 Multi-file torrents are supported; to handle them, `git annex addurl`
-will add a directory containing all the files from the torrent. To
-specify a single file from a multi-file torrent, append "#n" to its url;
-"#1" is the first file, "#2" is the second, and so on.
+will add a directory containing all the files from the torrent.

Added bittorrent special remote
addurl behavior change: When downloading an url ending in .torrent,
it will download files from bittorrent, instead of the old behavior
of adding the torrent file to the repository.
Added Recommends on aria2 and bittornado | bittorrent.
This commit was sponsored by Asbjørn Sloth Tønnesen.
diff --git a/Logs/Trust.hs b/Logs/Trust.hs
index b880f44..41ce5a5 100644
--- a/Logs/Trust.hs
+++ b/Logs/Trust.hs
@@ -15,7 +15,6 @@ module Logs.Trust (
 	trustExclude,
 	lookupTrust,
 	trustMapLoad,
-	trustMapRaw,
 ) where
 
 import qualified Data.Map as M
@@ -23,7 +22,6 @@ import Data.Default
 
 import Common.Annex
 import Types.TrustLevel
-import qualified Annex.Branch
 import qualified Annex
 import Logs
 import Remote.List
@@ -77,8 +75,3 @@ trustMapLoad = do
 	configuredtrust r = (\l -> Just (Types.Remote.uuid r, l))
 		=<< readTrustLevel 
 		=<< remoteAnnexTrustLevel (Types.Remote.gitconfig r)
-
-{- Does not include forcetrust or git config values, just those from the
- - log file. -}
-trustMapRaw :: Annex TrustMap
-trustMapRaw = calcTrustMap <$> Annex.Branch.get trustLog
diff --git a/Logs/Trust/Basic.hs b/Logs/Trust/Basic.hs
index 646e2e0..c356be2 100644
--- a/Logs/Trust/Basic.hs
+++ b/Logs/Trust/Basic.hs
@@ -8,6 +8,7 @@
 module Logs.Trust.Basic (
 	module X,
 	trustSet,
+	trustMapRaw,
 ) where
 
 import Data.Time.Clock.POSIX
@@ -30,3 +31,8 @@ trustSet uuid@(UUID _) level = do
 				parseLog (Just . parseTrustLog)
 	Annex.changeState $ \s -> s { Annex.trustmap = Nothing }
 trustSet NoUUID _ = error "unknown UUID; cannot modify"
+
+{- Does not include forcetrust or git config values, just those from the
+ - log file. -}
+trustMapRaw :: Annex TrustMap
+trustMapRaw = calcTrustMap <$> Annex.Branch.get trustLog
diff --git a/Remote/BitTorrent.hs b/Remote/BitTorrent.hs
new file mode 100644
index 0000000..aaedcd0
--- /dev/null
+++ b/Remote/BitTorrent.hs
@@ -0,0 +1,342 @@
+{- BitTorrent remote.
+ -
+ - Copyright 2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Remote.BitTorrent (remote) where
+
+import Common.Annex
+import Types.Remote
+import qualified Annex
+import qualified Git
+import qualified Git.Construct
+import Config.Cost
+import Logs.Web
+import Logs.Trust.Basic
+import Types.TrustLevel
+import Types.UrlContents
+import Types.CleanupActions
+import Utility.Metered
+import Utility.Tmp
+import Backend.URL
+import Annex.Perms
+import qualified Annex.Url as Url
+
+import qualified Data.Map as M
+import Network.URI
+
+-- Dummy uuid for bittorrent. Do not alter.
+bitTorrentUUID :: UUID
+bitTorrentUUID = UUID "00000000-0000-0000-0000-000000000002"
+
+remote :: RemoteType
+remote = RemoteType {
+	typename = "bittorrent",
+	enumerate = list,
+	generate = gen,
+	setup = error "not supported"
+}
+
+-- There is only one bittorrent remote, and it always exists.
+list :: Annex [Git.Repo]
+list = do
+	r <- liftIO $ Git.Construct.remoteNamed "bittorrent" Git.Construct.fromUnknown
+	return [r]
+
+gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
+gen r _ c gc = 
+	return $ Just Remote
+		{ uuid = bitTorrentUUID
+		, cost = expensiveRemoteCost
+		, name = Git.repoDescribe r
+		, storeKey = uploadKey
+		, retrieveKeyFile = downloadKey
+		, retrieveKeyFileCheap = downloadKeyCheap
+		, removeKey = dropKey
+		, checkPresent = checkKey
+		, checkPresentCheap = False
+		, whereisKey = Nothing
+		, remoteFsck = Nothing
+		, repairRepo = Nothing
+		, config = c
+		, gitconfig = gc
+		, localpath = Nothing
+		, repo = r
+		, readonly = True
+		, availability = GloballyAvailable
+		, remotetype = remote
+		, mkUnavailable = return Nothing
+		, getInfo = return []
+		, claimUrl = Just (pure . isSupportedUrl)
+		, checkUrl = Just checkTorrentUrl
+		}
+
+downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Bool
+downloadKey key _file dest p = do
+	defaultUnTrusted
+	get . map (torrentUrlNum . fst . getDownloader) =<< getBitTorrentUrls key
+  where
+	get [] = do
+		warning "no known torrent url"
+		return False
+	get urls = do
+		showOutput -- make way for download progress bar
+		untilTrue urls $ \(u, filenum) -> do
+			registerTorrentCleanup u
+			checkDependencies
+			unlessM (downloadTorrentFile u) $
+				error "could not download torrent file"
+			downloadTorrentContent u dest filenum p
+
+downloadKeyCheap :: Key -> FilePath -> Annex Bool
+downloadKeyCheap _ _ = return False
+
+uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+uploadKey _ _ _ = do
+	warning "upload to bittorrent not supported"
+	return False
+
+dropKey :: Key -> Annex Bool
+dropKey k = do
+	mapM_ (setUrlMissing bitTorrentUUID k) =<< getBitTorrentUrls k
+	return True
+
+{- This is a very poor check, but checking if a torrent has enough seeders
+ - with all the pieces etc is quite hard.. and even if implemented, it
+ - tells us nothing about the later state of the torrent.
+ -
+ - This is why this remote needs to default to untrusted!
+ -}
+checkKey :: Key -> Annex Bool
+checkKey key = not . null <$> getBitTorrentUrls key
+
+-- Makes this remote UnTrusted, unless it already has a trust set.
+defaultUnTrusted :: Annex ()
+defaultUnTrusted = whenM (isNothing . M.lookup bitTorrentUUID <$> trustMapRaw) $
+	trustSet bitTorrentUUID UnTrusted
+
+getBitTorrentUrls :: Key -> Annex [URLString]
+getBitTorrentUrls key = filter supported <$> getUrls key
+  where
+	supported u =
+		let (u', dl) = (getDownloader u)
+		in dl == OtherDownloader && isSupportedUrl u'
+
+isSupportedUrl :: URLString -> Bool
+isSupportedUrl u = isTorrentMagnetUrl u || isTorrentUrl u
+
+isTorrentUrl :: URLString -> Bool
+isTorrentUrl = maybe False (\u -> ".torrent" `isSuffixOf` uriPath u) . parseURI
+
+isTorrentMagnetUrl :: URLString -> Bool
+isTorrentMagnetUrl u = "magnet:" `isPrefixOf` u && checkbt (parseURI u)
+  where
+	checkbt (Just uri) | "xt=urn:btih:" `isInfixOf` uriQuery uri = True
+	checkbt _ = False
+
+checkTorrentUrl :: URLString -> Annex UrlContents
+checkTorrentUrl u = do
+	checkDependencies
+	registerTorrentCleanup u
+	ifM (downloadTorrentFile u)
+		( torrentContents u

(Diff truncated)
Added a comment
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_4_ab0d45c5058595a71656035c962c1143._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_4_ab0d45c5058595a71656035c962c1143._comment
new file mode 100644
index 0000000..a527f9b
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_4_ab0d45c5058595a71656035c962c1143._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnx8kHW66N3BqmkVpgtXDlYMvr8TJ5VvfY"
+ nickname="Yaroslav"
+ subject="comment 4"
+ date="2014-12-16T20:42:28Z"
+ content="""
+what could I say to a \"much better one\" offer, besides \"GO AHEAD\" and \"Thank you in advance\"! :)
+
+I wonder, though, what joey thinks about the possible utility of a basic fuse wrapper for annex, and about possibly shipping it along?
+
+My primary use case is testing: e.g. if I wanted to run a (sub)collection of tests (e.g. on travis) which rely on having some data from annex available, right now I would need to provide some project/language-specific wrapping which would check whether a file is available and then fetch it.  With FUSE I thought I could just do that transparently, without requiring any per-project coding/setup.  A similar use case would be analysis of some large datasets, once again without requiring them to be pre-fetched in their entirety or piece by piece.
+Another possible use case/mode would be to expose only the available files under FUSE.  If it were easy to \"trigger\", it would help provide that \"lean\" view I was blurbing about (https://github.com/datalad/datalad/issues/25), although it would be quite a suboptimal workaround (since, if a directory is heavily loaded with broken links, it would take a while for the FUSE handler to traverse the tree anyway).
+"""]]

Added a comment
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_3_d6f6e7181f30094339a49ab420bee380._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_3_d6f6e7181f30094339a49ab420bee380._comment
new file mode 100644
index 0000000..f32de2f
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_3_d6f6e7181f30094339a49ab420bee380._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="CandyAngel"
+ subject="comment 3"
+ date="2014-12-16T16:56:05Z"
+ content="""
+@Yaroslav: I made one of these while I was messing with FUSE but found I didn't use it much.
+
+If I can find it, I'll post it somewhere or if you really want it, I can just write a (much) better one!
+"""]]

Added a comment: I wondered if there is even a simple fuse wrapper for git-annex?
diff --git a/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_2_341b567722797eb02bd96ffada431b0c._comment b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_2_341b567722797eb02bd96ffada431b0c._comment
new file mode 100644
index 0000000..e155225
--- /dev/null
+++ b/doc/news/sharebox_a_FUSE_filesystem_for_git-annex/comment_2_341b567722797eb02bd96ffada431b0c._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnx8kHW66N3BqmkVpgtXDlYMvr8TJ5VvfY"
+ nickname="Yaroslav"
+ subject="I wondered if there is any even simple fuse wrapper for git-annex?"
+ date="2014-12-16T16:34:06Z"
+ content="""
+sharebox seems to aim at what the assistant is aiming for (synchronization).  For my use case, I wondered if there is a simple(r) FUSE wrapper for git-annex which would just 'annex get' any file that is requested (for reading).
+"""]]

diff --git a/doc/bugs/Attempting_to_repair_repository_almost_every_day.mdwn b/doc/bugs/Attempting_to_repair_repository_almost_every_day.mdwn
new file mode 100644
index 0000000..6925606
--- /dev/null
+++ b/doc/bugs/Attempting_to_repair_repository_almost_every_day.mdwn
@@ -0,0 +1,5 @@
+I'm using the webapp on my two main computers with up-to-date Debian (one has testing and the other one sid) and I have consistency checks scheduled every day.
+
+The problem is that almost every time the check runs I'm getting the "Attempting to repair" message and git-annex starts using 100% CPU for quite a while, but afterwards it seems to have made no change to the git tree or the files.
+
+The two computers are never on at the same time, but they are constantly syncing files (through box.com); I don't know whether downloading files while the check is in progress might have something to do with it.

Added a comment: Where to start reading the source code?
diff --git a/doc/install/fromsource/comment_47_6de25c1e450e1e3b1d18d2c76235ccb8._comment b/doc/install/fromsource/comment_47_6de25c1e450e1e3b1d18d2c76235ccb8._comment
new file mode 100644
index 0000000..2edb95e
--- /dev/null
+++ b/doc/install/fromsource/comment_47_6de25c1e450e1e3b1d18d2c76235ccb8._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="thnetos"
+ subject="Where to start reading the source code?"
+ date="2014-12-15T22:04:06Z"
+ content="""
+Do you have any pointers on the overall Haskell architecture of the project? Where should I start looking if I want to read through the source code?
+"""]]

Added a comment
diff --git a/doc/forum/git-annex_and_tagfs/comment_3_d96948c444f9f485a329420b7c9e3b16._comment b/doc/forum/git-annex_and_tagfs/comment_3_d96948c444f9f485a329420b7c9e3b16._comment
new file mode 100644
index 0000000..4bc1ccc
--- /dev/null
+++ b/doc/forum/git-annex_and_tagfs/comment_3_d96948c444f9f485a329420b7c9e3b16._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="Xyem"
+ subject="comment 3"
+ date="2014-12-15T08:38:58Z"
+ content="""
+Do the [metadata views](http://git-annex.branchable.com/tips/metadata_driven_views/) not do what you want?
+"""]]

first implementation of the easier stuff :)
diff --git a/doc/todo/Bittorrent-like_features.mdwn b/doc/todo/Bittorrent-like_features.mdwn
index 1dfe2c6..1b4dcb3 100644
--- a/doc/todo/Bittorrent-like_features.mdwn
+++ b/doc/todo/Bittorrent-like_features.mdwn
@@ -45,3 +45,5 @@ or even better:
 This way, a torrent would just become another source for a specific file. When we `get` the file, it fires up `$YOUR_FAVORITE_TORRENT_CLIENT` to download the file.
 
 That way we avoid the implementation complexity of shoving a complete bittorrent client within the assistant. The `get` operation would block until the torrent is downloaded, i guess... --[[anarcat]]
+
+This is now somewhat implemented, see [[devblog/day_238__extending_addurl_further/]] for details.

link to aria2c software
diff --git a/doc/devblog/day_238__extending_addurl_further.mdwn b/doc/devblog/day_238__extending_addurl_further.mdwn
index f58bee6..0dd3764 100644
--- a/doc/devblog/day_238__extending_addurl_further.mdwn
+++ b/doc/devblog/day_238__extending_addurl_further.mdwn
@@ -8,7 +8,7 @@ Then, got `git annex importfeed` to also check if a remote claims an url.
 
 Finally, I put together a quick demo external remote using this new
 interface. [[special_remotes/external/git-annex-remote-torrent]]
-adds support for torrent files to git-annex, using aria2c to download them.
+adds support for torrent files to git-annex, using [aria2c](http://aria2.sourceforge.net/) to download them.
 It supports multi-file torrents, but not magnet links. (I'll probably
 rewrite this more robustly and efficiently in haskell sometime soon.)
 

fix support for single-file torrents
diff --git a/doc/special_remotes/external/git-annex-remote-torrent b/doc/special_remotes/external/git-annex-remote-torrent
index a748857..4f99483 100755
--- a/doc/special_remotes/external/git-annex-remote-torrent
+++ b/doc/special_remotes/external/git-annex-remote-torrent
@@ -72,16 +72,24 @@ downloadtorrent () {
 	dest="$3"
 
 	tmpdir="$(mktemp -d)"
-	if ! runcmd aria2c --select-file="$n" "$torrent" -d "$tmpdir"; then
-		false
-	fi
 
 	# aria2c will create part of the directory structure
 	# contained in the torrent. It may download parts of other files
 	# in addition to the one we asked for. So, we need to find
 	# out the filename we want, and look for it.
-	wantdir="$(btshowmetainfo "$torrent" | grep "^directory name: " | sed "s/^directory name: //")"
-	wantfile="$(btshowmetainfo "$tmp" | grep '^   ' | sed 's/^   //' | head -n "$n" | tail -n 1 | sed 's/ ([0-9]*)$//')"
+	wantdir="$(btshowmetainfo "$torrent" | grep "^directory name: " | sed "s/^directory name: //" || true)"
+	if [ -n "$wantdir" ]; then
+		wantfile="$(btshowmetainfo "$torrent" | grep '^   ' | sed 's/^   //' | head -n "$n" | tail -n 1 | sed 's/ ([0-9]*)$//')"
+		if ! runcmd aria2c --select-file="$n" "$torrent" -d "$tmpdir"; then
+			false
+		fi
+	else
+		wantfile="$(btshowmetainfo "$torrent" | egrep "^file name.*: " | sed "s/^file name.*: //")"
+		wantdir=.
+		if ! runcmd aria2c "$torrent" -d "$tmpdir"; then
+			false
+		fi
+	fi
 	if [ -e "$tmpdir/$wantdir/$wantfile" ]; then
 		mv "$tmpdir/$wantdir/$wantfile" "$dest"
 		rm -rf "$tmpdir"
@@ -130,6 +138,11 @@ while read line; do
 					n=$(expr $n + 1)
 					printf " $url#$n $size $file"
 				done
+				if [ "$n" = 0 ]; then
+					file="$(btshowmetainfo "$tmp" | egrep "^file name.*: " | sed "s/^file name.*: //")"
+					size="$(btshowmetainfo "$tmp" | egrep "^file size.*: " | sed "s/^file size.*: \([0-9]*\).*/\1/")"
+					printf " $url $size $file"
+				fi
 				printf "\n"
 				IFS="$oldIFS"
 			fi
@@ -154,7 +167,7 @@ while read line; do
 						if ! runcmd curl -o "$tmp" "$url"; then
 							echo TRANSFER-FAILURE RETRIEVE "$key" "failed downloading torrent file from $url"
 						else
-							filenum="$(echo "$url" | sed 's/.*#\(\d*\)/\1/')"
+							filenum="$(echo "$url" | sed 's/(.*#\(\d*\)/\1/')"
 							if downloadtorrent "$tmp" "$filenum" "$file"; then
 								echo TRANSFER-SUCCESS RETRIEVE "$key"
 							else

move error message to return value
diff --git a/doc/special_remotes/external/git-annex-remote-torrent b/doc/special_remotes/external/git-annex-remote-torrent
index 4f5e62a..a748857 100755
--- a/doc/special_remotes/external/git-annex-remote-torrent
+++ b/doc/special_remotes/external/git-annex-remote-torrent
@@ -140,8 +140,7 @@ while read line; do
 			file="$4"
 			case "$2" in
 				STORE)
-					runcmd echo "upload not supported"
-					echo TRANSFER-FAILURE STORE "$key"
+					echo TRANSFER-FAILURE STORE "$key" "upload not supported"
 				;;
 				RETRIEVE)
 					urltmp=$(mktemp)

devblog
diff --git a/doc/devblog/day_238__extending_addurl_further.mdwn b/doc/devblog/day_238__extending_addurl_further.mdwn
new file mode 100644
index 0000000..f58bee6
--- /dev/null
+++ b/doc/devblog/day_238__extending_addurl_further.mdwn
@@ -0,0 +1,67 @@
+Some more work on the interface that lets remotes claim urls for `git annex
+addurl`. Added support for remotes suggesting a filename to use when
+adding an url. Also, added support for urls that result in multiple files
+when downloaded. The obvious use case for that is an url to a torrent that
+contains multiple files.
+
+Then, got `git annex importfeed` to also check if a remote claims an url.
+
+Finally, I put together a quick demo external remote using this new
+interface. [[special_remotes/external/git-annex-remote-torrent]]
+adds support for torrent files to git-annex, using aria2c to download them.
+It supports multi-file torrents, but not magnet links. (I'll probably
+rewrite this more robustly and efficiently in haskell sometime soon.)
+
+Here's a demo:
+
+<pre>
+# git annex initremote torrent type=external encryption=none externaltype=torrent
+initremote torrent ok
+(Recording state in git...)
+# ls
+# git annex addurl  --fast file:///home/joey/my.torrent
+  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
+                                 Dload  Upload   Total   Spent    Left  Speed
+100   198  100   198    0     0  3946k      0 --:--:-- --:--:-- --:--:-- 3946k
+addurl _home_joey_my.torrent/bar (using torrent) ok
+addurl _home_joey_my.torrent/baz (using torrent) ok
+addurl _home_joey_my.torrent/foo (using torrent) ok
+(Recording state in git...)
+# ls _home_joey_my.torrent/
+bar@  baz@  foo@
+# git annex get _home_joey_my.torrent/baz
+get _home_joey_my.torrent/baz (from torrent...) 
+  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
+                                 Dload  Upload   Total   Spent    Left  Speed
+  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:-100   198  100   198    0     0  3580k      0 --:--:-- --:--:-- --:--:-- 3580k
+
+12/11 18:14:56 [NOTICE] IPv4 DHT: listening on UDP port 6946
+
+12/11 18:14:56 [NOTICE] IPv4 BitTorrent: listening on TCP port 6961
+
+12/11 18:14:56 [NOTICE] IPv6 BitTorrent: listening on TCP port 6961
+
+12/11 18:14:56 [NOTICE] Seeding is over.
+12/11 18:14:57 [NOTICE] Download complete: /home/joey/tmp/tmp.Le89hJSXyh/tor
+
+12/11 18:14:57 [NOTICE] Your share ratio was 0.0, uploaded/downloaded=0B/0B
+                                                                               
+Download Results:
+gid   |stat|avg speed  |path/URI
+======+====+===========+=======================================================
+71f6b6|OK  |       0B/s|/home/joey/tmp/tmp.Le89hJSXyh/tor/baz
+
+Status Legend:
+(OK):download completed.
+ok                      
+(Recording state in git...)
+# git annex find
+_home_joey_my.torrent/baz
+# git annex whereis _home_joey_my.torrent/baz
+whereis _home_joey_my.torrent/baz (2 copies) 
+  	1878241d-ee49-446d-8cce-041c46442d94 -- [torrent]
+   	52412020-2bb3-4aa4-ae16-0da22ba48875 -- joey@darkstar:~/tmp/repo [here]
+
+  torrent: file:///home/joey/my.torrent#2
+ok
+</pre>

add working external special remote for torrents
Not, IMHO, of good enough quality to be more than an example, but it does work!
diff --git a/doc/special_remotes/external.mdwn b/doc/special_remotes/external.mdwn
index d969996..010f2a1 100644
--- a/doc/special_remotes/external.mdwn
+++ b/doc/special_remotes/external.mdwn
@@ -14,14 +14,18 @@ It's not hard!
 * Install it in PATH.
 * When the user runs `git annex initremote foo type=external externaltype=$bar`,
   it will use your program.
+* See [[design/external_special_remote_protocol]] for what the program
+  needs to do. There's an example at the end of this page.
 * If things don't seem to work, pass `--debug` and you'll see, among other
   things, a transcript of git-annex's communication with your program.
 * If you build a new special remote, please add it to the list
   of [[special_remotes]].
 
+Here's an example of using an external special remote to add torrent
+support to git-annex: [[external/git-annex-remote-torrent]]
+
 Here's a simple shell script example, which can easily be adapted
 to run whatever commands you need. Or better, re-written in some better
-language of your choice. See [[design/external_special_remote_protocol]]
-for the details.
+language of your choice.
 
 [[!inline pages="special_remotes/external/example.sh" feeds=no]]
diff --git a/doc/special_remotes/external/git-annex-remote-torrent b/doc/special_remotes/external/git-annex-remote-torrent
new file mode 100755
index 0000000..4f5e62a
--- /dev/null
+++ b/doc/special_remotes/external/git-annex-remote-torrent
@@ -0,0 +1,191 @@
+#!/bin/sh
+# This is a demo git-annex external special remote program,
+# which adds basic torrent download support to git-annex.
+#
+# Uses aria2c. Also needs the original bittorrent (or bittornado) for the
+# btshowmetainfo command.
+# 
+# Install in PATH as git-annex-remote-torrent
+#
+# Enable remote by running:
+#  git annex initremote torrent type=external encryption=none externaltype=torrent
+#  git annex untrust torrent
+#
+# Copyright 2014 Joey Hess; licenced under the GNU GPL version 3 or higher.
+
+set -e
+
+# This program speaks a line-based protocol on stdin and stdout.
+# When running any commands, their stdout should be redirected to stderr
+# (or /dev/null) to avoid messing up the protocol.
+runcmd () {
+	"$@" >&2
+}
+
+# Gets a VALUE response and stores it in $RET
+getvalue () {
+	read resp
+	# Tricky POSIX shell code to split first word of the resp,
+	# preserving all other whitespace
+	case "${resp%% *}" in
+		VALUE)
+			RET="$(echo "$resp" | sed 's/^VALUE \?//')"
+		;;
+		*)
+		RET=""
+		;;
+	esac
+}
+
+# Get a list of all known torrent urls for a key,
+# storing it in a temp file.
+geturls () {
+	key="$1"
+	tmp="$2"
+
+	echo GETURLS "$key"
+	getvalue
+	while [ -n "$RET" ]; do
+		if istorrent "$RET"; then
+			echo "$RET" >> "$tmp"
+		fi
+		getvalue
+	done
+}
+
+# Does the url end in .torrent?
+# Note that we use #N on the url to indicate which file
+# from a multi-file torrent is wanted.
+istorrent () {
+	echo "$1" | egrep -q "\.torrent(#.*)?$"
+}
+
+# Download a single file from a torrent.
+#
+# Note: Does not support resuming interrupted transfers.
+# Note: Does not feed progress info back to git-annex, and since
+# the destination file is only populated at the end, git-annex will fail
+# to display a progress bar for this download.
+downloadtorrent () {
+	torrent="$1"
+	n="$2"
+	dest="$3"
+
+	tmpdir="$(mktemp -d)"
+	if ! runcmd aria2c --select-file="$n" "$torrent" -d "$tmpdir"; then
+		false
+	fi
+
+	# aria2c will create part of the directory structure
+	# contained in the torrent. It may download parts of other files
+	# in addition to the one we asked for. So, we need to find
+	# out the filename we want, and look for it.
+	wantdir="$(btshowmetainfo "$torrent" | grep "^directory name: " | sed "s/^directory name: //")"
+	wantfile="$(btshowmetainfo "$tmp" | grep '^   ' | sed 's/^   //' | head -n "$n" | tail -n 1 | sed 's/ ([0-9]*)$//')"
+	if [ -e "$tmpdir/$wantdir/$wantfile" ]; then
+		mv "$tmpdir/$wantdir/$wantfile" "$dest"
+		rm -rf "$tmpdir"
+	else
+		rm -rf "$tmpdir"
+		false
+	fi
+}
+
+# This has to come first, to get the protocol started.
+echo VERSION 1
+
+while read line; do
+	set -- $line
+	case "$1" in
+		INITREMOTE)
+			echo INITREMOTE-SUCCESS
+		;;
+		PREPARE)
+			echo PREPARE-SUCCESS
+		;;
+		CLAIMURL)
+			url="$2"
+			if istorrent "$url"; then
+				echo CLAIMURL-SUCCESS
+			else
+				echo CLAIMURL-FAILURE
+			fi
+		;;
+		CHECKURL)
+			url="$2"
+			# List contents of torrent.
+			tmp=$(mktemp)
+			if ! runcmd curl -o "$tmp" "$url"; then
+				echo CHECKURL-FAILURE
+			else
+				oldIFS="$IFS"
+			IFS="
+"
+				printf "CHECKURL-MULTI"
+				n=0
+				for l in $(btshowmetainfo "$tmp" | grep '^   ' | sed 's/^   //'); do
+					# Note that the file cannot contain spaces.
+					file="$(echo "$l" | sed 's/ ([0-9]*)$//' | sed 's/ /_/g')"
+					size="$(echo "$l" | sed 's/.* (\([0-9]*\))$/\1/')"
+					n=$(expr $n + 1)
+					printf " $url#$n $size $file"
+				done
+				printf "\n"
+				IFS="$oldIFS"
+			fi
+			rm -f "$tmp"
+		;;
+		TRANSFER)
+			key="$3"
+			file="$4"
+			case "$2" in
+				STORE)
+					runcmd echo "upload not supported"
+					echo TRANSFER-FAILURE STORE "$key"
+				;;
+				RETRIEVE)
+					urltmp=$(mktemp)
+					geturls "$key" "$urltmp"
+					url="$(head "$urltmp")" || true
+					rm -f "$urltmp"
+					if [ -z "$url" ]; then
+						echo TRANSFER-FAILURE RETRIEVE "$key" "no known torrent urls for this key"
+					else
+						tmp=$(mktemp)
+						if ! runcmd curl -o "$tmp" "$url"; then
+							echo TRANSFER-FAILURE RETRIEVE "$key" "failed downloading torrent file from $url"
+						else
+							filenum="$(echo "$url" | sed 's/.*#\(\d*\)/\1/')"
+							if downloadtorrent "$tmp" "$filenum" "$file"; then
+								echo TRANSFER-SUCCESS RETRIEVE "$key"
+							else
+								echo TRANSFER-FAILURE RETRIEVE "$key" "failed to download torrent contents from $url"
+							fi
+						fi
+						rm -f "$tmp"					
+					fi
+				;;
+			esac
+		;;

(Diff truncated)
update
diff --git a/doc/design/external_special_remote_protocol.mdwn b/doc/design/external_special_remote_protocol.mdwn
index 76d25bf..cc3bfce 100644
--- a/doc/design/external_special_remote_protocol.mdwn
+++ b/doc/design/external_special_remote_protocol.mdwn
@@ -131,10 +131,8 @@ replying with `UNSUPPORTED-REQUEST` is acceptable.
   If not, it can send `CLAIMURL-FAILURE`.
 * `CHECKURL Url`  
   Asks the remote to check if the url's content can currently be downloaded
-  (without downloading it). If the url is not accessible, send
-  `CHECKURL-FAILURE`. If the url is accessible and the size is known,
-  send the size in `CHECKURL-SIZE`. If the url is accessible, but the size
-  is unknown, send `CHECKURL-SIZEUNKOWN`.
+  (without downloading it). The remote replies with one of `CHECKURL-FAILURE`,
+  `CHECKURL-CONTENTS`, or `CHECKURL-MULTI`.
 
 More optional requests may be added, without changing the protocol version,
 so if an unknown request is seen, reply with `UNSUPPORTED-REQUEST`.

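As a rough illustration of the replies listed in the change above, the CHECKURL handler of an external special remote might look like this, in the same style as the git-annex-remote-torrent script elsewhere in this log. `can_access` and `list_files` are hypothetical helpers, and the sizes and filenames are made-up example values:

    while read line; do
        set -- $line
        case "$1" in
            CHECKURL)
                url="$2"
                if ! can_access "$url"; then
                    echo CHECKURL-FAILURE
                elif [ "$(list_files "$url" | wc -l)" -eq 1 ]; then
                    # a single item: reply with its size and a suggested filename
                    echo CHECKURL-CONTENTS 1048576 data.bin
                else
                    # several items: one "url size filename" triple per file
                    echo CHECKURL-MULTI "$url#1 1048576 foo $url#2 2097152 bar"
                fi
            ;;
            *)
                echo UNSUPPORTED-REQUEST
            ;;
        esac
    done
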
Expand checkurl to support recommended filename, and multi-file-urls
This commit was sponsored by an anonymous bitcoiner.
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 76095d6..6f14ed8 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -25,6 +25,7 @@ import Annex.Content
 import Logs.Web
 import Types.Key
 import Types.KeySource
+import Types.UrlContents
 import Config
 import Annex.Content.Direct
 import Logs.Location
@@ -50,73 +51,70 @@ relaxedOption :: Option
 relaxedOption = flagOption [] "relaxed" "skip size check"
 
 seek :: CommandSeek
-seek ps = do
-	f <- getOptionField fileOption return
+seek us = do
+	optfile <- getOptionField fileOption return
 	relaxed <- getOptionFlag relaxedOption
-	d <- getOptionField pathdepthOption (return . maybe Nothing readish)
-	withStrings (start relaxed f d) ps
-
-start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
-start relaxed optfile pathdepth s = do
-	r <- Remote.claimingUrl s
-	if Remote.uuid r == webUUID
-		then startWeb relaxed optfile pathdepth s
-		else startRemote r relaxed optfile pathdepth s
+	pathdepth <- getOptionField pathdepthOption (return . maybe Nothing readish)
+	forM_ us $ \u -> do
+		r <- Remote.claimingUrl u
+		if Remote.uuid r == webUUID
+			then void $ commandAction $ startWeb relaxed optfile pathdepth u
+			else do
+				let handlecontents url c = case c of
+					UrlContents sz mkf -> 
+						void $ commandAction $
+							startRemote r relaxed optfile pathdepth url sz mkf
+					UrlNested l ->
+						forM_ l $ \(url', c) ->
+							handlecontents url' c
+				res <- tryNonAsync $ maybe
+					(error "unable to checkUrl")
+					(flip id u)
+					(Remote.checkUrl r)
+				case res of
+					Left e -> void $ commandAction $ do
+						showStart "addurl" u
+						warning (show e)
+						next $ next $ return False
+					Right c -> handlecontents u c
 
-startRemote :: Remote -> Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
-startRemote r relaxed optfile pathdepth s = do
+startRemote :: Remote -> Bool -> Maybe FilePath -> Maybe Int -> String -> Maybe Integer -> (FilePath -> FilePath) -> CommandStart
+startRemote r relaxed optfile pathdepth s sz mkf = do
 	url <- case Url.parseURIRelaxed s of
 		Nothing -> error $ "bad uri " ++ s
 		Just u -> pure u
 	pathmax <- liftIO $ fileNameLengthLimit "."
-	let file = choosefile $ url2file url pathdepth pathmax
+	let file = mkf $ choosefile $ url2file url pathdepth pathmax
 	showStart "addurl" file
 	showNote $ "using " ++ Remote.name r 
-	next $ performRemote r relaxed s file
+	next $ performRemote r relaxed s file sz
   where
 	choosefile = flip fromMaybe optfile
 
-performRemote :: Remote -> Bool -> URLString -> FilePath -> CommandPerform
-performRemote r relaxed uri file = ifAnnexed file adduri geturi
+performRemote :: Remote -> Bool -> URLString -> FilePath -> Maybe Integer -> CommandPerform
+performRemote r relaxed uri file sz = ifAnnexed file adduri geturi
   where
 	loguri = setDownloader uri OtherDownloader
 	adduri = addUrlChecked relaxed loguri (Remote.uuid r) checkexistssize
-	checkexistssize key = do
-		res <- tryNonAsync $ Remote.checkUrl r uri
-		case res of
-			Left e -> do
-				warning (show e)
-				return (False, False)
-			Right Nothing ->
-				return (True, True)
-			Right (Just sz) ->
-				return (True, sz == fromMaybe sz (keySize key))
+	checkexistssize key = return $ case sz of
+		Nothing -> (True, True)
+		Just n -> (True, n == fromMaybe n (keySize key))
 	geturi = do
-		dummykey <- Backend.URL.fromUrl uri =<<
-			if relaxed
-				then return Nothing
-				else Remote.checkUrl r uri
+		urlkey <- Backend.URL.fromUrl uri sz
 		liftIO $ createDirectoryIfMissing True (parentDir file)
 		next $ ifM (Annex.getState Annex.fast <||> pure relaxed)
 			( do
-				res <- tryNonAsync $ Remote.checkUrl r uri
-				case res of
-					Left e -> do
-						warning (show e)
-						return False
-					Right size -> do
-						key <- Backend.URL.fromUrl uri size
-						cleanup (Remote.uuid r) loguri file key Nothing
-						return True
+				cleanup (Remote.uuid r) loguri file urlkey Nothing
+				return True
 			, do
-				-- Set temporary url for the dummy key
+				-- Set temporary url for the urlkey
 				-- so that the remote knows what url it
 				-- should use to download it.
-				setTempUrl dummykey uri
-				let downloader = Remote.retrieveKeyFile r dummykey (Just file)
+				setTempUrl urlkey uri
+				let downloader = Remote.retrieveKeyFile r urlkey (Just file)
 				ok <- isJust <$>
-					downloadWith downloader dummykey (Remote.uuid r) loguri file
-				removeTempUrl dummykey
+					downloadWith downloader urlkey (Remote.uuid r) loguri file
+				removeTempUrl urlkey
 				return ok
 			)
 
diff --git a/Remote/Bup.hs b/Remote/Bup.hs
index 405ce30..16f73a6 100644
--- a/Remote/Bup.hs
+++ b/Remote/Bup.hs
@@ -75,7 +75,7 @@ gen r u c gc = do
 		, mkUnavailable = return Nothing
 		, getInfo = return [("repo", buprepo)]
 		, claimUrl = Nothing
-		, checkUrl = const $ return Nothing
+		, checkUrl = Nothing
 		}
 	return $ Just $ specialRemote' specialcfg c
 		(simplyPrepare $ store this buprepo)
diff --git a/Remote/Ddar.hs b/Remote/Ddar.hs
index 1b8003d..f771930 100644
--- a/Remote/Ddar.hs
+++ b/Remote/Ddar.hs
@@ -72,7 +72,7 @@ gen r u c gc = do
 		, mkUnavailable = return Nothing
 		, getInfo = return [("repo", ddarrepo)]
 		, claimUrl = Nothing
-		, checkUrl = const $ return Nothing
+		, checkUrl = Nothing
 		}
 	ddarrepo = fromMaybe (error "missing ddarrepo") $ remoteAnnexDdarRepo gc
 	specialcfg = (specialRemoteCfg c)
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index fec40ba..b798ff0 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -70,7 +70,7 @@ gen r u c gc = do
 				gc { remoteAnnexDirectory = Just "/dev/null" },
 			getInfo = return [("directory", dir)],
 			claimUrl = Nothing,
-			checkUrl = const $ return Nothing
+			checkUrl = Nothing
 		}
   where
 	dir = fromMaybe (error "missing directory") $ remoteAnnexDirectory gc
diff --git a/Remote/External.hs b/Remote/External.hs
index 6267175..c5330f7 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -12,6 +12,7 @@ import qualified Annex
 import Common.Annex
 import Types.Remote
 import Types.CleanupActions
+import Types.UrlContents
 import qualified Git
 import Config
 import Remote.Helper.Special
@@ -71,7 +72,7 @@ gen r u c gc = do
 				gc { remoteAnnexExternalType = Just "!dne!" },
 			getInfo = return [("externaltype", externaltype)],
 			claimUrl = Just (claimurl external),
-			checkUrl = checkurl external
+			checkUrl = Just (checkurl external)
 		}
   where
 	externaltype = fromMaybe (error "missing externaltype") (remoteAnnexExternalType gc)
@@ -429,11 +430,14 @@ claimurl external url =
 		UNSUPPORTED_REQUEST -> Just $ return False
 		_ -> Nothing
 
-checkurl :: External -> URLString -> Annex (Maybe Integer)
+checkurl :: External -> URLString -> Annex UrlContents
 checkurl external url = 
 	handleRequest external (CHECKURL url) Nothing $ \req -> case req of
-		CHECKURL_SIZE sz -> Just $ return $ Just sz
-		CHECKURL_SIZEUNKNOWN -> Just $ return Nothing
+		CHECKURL_CONTENTS sz f -> Just $ return $ UrlContents sz
+			(if null f then id else const f)

(Diff truncated)
Revert "let url claims optionally include a suggested filename"
This reverts commit 85df9c30e97068fdc765c16f7f189d09c7dbad6a.
Putting filename in the claim was a bad idea.
diff --git a/Annex/URLClaim.hs b/Annex/URLClaim.hs
deleted file mode 100644
index 3acb28e..0000000
--- a/Annex/URLClaim.hs
+++ /dev/null
@@ -1,29 +0,0 @@
-{- Url claim checking.
- -
- - Copyright 2013-2014 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU GPL version 3 or higher.
- -}
-
-module Annex.URLClaim (
-	URLClaim(..),
-	urlClaim
-) where
-
-import Common.Annex
-import Types.URLClaim
-import Logs.Web
-import Remote
-import qualified Types.Remote as Remote
-
-urlClaim :: URLString -> Annex (Remote, URLClaim)
-urlClaim url = do
-	rs <- remoteList
-	-- The web special remote claims urls by default.
-	let web = Prelude.head $ filter (\r -> uuid r == webUUID) rs
-	fromMaybe (web, URLClaimed) <$> getM (\r -> ret r <$> checkclaim r) rs
-  where
-	checkclaim = maybe (pure Nothing) (flip id url) . Remote.claimUrl
-
-	ret _ Nothing = Nothing
-	ret r (Just c) = Just (r, c)
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 27c8359..76095d6 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -21,7 +21,6 @@ import qualified Annex.Url as Url
 import qualified Backend.URL
 import qualified Remote
 import qualified Types.Remote as Remote
-import Annex.URLClaim
 import Annex.Content
 import Logs.Web
 import Types.Key
@@ -59,23 +58,23 @@ seek ps = do
 
 start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
 start relaxed optfile pathdepth s = do
-	(r, claim) <- urlClaim s
+	r <- Remote.claimingUrl s
 	if Remote.uuid r == webUUID
 		then startWeb relaxed optfile pathdepth s
-		else startRemote r claim relaxed optfile pathdepth s
+		else startRemote r relaxed optfile pathdepth s
 
-startRemote :: Remote -> URLClaim -> Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
-startRemote r claim relaxed optfile pathdepth s = do
+startRemote :: Remote -> Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
+startRemote r relaxed optfile pathdepth s = do
 	url <- case Url.parseURIRelaxed s of
 		Nothing -> error $ "bad uri " ++ s
 		Just u -> pure u
 	pathmax <- liftIO $ fileNameLengthLimit "."
-	let file = flip fromMaybe optfile $ case claim of
-		URLClaimedAs f -> f
-		URLClaimed -> url2file url pathdepth pathmax
+	let file = choosefile $ url2file url pathdepth pathmax
 	showStart "addurl" file
 	showNote $ "using " ++ Remote.name r 
 	next $ performRemote r relaxed s file
+  where
+	choosefile = flip fromMaybe optfile
 
 performRemote :: Remote -> Bool -> URLString -> FilePath -> CommandPerform
 performRemote r relaxed uri file = ifAnnexed file adduri geturi
diff --git a/Command/ReKey.hs b/Command/ReKey.hs
index a0348d8..5dadf4e 100644
--- a/Command/ReKey.hs
+++ b/Command/ReKey.hs
@@ -16,7 +16,6 @@ import qualified Command.Add
 import Logs.Web
 import Logs.Location
 import Utility.CopyFile
-import Annex.URLClaim
 import qualified Remote
 
 cmd :: [Command]
@@ -64,7 +63,7 @@ cleanup file oldkey newkey = do
 	-- the new key as well.
 	urls <- getUrls oldkey
 	forM_ urls $ \url -> do
-		r <- fst <$> urlClaim url
+		r <- Remote.claimingUrl url
 		setUrlPresent (Remote.uuid r) newkey url
 
 	-- Update symlink to use the new key.
diff --git a/Command/RmUrl.hs b/Command/RmUrl.hs
index 737c935..5700042 100644
--- a/Command/RmUrl.hs
+++ b/Command/RmUrl.hs
@@ -10,7 +10,6 @@ module Command.RmUrl where
 import Common.Annex
 import Command
 import Logs.Web
-import Annex.URLClaim
 import qualified Remote
 
 cmd :: [Command]
@@ -28,7 +27,7 @@ start (file, url) = flip whenAnnexed file $ \_ key -> do
 
 cleanup :: String -> Key -> CommandCleanup
 cleanup url key = do
-	r <- fst <$> urlClaim url
+	r <- Remote.claimingUrl url
 	let url' = if Remote.uuid r == webUUID
 		then url
 		else setDownloader url OtherDownloader
diff --git a/Command/Whereis.hs b/Command/Whereis.hs
index 5f75bad..314c204 100644
--- a/Command/Whereis.hs
+++ b/Command/Whereis.hs
@@ -14,7 +14,6 @@ import Command
 import Remote
 import Logs.Trust
 import Logs.Web
-import Annex.URLClaim
 
 cmd :: [Command]
 cmd = [noCommit $ withOptions (jsonOption : keyOptions) $
@@ -72,4 +71,4 @@ performRemote key remote = do
 			. filter (\(_, d) -> d == OtherDownloader)
 			. map getDownloader
 			<$> getUrls key
-		filterM (\u -> (==) <$> pure remote <*> (fst <$> urlClaim u)) us
+		filterM (\u -> (==) <$> pure remote <*> claimingUrl u) us
diff --git a/Remote.hs b/Remote.hs
index dd68249..65e7253 100644
--- a/Remote.hs
+++ b/Remote.hs
@@ -46,6 +46,7 @@ module Remote (
 	logStatus,
 	checkAvailable,
 	isXMPPRemote,
+	claimingUrl,
 ) where
 
 import qualified Data.Map as M
@@ -60,6 +61,7 @@ import Annex.UUID
 import Logs.UUID
 import Logs.Trust
 import Logs.Location hiding (logStatus)
+import Logs.Web
 import Remote.List
 import Config
 import Git.Types (RemoteName)
@@ -318,3 +320,12 @@ hasKey r k = either (Left  . show) Right <$> tryNonAsync (checkPresent r k)
 
 hasKeyCheap :: Remote -> Bool
 hasKeyCheap = checkPresentCheap
+
+{- The web special remote claims urls by default. -}
+claimingUrl :: URLString -> Annex Remote
+claimingUrl url = do
+	rs <- remoteList
+	let web = Prelude.head $ filter (\r -> uuid r == webUUID) rs
+	fromMaybe web <$> firstM checkclaim rs
+  where
+	checkclaim = maybe (pure False) (flip id url) . claimUrl
diff --git a/Remote/External.hs b/Remote/External.hs
index baae1ab..6267175 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -12,7 +12,6 @@ import qualified Annex
 import Common.Annex
 import Types.Remote
 import Types.CleanupActions
-import Types.URLClaim
 import qualified Git
 import Config
 import Remote.Helper.Special
@@ -422,13 +421,12 @@ getAvailability external r gc = maybe query return (remoteAnnexAvailability gc)
 		setRemoteAvailability r avail
 		return avail
 
-claimurl :: External -> URLString -> Annex (Maybe URLClaim)
+claimurl :: External -> URLString -> Annex Bool
 claimurl external url =
 	handleRequest external (CLAIMURL url) Nothing $ \req -> case req of
-		CLAIMURL_SUCCESS -> Just $ return $ Just URLClaimed
-		(CLAIMURL_AS f) -> Just $ return $ Just $ URLClaimedAs f
-		CLAIMURL_FAILURE -> Just $ return Nothing
-		UNSUPPORTED_REQUEST -> Just $ return Nothing
+		CLAIMURL_SUCCESS -> Just $ return True
+		CLAIMURL_FAILURE -> Just $ return False
+		UNSUPPORTED_REQUEST -> Just $ return False
 		_ -> Nothing
 

(Diff truncated)
Added a comment: Trying to make git gc pass with a huge swap
diff --git a/doc/forum/repair_stuck_on_ls-tree_command/comment_6_061770159851c0f06a962937dff035b9._comment b/doc/forum/repair_stuck_on_ls-tree_command/comment_6_061770159851c0f06a962937dff035b9._comment
new file mode 100644
index 0000000..5f73e65
--- /dev/null
+++ b/doc/forum/repair_stuck_on_ls-tree_command/comment_6_061770159851c0f06a962937dff035b9._comment
@@ -0,0 +1,30 @@
+[[!comment format=mdwn
+ username="vho"
+ subject="Trying to make git gc pass with a huge swap"
+ date="2014-12-11T16:52:16Z"
+ content="""
+I have created a 100 GB swap file and ran git gc again.
+It ate my 8 GB of RAM + 38 GB of swap. It ended successfully:
+
+    vincent@berlioz:~/photos2$ time git gc --aggressive
+    Décompte des objets: 2263783, fait.
+    Compression des objets: 100% (2253014/2253014), fait.
+    Écriture des objets: 100% (2263783/2263783), fait.
+    Total 2263783 (delta 1567807), reused 284965 (delta 0)
+    Suppression des objets dupliqués: 100% (256/256), fait.
+    Vérification de la connectivité: 2263783, fait.
+    
+    real	110m18.565s
+    user	37m38.225s
+    sys	3m23.457s
+
+Yet the following git annex sync didn't do better than previous ones and took 285 minutes to complete.
+
+I am going to check my hard disk drive to see if there isn't some third-party problem.
+
+    vincent@berlioz:~/photos2$ du -sh --exclude=.git/annex
+    510M	.
+
+Yet it shrank by 300 MB.
+
+"""]]

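For reference, a swap file of roughly that size can be set up along these lines (the path and size are only examples):

    sudo fallocate -l 100G /swapfile   # or: dd if=/dev/zero of=/swapfile bs=1M count=102400
    sudo chmod 600 /swapfile
    sudo mkswap /swapfile
    sudo swapon /swapfile
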
let url claims optionally include a suggested filename
diff --git a/Annex/URLClaim.hs b/Annex/URLClaim.hs
new file mode 100644
index 0000000..3acb28e
--- /dev/null
+++ b/Annex/URLClaim.hs
@@ -0,0 +1,29 @@
+{- Url claim checking.
+ -
+ - Copyright 2013-2014 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Annex.URLClaim (
+	URLClaim(..),
+	urlClaim
+) where
+
+import Common.Annex
+import Types.URLClaim
+import Logs.Web
+import Remote
+import qualified Types.Remote as Remote
+
+urlClaim :: URLString -> Annex (Remote, URLClaim)
+urlClaim url = do
+	rs <- remoteList
+	-- The web special remote claims urls by default.
+	let web = Prelude.head $ filter (\r -> uuid r == webUUID) rs
+	fromMaybe (web, URLClaimed) <$> getM (\r -> ret r <$> checkclaim r) rs
+  where
+	checkclaim = maybe (pure Nothing) (flip id url) . Remote.claimUrl
+
+	ret _ Nothing = Nothing
+	ret r (Just c) = Just (r, c)
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 76095d6..27c8359 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -21,6 +21,7 @@ import qualified Annex.Url as Url
 import qualified Backend.URL
 import qualified Remote
 import qualified Types.Remote as Remote
+import Annex.URLClaim
 import Annex.Content
 import Logs.Web
 import Types.Key
@@ -58,23 +59,23 @@ seek ps = do
 
 start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
 start relaxed optfile pathdepth s = do
-	r <- Remote.claimingUrl s
+	(r, claim) <- urlClaim s
 	if Remote.uuid r == webUUID
 		then startWeb relaxed optfile pathdepth s
-		else startRemote r relaxed optfile pathdepth s
+		else startRemote r claim relaxed optfile pathdepth s
 
-startRemote :: Remote -> Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
-startRemote r relaxed optfile pathdepth s = do
+startRemote :: Remote -> URLClaim -> Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
+startRemote r claim relaxed optfile pathdepth s = do
 	url <- case Url.parseURIRelaxed s of
 		Nothing -> error $ "bad uri " ++ s
 		Just u -> pure u
 	pathmax <- liftIO $ fileNameLengthLimit "."
-	let file = choosefile $ url2file url pathdepth pathmax
+	let file = flip fromMaybe optfile $ case claim of
+		URLClaimedAs f -> f
+		URLClaimed -> url2file url pathdepth pathmax
 	showStart "addurl" file
 	showNote $ "using " ++ Remote.name r 
 	next $ performRemote r relaxed s file
-  where
-	choosefile = flip fromMaybe optfile
 
 performRemote :: Remote -> Bool -> URLString -> FilePath -> CommandPerform
 performRemote r relaxed uri file = ifAnnexed file adduri geturi
diff --git a/Command/ReKey.hs b/Command/ReKey.hs
index 5dadf4e..a0348d8 100644
--- a/Command/ReKey.hs
+++ b/Command/ReKey.hs
@@ -16,6 +16,7 @@ import qualified Command.Add
 import Logs.Web
 import Logs.Location
 import Utility.CopyFile
+import Annex.URLClaim
 import qualified Remote
 
 cmd :: [Command]
@@ -63,7 +64,7 @@ cleanup file oldkey newkey = do
 	-- the new key as well.
 	urls <- getUrls oldkey
 	forM_ urls $ \url -> do
-		r <- Remote.claimingUrl url
+		r <- fst <$> urlClaim url
 		setUrlPresent (Remote.uuid r) newkey url
 
 	-- Update symlink to use the new key.
diff --git a/Command/RmUrl.hs b/Command/RmUrl.hs
index 5700042..737c935 100644
--- a/Command/RmUrl.hs
+++ b/Command/RmUrl.hs
@@ -10,6 +10,7 @@ module Command.RmUrl where
 import Common.Annex
 import Command
 import Logs.Web
+import Annex.URLClaim
 import qualified Remote
 
 cmd :: [Command]
@@ -27,7 +28,7 @@ start (file, url) = flip whenAnnexed file $ \_ key -> do
 
 cleanup :: String -> Key -> CommandCleanup
 cleanup url key = do
-	r <- Remote.claimingUrl url
+	r <- fst <$> urlClaim url
 	let url' = if Remote.uuid r == webUUID
 		then url
 		else setDownloader url OtherDownloader
diff --git a/Command/Whereis.hs b/Command/Whereis.hs
index 314c204..5f75bad 100644
--- a/Command/Whereis.hs
+++ b/Command/Whereis.hs
@@ -14,6 +14,7 @@ import Command
 import Remote
 import Logs.Trust
 import Logs.Web
+import Annex.URLClaim
 
 cmd :: [Command]
 cmd = [noCommit $ withOptions (jsonOption : keyOptions) $
@@ -71,4 +72,4 @@ performRemote key remote = do
 			. filter (\(_, d) -> d == OtherDownloader)
 			. map getDownloader
 			<$> getUrls key
-		filterM (\u -> (==) <$> pure remote <*> claimingUrl u) us
+		filterM (\u -> (==) <$> pure remote <*> (fst <$> urlClaim u)) us
diff --git a/Remote.hs b/Remote.hs
index 65e7253..dd68249 100644
--- a/Remote.hs
+++ b/Remote.hs
@@ -46,7 +46,6 @@ module Remote (
 	logStatus,
 	checkAvailable,
 	isXMPPRemote,
-	claimingUrl,
 ) where
 
 import qualified Data.Map as M
@@ -61,7 +60,6 @@ import Annex.UUID
 import Logs.UUID
 import Logs.Trust
 import Logs.Location hiding (logStatus)
-import Logs.Web
 import Remote.List
 import Config
 import Git.Types (RemoteName)
@@ -320,12 +318,3 @@ hasKey r k = either (Left  . show) Right <$> tryNonAsync (checkPresent r k)
 
 hasKeyCheap :: Remote -> Bool
 hasKeyCheap = checkPresentCheap
-
-{- The web special remote claims urls by default. -}
-claimingUrl :: URLString -> Annex Remote
-claimingUrl url = do
-	rs <- remoteList
-	let web = Prelude.head $ filter (\r -> uuid r == webUUID) rs
-	fromMaybe web <$> firstM checkclaim rs
-  where
-	checkclaim = maybe (pure False) (flip id url) . claimUrl
diff --git a/Remote/External.hs b/Remote/External.hs
index 6267175..baae1ab 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -12,6 +12,7 @@ import qualified Annex
 import Common.Annex
 import Types.Remote
 import Types.CleanupActions
+import Types.URLClaim
 import qualified Git
 import Config
 import Remote.Helper.Special
@@ -421,12 +422,13 @@ getAvailability external r gc = maybe query return (remoteAnnexAvailability gc)
 		setRemoteAvailability r avail
 		return avail
 
-claimurl :: External -> URLString -> Annex Bool
+claimurl :: External -> URLString -> Annex (Maybe URLClaim)
 claimurl external url =
 	handleRequest external (CLAIMURL url) Nothing $ \req -> case req of
-		CLAIMURL_SUCCESS -> Just $ return True
-		CLAIMURL_FAILURE -> Just $ return False
-		UNSUPPORTED_REQUEST -> Just $ return False
+		CLAIMURL_SUCCESS -> Just $ return $ Just URLClaimed
+		(CLAIMURL_AS f) -> Just $ return $ Just $ URLClaimedAs f
+		CLAIMURL_FAILURE -> Just $ return Nothing
+		UNSUPPORTED_REQUEST -> Just $ return Nothing
 		_ -> Nothing
 

(Diff truncated)
Added a comment
diff --git a/doc/bugs/git_annex_assistant_leaks_a_lot_of_memory/comment_4_d597e15e9693ce6e8c1ca80cb225b044._comment b/doc/bugs/git_annex_assistant_leaks_a_lot_of_memory/comment_4_d597e15e9693ce6e8c1ca80cb225b044._comment
new file mode 100644
index 0000000..ccd3530
--- /dev/null
+++ b/doc/bugs/git_annex_assistant_leaks_a_lot_of_memory/comment_4_d597e15e9693ce6e8c1ca80cb225b044._comment
@@ -0,0 +1,21 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawntodrSgODU27WUCyN2PV7TC14YMkyaoxQ"
+ nickname="Dennis"
+ subject="comment 4"
+ date="2014-12-11T10:58:21Z"
+ content="""
+I suffer from the same issue.
+
+In ~/.profile I put the line `git-annex assistant --quiet --autostart --startdelay=5s > /dev/null &`.
+
+Htop shows:
+
+    26291 xxx      20   0 6584M 3123M  9864 S  0.0 81.6 29h29:17 git-annex assistant --startdelay=5s
+    26292 xxx      20   0 6584M 3123M  9864 S  0.0 81.6 12h38:32 git-annex assistant --startdelay=5s
+    26293 xxx      20   0 6584M 3123M  9864 S  0.0 81.6 21h45:49 git-annex assistant --startdelay=5s
+    26321 xxx      20   0 6584M 3123M  9864 D  0.0 81.6 18:34.12 git-annex assistant --startdelay=5s
+    21019 xxx      20   0 6584M 3123M  9864 S  0.0 81.6  0:00.08 git-annex assistant --startdelay=5s
+    21908 xxx      20   0 6584M 3123M  9864 S  0.0 81.6  0:00.00 git-annex assistant --startdelay=5s
+    26290 xxx      20   0 6584M 3123M  9864 S  0.0 81.6 64h33:50 git-annex assistant --startdelay=5s
+
+"""]]

add Baobáxia
diff --git a/doc/related_software.mdwn b/doc/related_software.mdwn
index 1e5c0f6..f2b8e60 100644
--- a/doc/related_software.mdwn
+++ b/doc/related_software.mdwn
@@ -15,3 +15,7 @@ designed to interoperate with it.
   [an extension](https://github.com/magit/magit-annex) for git annex.
 * [DataLad](http://datalad.org/) uses git-annex to provide access to
   scientific data available from various sources.
+* The [Baobáxia](https://github.com/RedeMocambos/baobaxia) project
+  built by the Brazilian [Mocambos network](http://www.mocambos.net/)
+  is [using git-annex to connect isolated communities](http://www.modspil.dk/itpolitik/baob_xia.html).
+  Repositories sync over satellite internet and/or sneakernet.

Added a comment: I think I got it working; however, the same setup works differently on fat and exfat
diff --git a/doc/forum/Bare_repo_on_USB_drive_not_providing_files/comment_4_3bbce5d12ece481b669be10ef9a70f40._comment b/doc/forum/Bare_repo_on_USB_drive_not_providing_files/comment_4_3bbce5d12ece481b669be10ef9a70f40._comment
new file mode 100644
index 0000000..e70066b
--- /dev/null
+++ b/doc/forum/Bare_repo_on_USB_drive_not_providing_files/comment_4_3bbce5d12ece481b669be10ef9a70f40._comment
@@ -0,0 +1,28 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmTNrhkVQ26GBLaLD5-zNuEiR8syTj4mI8"
+ nickname="Juan"
+ subject="I think I got it working, however, same setup works different on fat and exfat"
+ date="2014-12-09T15:04:37Z"
+ content="""
+After struggling a lot, I tried to do the same but with a fat32 drive. It seems that some of the problems I'm experiencing are due to some (linux) filesystem functions working differently on fat and exfat.
+Just to get this right: using repos on a USB drive formatted as fat32 works perfectly under linux and mac. When I go to the exfat drive, linux fails, and osx keeps working.
+Are you able to test it?
+Steps to reproduce it:
+1) make a test repo on linux: cd /tmp; mkdir test_repo; cd test_repo; git init; git annex init \"test repo on linux box\"
+2) go to the exfat drive and clone it: git clone /tmp/test_repo
+
+> git clone /home/juan/juanc/tmp/prueba_git 
+Cloning into 'prueba_git'... 
+error: chmod on /media/juan/USB_LIVE/tmp/test1/prueba_git/.git/config.lock failed: Function not implemented 
+error: chmod on /media/juan/USB_LIVE/tmp/test1/prueba_git/.git/config.lock failed: Function not implemented 
+error: chmod on /media/juan/USB_LIVE/tmp/test1/prueba_git/.git/config.lock failed: Function not implemented 
+error: chmod on /media/juan/USB_LIVE/tmp/test1/prueba_git/.git/config.lock failed: Function not implemented 
+error: chmod on /media/juan/USB_LIVE/tmp/test1/prueba_git/.git/config.lock failed: Function not implemented  
+error: chmod on /media/juan/USB_LIVE/tmp/test1/prueba_git/.git/config.lock failed: Function not implemented 
+fatal: 'origin' does not appear to be a git repository 
+fatal: Could not read from remote repository. 
+ 
+Please make sure you have the correct access rights 
+and the repository exists. 
+
+"""]]

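A cleaned-up version of the reproduction steps from the comment above; the exfat mount point is hypothetical:

    cd /tmp && mkdir test_repo && cd test_repo
    git init
    git annex init "test repo on linux box"
    # then clone it onto the exfat-formatted drive
    cd /media/usb-exfat
    git clone /tmp/test_repo
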
Added a comment: Eager for the TagFS features in git-annex.
diff --git a/doc/forum/git-annex_and_tagfs/comment_2_8769d6e57061023c0828af3e38faafba._comment b/doc/forum/git-annex_and_tagfs/comment_2_8769d6e57061023c0828af3e38faafba._comment
new file mode 100644
index 0000000..2bb4ec2
--- /dev/null
+++ b/doc/forum/git-annex_and_tagfs/comment_2_8769d6e57061023c0828af3e38faafba._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="exaos"
+ subject="Be eager for the TagFS features on git-annex."
+ date="2014-12-09T01:15:23Z"
+ content="""
+Can't wait to see these implemented. But I don't know programming. :-(
+"""]]

typo
diff --git a/doc/design/external_special_remote_protocol.mdwn b/doc/design/external_special_remote_protocol.mdwn
index 6c06fd9..072c5a1 100644
--- a/doc/design/external_special_remote_protocol.mdwn
+++ b/doc/design/external_special_remote_protocol.mdwn
@@ -134,7 +134,7 @@ replying with `UNSUPPORTED-REQUEST` is acceptable.
   (without downloading it). If the url is not accessible, send
   `CHECKURL-FAILURE`. If the url is accessible and the size is known,
   send the size in `CHECKURL-SIZE`. If the url is accessible, but the size
-  is unknown, send `CHECKURL-SIZEUNOWN`.
+  is unknown, send `CHECKURL-SIZEUNKOWN`.
 
 More optional requests may be added, without changing the protocol version,
 so if an unknown request is seen, reply with `UNSUPPORTED-REQUEST`.

Urls can now be claimed by remotes. This will allow creating, for example, an external special remote that handles magnet: and *.torrent urls.
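
For instance, the url test at the heart of such a remote could be as small as this sketch; a real remote would call it from its CLAIMURL handler and reply CLAIMURL-SUCCESS or CLAIMURL-FAILURE accordingly:

    # decide whether this remote wants to claim the given url
    claims_url () {
        case "$1" in
            magnet:*|*.torrent) return 0 ;;
            *) return 1 ;;
        esac
    }
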
diff --git a/Annex.hs b/Annex.hs
index a04bfd1..82a378f 100644
--- a/Annex.hs
+++ b/Annex.hs
@@ -63,6 +63,7 @@ import Types.CleanupActions
 import Utility.Quvi (QuviVersion)
 #endif
 import Utility.InodeCache
+import Utility.Url
 
 import "mtl" Control.Monad.Reader
 import Control.Concurrent
@@ -128,6 +129,7 @@ data AnnexState = AnnexState
 	, useragent :: Maybe String
 	, errcounter :: Integer
 	, unusedkeys :: Maybe (S.Set Key)
+	, tempurls :: M.Map Key URLString
 #ifdef WITH_QUVI
 	, quviversion :: Maybe QuviVersion
 #endif
@@ -173,6 +175,7 @@ newState c r = AnnexState
 	, useragent = Nothing
 	, errcounter = 0
 	, unusedkeys = Nothing
+	, tempurls = M.empty
 #ifdef WITH_QUVI
 	, quviversion = Nothing
 #endif
diff --git a/Assistant/Upgrade.hs b/Assistant/Upgrade.hs
index a05d5e3..c92cc30 100644
--- a/Assistant/Upgrade.hs
+++ b/Assistant/Upgrade.hs
@@ -80,7 +80,7 @@ startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO ol
   where
 	go Nothing = debug ["Skipping redundant upgrade"]
 	go (Just dest) = do
-		liftAnnex $ setUrlPresent k u
+		liftAnnex $ setUrlPresent webUUID k u
 		hook <- asIO1 $ distributionDownloadComplete d dest cleanup
 		modifyDaemonStatus_ $ \s -> s
 			{ transferHook = M.insert k hook (transferHook s) }
@@ -97,7 +97,7 @@ startDistributionDownload d = go =<< liftIO . newVersionLocation d =<< liftIO ol
 		}
 	cleanup = liftAnnex $ do
 		lockContent k removeAnnex
-		setUrlMissing k u
+		setUrlMissing webUUID k u
 		logStatus k InfoMissing
 
 {- Called once the download is done.
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 81da676..76095d6 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -1,6 +1,6 @@
 {- git-annex command
  -
- - Copyright 2011-2013 Joey Hess <joey@kitenet.net>
+ - Copyright 2011-2014 Joey Hess <joey@kitenet.net>
  -
  - Licensed under the GNU GPL version 3 or higher.
  -}
@@ -19,6 +19,8 @@ import qualified Annex
 import qualified Annex.Queue
 import qualified Annex.Url as Url
 import qualified Backend.URL
+import qualified Remote
+import qualified Types.Remote as Remote
 import Annex.Content
 import Logs.Web
 import Types.Key
@@ -26,6 +28,7 @@ import Types.KeySource
 import Config
 import Annex.Content.Direct
 import Logs.Location
+import Utility.Metered
 import qualified Annex.Transfer as Transfer
 #ifdef WITH_QUVI
 import Annex.Quvi
@@ -54,7 +57,71 @@ seek ps = do
 	withStrings (start relaxed f d) ps
 
 start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
-start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
+start relaxed optfile pathdepth s = do
+	r <- Remote.claimingUrl s
+	if Remote.uuid r == webUUID
+		then startWeb relaxed optfile pathdepth s
+		else startRemote r relaxed optfile pathdepth s
+
+startRemote :: Remote -> Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
+startRemote r relaxed optfile pathdepth s = do
+	url <- case Url.parseURIRelaxed s of
+		Nothing -> error $ "bad uri " ++ s
+		Just u -> pure u
+	pathmax <- liftIO $ fileNameLengthLimit "."
+	let file = choosefile $ url2file url pathdepth pathmax
+	showStart "addurl" file
+	showNote $ "using " ++ Remote.name r 
+	next $ performRemote r relaxed s file
+  where
+	choosefile = flip fromMaybe optfile
+
+performRemote :: Remote -> Bool -> URLString -> FilePath -> CommandPerform
+performRemote r relaxed uri file = ifAnnexed file adduri geturi
+  where
+	loguri = setDownloader uri OtherDownloader
+	adduri = addUrlChecked relaxed loguri (Remote.uuid r) checkexistssize
+	checkexistssize key = do
+		res <- tryNonAsync $ Remote.checkUrl r uri
+		case res of
+			Left e -> do
+				warning (show e)
+				return (False, False)
+			Right Nothing ->
+				return (True, True)
+			Right (Just sz) ->
+				return (True, sz == fromMaybe sz (keySize key))
+	geturi = do
+		dummykey <- Backend.URL.fromUrl uri =<<
+			if relaxed
+				then return Nothing
+				else Remote.checkUrl r uri
+		liftIO $ createDirectoryIfMissing True (parentDir file)
+		next $ ifM (Annex.getState Annex.fast <||> pure relaxed)
+			( do
+				res <- tryNonAsync $ Remote.checkUrl r uri
+				case res of
+					Left e -> do
+						warning (show e)
+						return False
+					Right size -> do
+						key <- Backend.URL.fromUrl uri size
+						cleanup (Remote.uuid r) loguri file key Nothing
+						return True
+			, do
+				-- Set temporary url for the dummy key
+				-- so that the remote knows what url it
+				-- should use to download it.
+				setTempUrl dummykey uri
+				let downloader = Remote.retrieveKeyFile r dummykey (Just file)
+				ok <- isJust <$>
+					downloadWith downloader dummykey (Remote.uuid r) loguri file
+				removeTempUrl dummykey
+				return ok
+			)
+
+startWeb :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
+startWeb relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
   where
 	(s', downloader) = getDownloader s
 	bad = fromMaybe (error $ "bad url " ++ s') $
@@ -62,7 +129,7 @@ start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
 	choosefile = flip fromMaybe optfile
 	go url = case downloader of
 		QuviDownloader -> usequvi
-		DefaultDownloader -> 
+		_ -> 
 #ifdef WITH_QUVI
 			ifM (quviSupported s')
 				( usequvi
@@ -75,7 +142,7 @@ start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
 		pathmax <- liftIO $ fileNameLengthLimit "."
 		let file = choosefile $ url2file url pathdepth pathmax
 		showStart "addurl" file
-		next $ perform relaxed s' file
+		next $ performWeb relaxed s' file
 #ifdef WITH_QUVI
 	badquvi = error $ "quvi does not know how to download url " ++ s'
 	usequvi = do
@@ -96,7 +163,9 @@ performQuvi :: Bool -> URLString -> URLString -> FilePath -> CommandPerform
 performQuvi relaxed pageurl videourl file = ifAnnexed file addurl geturl
   where
 	quviurl = setDownloader pageurl QuviDownloader
-	addurl key = next $ cleanup quviurl file key Nothing
+	addurl key = next $ do
+		cleanup webUUID quviurl file key Nothing
+		return True
 	geturl = next $ isJust <$> addUrlFileQuvi relaxed quviurl videourl file
 #endif
 
@@ -106,7 +175,7 @@ addUrlFileQuvi relaxed quviurl videourl file = do
 	key <- Backend.URL.fromUrl quviurl Nothing
 	ifM (pure relaxed <||> Annex.getState Annex.fast)
 		( do
-			cleanup' quviurl file key Nothing
+			cleanup webUUID quviurl file key Nothing
 			return (Just key)
 		, do
 			{- Get the size, and use that to check
@@ -124,55 +193,65 @@ addUrlFileQuvi relaxed quviurl videourl file = do
 						downloadUrl [videourl] tmp
 				if ok
 					then do
-						cleanup' quviurl file key (Just tmp)
+						cleanup webUUID quviurl file key (Just tmp)
 						return (Just key)
 					else return Nothing
 		)
 #endif

(Diff truncated)
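
A rough, self-contained sketch of the dispatch idea above: each remote can
optionally claim an url, and the web special remote is the fallback that
implicitly claims everything. The names below (RemoteStub, pickRemote) are
simplified stand-ins for illustration, not git-annex's real Remote interface.

    import Data.List (find)
    import Data.Maybe (fromMaybe)

    -- Simplified stand-in for a remote: a name plus an optional url claim test.
    data RemoteStub = RemoteStub
        { stubName :: String
        , stubClaimUrl :: Maybe (String -> Bool) -- Nothing: never claims urls
        }

    -- Pick the first remote that claims the url, falling back to the web
    -- remote, which implicitly claims everything.
    pickRemote :: [RemoteStub] -> RemoteStub -> String -> RemoteStub
    pickRemote remotes web url = fromMaybe web (find claims remotes)
      where
        claims r = maybe False ($ url) (stubClaimUrl r)

    main :: IO ()
    main = putStrLn $ stubName $ pickRemote [torrent] web "magnet:?xt=urn:btih:x"
      where
        torrent = RemoteStub "torrent" (Just (\u -> take 7 u == "magnet:"))
        web = RemoteStub "web" Nothing
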
Added a comment: URL backend file paths hit the 260 character file path limit on Windows
diff --git a/doc/tips/using_the_web_as_a_special_remote/comment_10_14fb0b1ee50136e0f78ee2b2a6871467._comment b/doc/tips/using_the_web_as_a_special_remote/comment_10_14fb0b1ee50136e0f78ee2b2a6871467._comment
new file mode 100644
index 0000000..0d1790c
--- /dev/null
+++ b/doc/tips/using_the_web_as_a_special_remote/comment_10_14fb0b1ee50136e0f78ee2b2a6871467._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="edward"
+ subject="URL backend file paths hit the 260 character file path limit on Windows"
+ date="2014-12-08T19:13:39Z"
+ content="""
+It isn't possible to checkout a git annex repository on Windows that includes quvi videos because the file path is often greater than 260 characters.
+
+See [[bugs/\"git-annex: direct: 1 failed\" on Windows]].
+"""]]

implement CLAIMURL for external special remote
diff --git a/Remote/External.hs b/Remote/External.hs
index a852656..97aa247 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -70,7 +70,7 @@ gen r u c gc = do
 			mkUnavailable = gen r u c $
 				gc { remoteAnnexExternalType = Just "!dne!" },
 			getInfo = return [("externaltype", externaltype)],
-			claimUrl = Nothing
+			claimUrl = Just (claimurl external)
 		}
   where
 	externaltype = fromMaybe (error "missing externaltype") (remoteAnnexExternalType gc)
@@ -416,3 +416,12 @@ getAvailability external r gc = maybe query return (remoteAnnexAvailability gc)
 			_ -> Nothing
 		setRemoteAvailability r avail
 		return avail
+
+claimurl :: External -> URLString -> Annex Bool
+claimurl external url =
+	handleRequest external (CLAIMURL url) Nothing $ \req -> case req of
+		CLAIMURL_SUCCESS -> Just $ return True
+		CLAIMURL_FAILURE -> Just $ return False
+		UNSUPPORTED_REQUEST -> Just $ return False
+		_ -> Nothing
+
diff --git a/Remote/External/Types.hs b/Remote/External/Types.hs
index cdcb657..2fc29e5 100644
--- a/Remote/External/Types.hs
+++ b/Remote/External/Types.hs
@@ -39,6 +39,7 @@ import Logs.Transfer (Direction(..))
 import Config.Cost (Cost)
 import Types.Remote (RemoteConfig)
 import Types.Availability (Availability(..))
+import Utility.Url (URLString)
 import qualified Utility.SimpleProtocol as Proto
 
 import Control.Concurrent.STM
@@ -90,6 +91,7 @@ data Request
 	| INITREMOTE
 	| GETCOST
 	| GETAVAILABILITY
+	| CLAIMURL URLString
 	| TRANSFER Direction Key FilePath
 	| CHECKPRESENT Key
 	| REMOVE Key
@@ -106,6 +108,7 @@ instance Proto.Sendable Request where
 	formatMessage INITREMOTE = ["INITREMOTE"]
 	formatMessage GETCOST = ["GETCOST"]
 	formatMessage GETAVAILABILITY = ["GETAVAILABILITY"]
+	formatMessage (CLAIMURL url) = [ "CLAIMURL", Proto.serialize url ]
 	formatMessage (TRANSFER direction key file) =
 		[ "TRANSFER"
 		, Proto.serialize direction
@@ -130,6 +133,8 @@ data Response
 	| AVAILABILITY Availability
 	| INITREMOTE_SUCCESS
 	| INITREMOTE_FAILURE ErrorMsg
+	| CLAIMURL_SUCCESS
+	| CLAIMURL_FAILURE
 	| UNSUPPORTED_REQUEST
 	deriving (Show)
 
@@ -147,6 +152,8 @@ instance Proto.Receivable Response where
 	parseCommand "AVAILABILITY" = Proto.parse1 AVAILABILITY
 	parseCommand "INITREMOTE-SUCCESS" = Proto.parse0 INITREMOTE_SUCCESS
 	parseCommand "INITREMOTE-FAILURE" = Proto.parse1 INITREMOTE_FAILURE
+	parseCommand "CLAIMURL-SUCCESS" = Proto.parse0 CLAIMURL_SUCCESS
+	parseCommand "CLAIMURL-FAILURE" = Proto.parse0 CLAIMURL_FAILURE
 	parseCommand "UNSUPPORTED-REQUEST" = Proto.parse0 UNSUPPORTED_REQUEST
 	parseCommand _ = Proto.parseFail
 
@@ -165,8 +172,8 @@ data RemoteRequest
 	| GETWANTED
 	| SETSTATE Key String
 	| GETSTATE Key
-	| SETURLPRESENT Key String
-	| SETURLMISSING Key String
+	| SETURLPRESENT Key URLString
+	| SETURLMISSING Key URLString
 	| GETURLS Key String
 	| DEBUG String
 	deriving (Show)
diff --git a/Types/Remote.hs b/Types/Remote.hs
index 46a0648..3f71e1f 100644
--- a/Types/Remote.hs
+++ b/Types/Remote.hs
@@ -103,7 +103,7 @@ data RemoteA a = Remote {
 	-- Information about the remote, for git annex info to display.
 	getInfo :: a [(String, String)],
 	-- Some remotes can download from an url (or uri).
-	claimUrl :: Maybe (URLString -> IO Bool)
+	claimUrl :: Maybe (URLString -> a Bool)
 }
 
 instance Show (RemoteA a) where
diff --git a/doc/design/external_special_remote_protocol.mdwn b/doc/design/external_special_remote_protocol.mdwn
index 0053309..332cc37 100644
--- a/doc/design/external_special_remote_protocol.mdwn
+++ b/doc/design/external_special_remote_protocol.mdwn
@@ -125,6 +125,10 @@ replying with `UNSUPPORTED-REQUEST` is acceptable.
   If the remote replies with `UNSUPPORTED-REQUEST`, its availability
   is assumed to be global. So, only remotes that are only reachable
   locally need to worry about implementing this.
+* `CLAIMURL Value`  
+  Asks the remote if it wishes to claim responsibility for downloading
+  an url. If so, the remote should send back a `CLAIMURL-SUCCESS` reply.
+  If not, it can send `CLAIMURL-FAILURE`.
 
 More optional requests may be added, without changing the protocol version,
 so if an unknown request is seen, reply with `UNSUPPORTED-REQUEST`.
@@ -167,6 +171,10 @@ while it's handling a request.
   Indicates the INITREMOTE succeeded and the remote is ready to use.
 * `INITREMOTE-FAILURE ErrorMsg`  
   Indicates that INITREMOTE failed.
+* `CLAIMURL-SUCCESS`  
+  Indicates that the CLAIMURL url will be handled by this remote.
+* `CLAIMURL-FAILURE`  
+  Indicates that the CLAIMURL url will not be handled by this remote.
 * `UNSUPPORTED-REQUEST`  
   Indicates that the special remote does not know how to handle a request.
 
diff --git a/doc/todo/extensible_addurl.mdwn b/doc/todo/extensible_addurl.mdwn
index 0db4085..e9a8d07 100644
--- a/doc/todo/extensible_addurl.mdwn
+++ b/doc/todo/extensible_addurl.mdwn
@@ -22,7 +22,7 @@ both available from CERN and from a torrent, for example.
 
 Solution: Add a new method to remotes:
 
-	claimUrl :: Maybe (URLString -> IO Bool)
+	claimUrl :: Maybe (URLString -> Annex Bool)
 
 Remotes that implement this method (including special remotes) will
 be queried when such an uri is added, to see which claims it. Once the
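
For illustration, a minimal external special remote written in Haskell that
answers the new CLAIMURL request over the line-based stdin/stdout protocol.
This is only a sketch: the mandatory TRANSFER, CHECKPRESENT and REMOVE
requests are omitted, and the claimed url patterns are arbitrary examples.

    import System.IO (BufferMode(LineBuffering), hSetBuffering, stdin, stdout)
    import Data.List (isPrefixOf, isSuffixOf)

    main :: IO ()
    main = do
        hSetBuffering stdin LineBuffering
        hSetBuffering stdout LineBuffering
        putStrLn "VERSION 1"
        interact (unlines . map (reply . words) . lines)

    -- One reply per request line read from git-annex.
    reply :: [String] -> String
    reply ("INITREMOTE":_) = "INITREMOTE-SUCCESS"
    reply ("PREPARE":_) = "PREPARE-SUCCESS"
    reply ("CLAIMURL":url:_)
        | "magnet:" `isPrefixOf` url || ".torrent" `isSuffixOf` url = "CLAIMURL-SUCCESS"
        | otherwise = "CLAIMURL-FAILURE"
    reply _ = "UNSUPPORTED-REQUEST"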

add stub claimUrl
diff --git a/Logs/Web.hs b/Logs/Web.hs
index f31215a..19a3084 100644
--- a/Logs/Web.hs
+++ b/Logs/Web.hs
@@ -29,8 +29,7 @@ import qualified Annex.Branch
 import Annex.CatFile
 import qualified Git
 import qualified Git.LsFiles
-
-type URLString = String
+import Utility.Url
 
 -- Dummy uuid for the whole web. Do not alter.
 webUUID :: UUID
diff --git a/Remote/Bup.hs b/Remote/Bup.hs
index 4f2ddf3..8744aa3 100644
--- a/Remote/Bup.hs
+++ b/Remote/Bup.hs
@@ -74,6 +74,7 @@ gen r u c gc = do
 		, readonly = False
 		, mkUnavailable = return Nothing
 		, getInfo = return [("repo", buprepo)]
+		, claimUrl = Nothing
 		}
 	return $ Just $ specialRemote' specialcfg c
 		(simplyPrepare $ store this buprepo)
diff --git a/Remote/Ddar.hs b/Remote/Ddar.hs
index d73919b..a57f5f6 100644
--- a/Remote/Ddar.hs
+++ b/Remote/Ddar.hs
@@ -71,6 +71,7 @@ gen r u c gc = do
 		, readonly = False
 		, mkUnavailable = return Nothing
 		, getInfo = return [("repo", ddarrepo)]
+		, claimUrl = Nothing
 		}
 	ddarrepo = fromMaybe (error "missing ddarrepo") $ remoteAnnexDdarRepo gc
 	specialcfg = (specialRemoteCfg c)
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index 2e9e013..d83ab2d 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -68,7 +68,8 @@ gen r u c gc = do
 			remotetype = remote,
 			mkUnavailable = gen r u c $
 				gc { remoteAnnexDirectory = Just "/dev/null" },
-			getInfo = return [("directory", dir)]
+			getInfo = return [("directory", dir)],
+			claimUrl = Nothing
 		}
   where
 	dir = fromMaybe (error "missing directory") $ remoteAnnexDirectory gc
diff --git a/Remote/External.hs b/Remote/External.hs
index dca273d..a852656 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -68,8 +68,9 @@ gen r u c gc = do
 			availability = avail,
 			remotetype = remote,
 			mkUnavailable = gen r u c $
-				gc { remoteAnnexExternalType = Just "!dne!" }
-			, getInfo = return [("externaltype", externaltype)]
+				gc { remoteAnnexExternalType = Just "!dne!" },
+			getInfo = return [("externaltype", externaltype)],
+			claimUrl = Nothing
 		}
   where
 	externaltype = fromMaybe (error "missing externaltype") (remoteAnnexExternalType gc)
diff --git a/Remote/GCrypt.hs b/Remote/GCrypt.hs
index 9aa70d5..43e3d8b 100644
--- a/Remote/GCrypt.hs
+++ b/Remote/GCrypt.hs
@@ -122,6 +122,7 @@ gen' r u c gc = do
 		, remotetype = remote
 		, mkUnavailable = return Nothing
 		, getInfo = return $ gitRepoInfo r
+		, claimUrl = Nothing
 	}
 	return $ Just $ specialRemote' specialcfg c
 		(simplyPrepare $ store this rsyncopts)
diff --git a/Remote/Git.hs b/Remote/Git.hs
index 50c34a2..fdadac2 100644
--- a/Remote/Git.hs
+++ b/Remote/Git.hs
@@ -160,6 +160,7 @@ gen r u c gc
 			, remotetype = remote
 			, mkUnavailable = unavailable r u c gc
 			, getInfo = return $ gitRepoInfo r
+			, claimUrl = Nothing
 			}
 
 unavailable :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
diff --git a/Remote/Glacier.hs b/Remote/Glacier.hs
index 99003f2..5484a0d 100644
--- a/Remote/Glacier.hs
+++ b/Remote/Glacier.hs
@@ -68,7 +68,8 @@ gen r u c gc = new <$> remoteCost gc veryExpensiveRemoteCost
 			remotetype = remote,
 			mkUnavailable = return Nothing,
 			getInfo = includeCredsInfo c (AWS.creds u) $
-				[ ("glacier vault", getVault c) ]
+				[ ("glacier vault", getVault c) ],
+			claimUrl = Nothing
 		}
 	specialcfg = (specialRemoteCfg c)
 		-- Disabled until jobList gets support for chunks.
diff --git a/Remote/Hook.hs b/Remote/Hook.hs
index f7c428e..a84ee85 100644
--- a/Remote/Hook.hs
+++ b/Remote/Hook.hs
@@ -61,7 +61,8 @@ gen r u c gc = do
 			remotetype = remote,
 			mkUnavailable = gen r u c $
 				gc { remoteAnnexHookType = Just "!dne!" },
-			getInfo = return [("hooktype", hooktype)]
+			getInfo = return [("hooktype", hooktype)],
+			claimUrl = Nothing
 		}
   where
 	hooktype = fromMaybe (error "missing hooktype") $ remoteAnnexHookType gc
diff --git a/Remote/Rsync.hs b/Remote/Rsync.hs
index a87d05a..6e71cb2 100644
--- a/Remote/Rsync.hs
+++ b/Remote/Rsync.hs
@@ -84,6 +84,7 @@ gen r u c gc = do
 			, remotetype = remote
 			, mkUnavailable = return Nothing
 			, getInfo = return [("url", url)]
+			, claimUrl = Nothing
 			}
   where
 	specialcfg = (specialRemoteCfg c)
diff --git a/Remote/S3.hs b/Remote/S3.hs
index 844d879..42f4f1f 100644
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -92,7 +92,8 @@ gen r u c gc = do
 					then Just ("internet archive item", iaItemUrl $ fromMaybe "unknown" $ getBucketName c)
 					else Nothing
 				, Just ("partsize", maybe "unlimited" (roughSize storageUnits False) (getPartSize c))
-				]
+				],
+			claimUrl = Nothing
 		}
 
 s3Setup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
diff --git a/Remote/Tahoe.hs b/Remote/Tahoe.hs
index 7dd231c..8df590f 100644
--- a/Remote/Tahoe.hs
+++ b/Remote/Tahoe.hs
@@ -85,7 +85,8 @@ gen r u c gc = do
 		availability = GloballyAvailable,
 		remotetype = remote,
 		mkUnavailable = return Nothing,
-		getInfo = return []
+		getInfo = return [],
+		claimUrl = Nothing
 	}
 
 tahoeSetup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
diff --git a/Remote/Web.hs b/Remote/Web.hs
index 4d4b43c..6ddf1a4 100644
--- a/Remote/Web.hs
+++ b/Remote/Web.hs
@@ -63,7 +63,8 @@ gen r _ c gc =
 		availability = GloballyAvailable,
 		remotetype = remote,
 		mkUnavailable = return Nothing,
-		getInfo = return []
+		getInfo = return [],
+		claimUrl = Nothing -- implicitly claims all urls
 	}
 
 downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Bool
diff --git a/Remote/WebDAV.hs b/Remote/WebDAV.hs
index 932ed81..6b56acc 100644
--- a/Remote/WebDAV.hs
+++ b/Remote/WebDAV.hs
@@ -73,7 +73,8 @@ gen r u c gc = new <$> remoteCost gc expensiveRemoteCost
 			remotetype = remote,
 			mkUnavailable = gen r u (M.insert "url" "http://!dne!/" c) gc,
 			getInfo = includeCredsInfo c (davCreds u) $
-				[("url", fromMaybe "unknown" (M.lookup "url" c))]
+				[("url", fromMaybe "unknown" (M.lookup "url" c))],
+			claimUrl = Nothing
 		}
 		chunkconfig = getChunkConfig c
 
diff --git a/Types/Remote.hs b/Types/Remote.hs
index 7951217..46a0648 100644
--- a/Types/Remote.hs
+++ b/Types/Remote.hs
@@ -29,6 +29,7 @@ import Config.Cost
 import Utility.Metered
 import Git.Types
 import Utility.SafeCommand
+import Utility.Url
 
 type RemoteConfigKey = String
 type RemoteConfig = M.Map RemoteConfigKey String

(Diff truncated)
External special remote protocol now includes commands for setting and getting the urls associated with a key.
diff --git a/Logs/Web.hs b/Logs/Web.hs
index 1d16e10..f31215a 100644
--- a/Logs/Web.hs
+++ b/Logs/Web.hs
@@ -9,6 +9,7 @@ module Logs.Web (
 	URLString,
 	webUUID,
 	getUrls,
+	getUrlsWithPrefix,
 	setUrlPresent,
 	setUrlMissing,
 	knownUrls,
@@ -46,6 +47,9 @@ getUrls key = go $ urlLogFile key : oldurlLogs key
 			then go ls
 			else return us
 
+getUrlsWithPrefix :: Key -> String -> Annex [URLString]
+getUrlsWithPrefix key prefix = filter (prefix `isPrefixOf`) <$> getUrls key
+
 setUrlPresent :: Key -> URLString -> Annex ()
 setUrlPresent key url = do
 	us <- getUrls key
diff --git a/Remote/External.hs b/Remote/External.hs
index e907ab0..dca273d 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -19,6 +19,7 @@ import Utility.Metered
 import Logs.Transfer
 import Logs.PreferredContent.Raw
 import Logs.RemoteState
+import Logs.Web
 import Config.Cost
 import Annex.UUID
 import Creds
@@ -215,6 +216,11 @@ handleRequest' lck external req mp responsehandler
 		state <- fromMaybe ""
 			<$> getRemoteState (externalUUID external) key
 		send $ VALUE state
+	handleRemoteRequest (SETURLPRESENT key url) = setUrlPresent key url
+	handleRemoteRequest (SETURLMISSING key url) = setUrlMissing key url
+	handleRemoteRequest (GETURLS key prefix) = do
+		mapM_ (send . VALUE) =<< getUrlsWithPrefix key prefix
+		send (VALUE "") -- end of list
 	handleRemoteRequest (DEBUG msg) = liftIO $ debugM "external" msg
 	handleRemoteRequest (VERSION _) =
 		sendMessage lck external $ ERROR "too late to send VERSION"
diff --git a/Remote/External/Types.hs b/Remote/External/Types.hs
index 3a69ae9..cdcb657 100644
--- a/Remote/External/Types.hs
+++ b/Remote/External/Types.hs
@@ -165,6 +165,9 @@ data RemoteRequest
 	| GETWANTED
 	| SETSTATE Key String
 	| GETSTATE Key
+	| SETURLPRESENT Key String
+	| SETURLMISSING Key String
+	| GETURLS Key String
 	| DEBUG String
 	deriving (Show)
 
@@ -182,6 +185,9 @@ instance Proto.Receivable RemoteRequest where
 	parseCommand "GETWANTED" = Proto.parse0 GETWANTED
 	parseCommand "SETSTATE" = Proto.parse2 SETSTATE
 	parseCommand "GETSTATE" = Proto.parse1 GETSTATE
+	parseCommand "SETURLPRESENT" = Proto.parse2 SETURLPRESENT
+	parseCommand "SETURLMISSING" = Proto.parse2 SETURLMISSING
+	parseCommand "GETURLS" = Proto.parse2 GETURLS
 	parseCommand "DEBUG" = Proto.parse1 DEBUG
 	parseCommand _ = Proto.parseFail
 
diff --git a/debian/changelog b/debian/changelog
index 0a28d9a..270d318 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -2,6 +2,8 @@ git-annex (5.20141204) UNRELEASED; urgency=medium
 
   * Webapp: When adding a new box.com remote, use the new style chunking.
     Thanks, Jon Ander Peñalba.
+  * External special remote protocol now includes commands for setting
+    and getting the urls associated with a key.
 
  -- Joey Hess <id@joeyh.name>  Fri, 05 Dec 2014 13:42:08 -0400
 
diff --git a/doc/design/external_special_remote_protocol.mdwn b/doc/design/external_special_remote_protocol.mdwn
index 4219f11..0053309 100644
--- a/doc/design/external_special_remote_protocol.mdwn
+++ b/doc/design/external_special_remote_protocol.mdwn
@@ -247,6 +247,17 @@ in control.
 * `GETSTATE Key`  
   Gets any state that has been stored for the key.  
   (git-annex replies with VALUE followed by the state.)
+* `SETURLPRESENT Key Value`  
+  Records an url (or uri) where the Key can be downloaded from.
+* `SETURLMISSING Key Value`  
+  Records that the key can no longer be downloaded from the specified
+  url (or uri).
+* `GETURLS Key Value`  
+  Gets the recorded urls where a Key can be downloaded from.
+  Only urls that start with the Value will be returned. The Value
+  may be empty to get all urls.
+  (git-annex replies one or more times with VALUE for each url.
+  The final VALUE has an empty value, indicating the end of the url list.)
 * `DEBUG message`
   Tells git-annex to display the message if --debug is enabled.
 
@@ -288,7 +299,5 @@ start a new process the next time it needs to use a remote.
   the remote. However, \n and probably \0 need to be escaped somehow in the
   file data, which adds complication.
 * uuid discovery during INITREMOTE.
-* Support for getting and setting the list of urls that can be associated
-  with a key.
 * Hook into webapp. Needs a way to provide some kind of prompt to the user
   in the webapp, etc.
diff --git a/doc/todo/extensible_addurl.mdwn b/doc/todo/extensible_addurl.mdwn
index b040c11..63b03e4 100644
--- a/doc/todo/extensible_addurl.mdwn
+++ b/doc/todo/extensible_addurl.mdwn
@@ -36,7 +36,7 @@ and so know where to download from. (Much as the web special remote already
 does.)
 
 Prerequisite: Expand the external special remote interface to support
-accessing the url log.
+accessing the url log. (done)
 
 ----
 

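As an illustration of the url commands added by the commit above, a special
remote might record an url for a key and later ask for it back (Key and the
urls are placeholders; the VALUE lines are git-annex's replies, and the final
empty VALUE ends the list):

	SETURLPRESENT Key http://example.com/file
	GETURLS Key http://
	VALUE http://example.com/file
	VALUE
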
Added a comment
diff --git a/doc/bugs/S3_upload_not_using_multipart/comment_12_bf98d0c771dfdd15ddafdba2d94d911f._comment b/doc/bugs/S3_upload_not_using_multipart/comment_12_bf98d0c771dfdd15ddafdba2d94d911f._comment
new file mode 100644
index 0000000..2be62f6
--- /dev/null
+++ b/doc/bugs/S3_upload_not_using_multipart/comment_12_bf98d0c771dfdd15ddafdba2d94d911f._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnWvnTWY6LrcPB4BzYEBn5mRTpNhg5EtEg"
+ nickname="Bence"
+ subject="comment 12"
+ date="2014-12-08T17:28:52Z"
+ content="""
+Linked this bug to [[special remotes/S3|/special_remotes/S3]].
+"""]]

IMHO: as of now there is no multipart parameter in the source, only partsize (I don't speak Haskell, sorry).
diff --git a/doc/bugs/S3_upload_not_using_multipart.mdwn b/doc/bugs/S3_upload_not_using_multipart.mdwn
index cd40e9d..ac3817c 100644
--- a/doc/bugs/S3_upload_not_using_multipart.mdwn
+++ b/doc/bugs/S3_upload_not_using_multipart.mdwn
@@ -57,6 +57,6 @@ Please provide any additional information below.
 > enough version of the aws library. You need to configure the remote to
 > use an appropriate value for multipart, eg:
 > 
-> git annex enableremote cloud multipart=1GiB
+> git annex enableremote cloud partsize=1GiB
 > 
 > --[[Joey]]

diff --git a/doc/special_remotes/S3.mdwn b/doc/special_remotes/S3.mdwn
index 5d161c3..f397125 100644
--- a/doc/special_remotes/S3.mdwn
+++ b/doc/special_remotes/S3.mdwn
@@ -60,6 +60,8 @@ the S3 remote.
   but can be enabled or changed at any time.
   time.
 
+  NOTE: there is a [[bug|/bugs/S3_upload_not_using_multipart/]] that depends on the version of the AWS library in use. See [[this comment|http://git-annex.branchable.com/bugs/S3_upload_not_using_multipart/#comment-4c45dac68866d3550c0b32ed466e2c6a]] (the latest as of now).
+
 * `fileprefix` - By default, git-annex places files in a tree rooted at the
   top of the S3 bucket. When this is set, it's prefixed to the filenames
   used. For example, you could set it to "foo/" in one special remote,

expand design, enough detail to start implementation
diff --git a/doc/todo/extensible_addurl.mdwn b/doc/todo/extensible_addurl.mdwn
index 6eb0903..b040c11 100644
--- a/doc/todo/extensible_addurl.mdwn
+++ b/doc/todo/extensible_addurl.mdwn
@@ -7,16 +7,38 @@ from scientific data repositories that use their own APIs.
 
 The basic idea is to have external special remotes (or perhaps built-in
 ones in some cases), which addurl can use to download an object, referred
-to by some uri-like thing. The uri starts with "$downloader:"
+to by some uri-like thing. The uri starts with "$downloader:" to indicate
+that it's not a regular url and so is not handled by the web special
+remote.
 
 	git annex addurl torrent:$foo
 	git annex addurl CERN:$bar
 
 Problem: This requires mapping from the name of the downloader, which is
 probably the same as the git-annex-remote-$downloader program implementing
-the special remote protocol, to the UUID of a remote. That's assuming we
-want location tracking to be able to know that a file is both available
-from CERN and from a torrent, for example.
+the special remote protocol (but not always), to the UUID of a remote.
+That's assuming we want location tracking to be able to know that a file is
+both available from CERN and from a torrent, for example.
+
+Solution: Add a new method to remotes:
+
+	claimUri :: Maybe (Uri -> Bool)
+
+Remotes that implement this method (including special remotes) will
+be queried when such an uri is added, to see which claims it. Once the
+remote is known, addurl will record that the Key is present on that remote,
+and record the uri in the url log. 
+
+Then retrieval of the Key works more or less as usual. The only
+difference being that remotes that support this interface can look
+at the url log to find the one with the right "$downloader:" prefix,
+and so know where to download from. (Much as the web special remote already
+does.)
+
+Prerequisite: Expand the external special remote interface to support
+accessing the url log.
+
+----
 
 It would also be nice to be able to easily configure a regexp that normal
 urls, if they match, are made to use a particular downloader. So, for
@@ -29,7 +51,7 @@ special remote interface, and let a downloader be specified simply by:
 
 	git config annex.downloader.torrent.command 'aria2c %url $file'
 
-In this case, the UUID used would be the UUID of the web special remote, I
-suppose?
+This could be implemented in either the web special remote or even in an
+external special remote.
 
 Some other discussion at <https://github.com/datalad/datalad/issues/10>
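
A purely illustrative sketch of the regexp idea above: urls matching a
configured pattern get the downloader prefix added before being recorded.
The DownloaderCfg type and applyDownloaderPrefix are invented names for this
example, not part of git-annex.

    import Text.Regex.TDFA ((=~))

    -- Invented config type for this sketch: one entry per configured downloader.
    data DownloaderCfg = DownloaderCfg
        { downloaderName :: String   -- e.g. "torrent"
        , downloaderRegexp :: String -- e.g. "(^magnet:|\\.torrent$)"
        }

    -- Prefix the url with "<downloader>:" when its regexp matches, which is
    -- what "git config annex.downloader.torrent.regexp ..." above would set up.
    applyDownloaderPrefix :: [DownloaderCfg] -> String -> String
    applyDownloaderPrefix cfgs url = case filter matches cfgs of
        (c:_) -> downloaderName c ++ ":" ++ url
        []    -> url
      where
        matches c = url =~ downloaderRegexp c :: Bool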

diff --git a/doc/bugs/Cannot_set_direct_mode_with_non_default_worktree.mdwn b/doc/bugs/Cannot_set_direct_mode_with_non_default_worktree.mdwn
new file mode 100644
index 0000000..ce3ae58
--- /dev/null
+++ b/doc/bugs/Cannot_set_direct_mode_with_non_default_worktree.mdwn
@@ -0,0 +1,45 @@
+### Please describe the problem.
+I am trying to switch to direct mode with the git work tree in a different directory than the default, much as described in http://git-annex.branchable.com/forum/Detached_git_work_tree__63__/
+
+
+### What steps will reproduce the problem?
+- Create a new git repo with the GIT_WORK_TREE and GIT_DIR set.
+- git annex init test
+- git annex direct
+
+
+### What version of git-annex are you using? On what operating system?
+5.20141125 package in Debian unstable
+
+
+### Please provide any additional information below.
+
+[[!format sh """
++dbn@loaner:~/annex $ mkdir -p test/worktree
++dbn@loaner:~/annex $ cd test/
++dbn@loaner:~/annex/test $ git init
+Initialized empty Git repository in /home/dbn/annex/test/.git/
++dbn@loaner:~/annex/test $ git annex init test
+init test ok
+(Recording state in git...)
++dbn@loaner:~/annex/test $ git annex direct --debug
+commit  
+[2014-12-08 03:05:45 PST] call: git ["--git-dir=/home/dbn/annex/test/.git","--work-tree=/home/dbn/annex/test/worktree","commit","-a","-m","commit before switching to direct mode"]
+On branch master
+
+Initial commit
+
+nothing to commit
+ok
+[2014-12-08 03:05:45 PST] read: git ["--git-dir=/home/dbn/annex/test/.git","--work-tree=/home/dbn/annex/test/worktree","ls-files","--cached","-z","--","/home/dbn/annex/test/worktree"]
+direct  [2014-12-08 03:05:45 PST] read: git ["--git-dir=/home/dbn/annex/test/.git","--work-tree=/home/dbn/annex/test/worktree","symbolic-ref","HEAD"]
+[2014-12-08 03:05:45 PST] read: git ["--git-dir=/home/dbn/annex/test/.git","--work-tree=/home/dbn/annex/test/worktree","show-ref","--hash","refs/heads/master"]
+[2014-12-08 03:05:45 PST] call: git ["--git-dir=/home/dbn/annex/test/.git","--work-tree=/home/dbn/annex/test/worktree","checkout","-q","-B","annex/direct/master"]
+[2014-12-08 03:05:45 PST] call: git ["--git-dir=/home/dbn/annex/test/.git","--work-tree=/home/dbn/annex/test/worktree","config","core.bare","true"]
+[2014-12-08 03:05:45 PST] read: git ["config","--null","--list"]
+fatal: core.bare and core.worktree do not make sense
+
+git-annex: user error (git ["config","--null","--list"] exited 128)
+failed
+git-annex: direct: 1 failed
+"""]]

diff --git a/doc/bugs/Build_error_when_S3_is_disabled.mdwn b/doc/bugs/Build_error_when_S3_is_disabled.mdwn
new file mode 100644
index 0000000..9b72afe
--- /dev/null
+++ b/doc/bugs/Build_error_when_S3_is_disabled.mdwn
@@ -0,0 +1,37 @@
+With release 5.20141203, I'm getting the following build error.
+
+    Remote/Helper/AWS.hs:15:18:
+        Could not find module ‘Aws’
+        Use -v to see a list of the files searched for.
+
+    Remote/Helper/AWS.hs:16:18:
+        Could not find module ‘Aws.S3’
+        Use -v to see a list of the files searched for.
+
+I'm installing dependencies with cabal but have disabled S3 support
+('-f-S3').  This setup has worked for previous releases (I'm on a machine running Arch Linux).
+
+    _features=(-f-Android
+               -f-Assistant
+               -fDbus
+               -fDNS
+               -fInotify
+               -fPairing
+               -fProduction
+               -f-S3
+               -fTestSuite
+               -fTDFA
+               -f-Webapp
+               -f-WebDAV
+               -fXMPP
+               -fFeed
+               -fQuvi
+               -fCryptoHash)
+
+    cabal update
+    cabal install c2hs
+
+    cabal install --user --force-reinstalls --only-dependencies "${_features[@]}"
+    cabal configure "${_features[@]}"
+
+    make

Added a comment: another example
diff --git a/doc/bugs/__34__git-annex:_direct:_1_failed__34___on_Windows/comment_1_ce2355485f2610b6a7a79914dcd365be._comment b/doc/bugs/__34__git-annex:_direct:_1_failed__34___on_Windows/comment_1_ce2355485f2610b6a7a79914dcd365be._comment
new file mode 100644
index 0000000..127d060
--- /dev/null
+++ b/doc/bugs/__34__git-annex:_direct:_1_failed__34___on_Windows/comment_1_ce2355485f2610b6a7a79914dcd365be._comment
@@ -0,0 +1,17 @@
+[[!comment format=mdwn
+ username="edward"
+ subject="another example"
+ date="2014-12-06T21:01:03Z"
+ content="""
+I'm having the same problem:
+
+> git-annex: c:\Users\TV\annex\.git\annex\objects\566\a33\URL--quvi&chttps&c%%www.youtube.com%watch,63v,61XS-kKX9wQk0,38index,615,38list,61PLQ-uHSnFig5NCQkhJfkn8ogXFwzrP4SIf\: openTempFile: does not exist (No such file or directory)    
+> failed    
+> git-annex: init: 1 failed    
+
+In my case the filename is slightly shorter, 154 characters; for Aaron the offending filename was 162 characters.
+
+I think the full filename that git annex is trying to write is 270 characters:
+
+> c:\Users\TV\annex\.git\annex\objects\566\a33\URL--quvi&chttps&c%%www.youtube.com%watch,63v,61XS-kKX9wQk0,38index,615,38list,61PLQ-uHSnFig5NCQkhJfkn8ogXFwzrP4SIf/URL--quvi&chttps&c%%www.youtube.com%watch,63v,61XS-kKX9wQk0,38index,615,38list,61PLQ-uHSnFig5NCQkhJfkn8ogXFwzrP4SIf
+"""]]

comment
diff --git a/doc/forum/new_linux_arm_tarball_build/comment_14_67021b6d239690c9d18e8630aa2254ff._comment b/doc/forum/new_linux_arm_tarball_build/comment_14_67021b6d239690c9d18e8630aa2254ff._comment
new file mode 100644
index 0000000..fd9591d
--- /dev/null
+++ b/doc/forum/new_linux_arm_tarball_build/comment_14_67021b6d239690c9d18e8630aa2254ff._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 14"""
+ date="2014-12-06T20:25:06Z"
+ content="""
+@Julian, shimmed/git-annex-shell/git-annex-shell is supposed to be a hard
+link to shimmed/git-annex/git-annex. Maybe there's a problem with hard
+links on your NAS? A symlink would also work I think.
+"""]]

Added a comment
diff --git a/doc/bugs/S3_memory_leaks/comment_7_1ac572b79caa23e3f791e4f8461fcddd._comment b/doc/bugs/S3_memory_leaks/comment_7_1ac572b79caa23e3f791e4f8461fcddd._comment
new file mode 100644
index 0000000..5aaab2b
--- /dev/null
+++ b/doc/bugs/S3_memory_leaks/comment_7_1ac572b79caa23e3f791e4f8461fcddd._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmUJBh1lYmvfCCiGr3yrdx-QhuLCSRnU5c"
+ nickname="Justin"
+ subject="comment 7"
+ date="2014-12-06T18:15:50Z"
+ content="""
+The new version works really great for me.  I've been copying to S3 over the past few days with no issues on my raspberry pi.
+
+Thanks a ton for getting this out.
+"""]]

Added a comment
diff --git a/doc/forum/new_linux_arm_tarball_build/comment_13_36f48c30894b9b225b812ba5e5b2f504._comment b/doc/forum/new_linux_arm_tarball_build/comment_13_36f48c30894b9b225b812ba5e5b2f504._comment
new file mode 100644
index 0000000..312082a
--- /dev/null
+++ b/doc/forum/new_linux_arm_tarball_build/comment_13_36f48c30894b9b225b812ba5e5b2f504._comment
@@ -0,0 +1,17 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnClfG_kAo0drU5dVZiTRXo9WnqjW4I5dA"
+ nickname="Julian"
+ subject="comment 13"
+ date="2014-12-06T17:04:04Z"
+ content="""
+Unfortunately, it isn't working for me on DS214 (MARVELL Armada XP MV78230) and DSM 5.1-5004 Update 2.
+git is installed, git-annex tar extracted and runshell worked. Within this shell git-annex seems to work (e.g. git-annex init works)
+
+However, git-annex complains that git-annex is not installed on the remote when I try to add the NAS as a remote.
+
+I guess it is related to the fact that git-annex-shell is not working on the NAS. When I execute git-annex-shell it gives:
+/volume1/homes/julian/bin/git-annex.linux/shimmed/git-annex-shell/git-annex-shell: error while loading shared libraries: /volume1/homes/julian/bin/git-annex.linux/shimmed/git-annex-shell/git-annex-shell: file too short
+Actually /volume1/homes/julian/bin/git-annex.linux/shimmed/git-annex-shell/git-annex-shell has 0 Bytes.
+
+I can't execute git-annex-shell inside the bin folder due to permission issues. I tried to change the file permissions but it didn't help.
+"""]]

diff --git a/doc/forum/Auto_update_not_working.mdwn b/doc/forum/Auto_update_not_working.mdwn
new file mode 100644
index 0000000..1c9fa60
--- /dev/null
+++ b/doc/forum/Auto_update_not_working.mdwn
@@ -0,0 +1,6 @@
+Hello,
+
+I've installed to ~/software/ using the prebuilt tarballs. I'm using the assistant with auto updates set to ask. Every time I start the assistant it claims that a new version of git-annex has been installed. I click Finish Upgrade, it does the upgrade, and it says it has finished upgrading to version 5.20141105-g8b19598. On the next boot / restart everything starts again and it always upgrades to the same version.
+
+Thanks!
+Florian

comment
diff --git a/doc/todo/extensible_addurl/comment_2_0e27f12c998a3ac4f0d4c3d4c9898d26._comment b/doc/todo/extensible_addurl/comment_2_0e27f12c998a3ac4f0d4c3d4c9898d26._comment
new file mode 100644
index 0000000..191cc3f
--- /dev/null
+++ b/doc/todo/extensible_addurl/comment_2_0e27f12c998a3ac4f0d4c3d4c9898d26._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 2"""
+ date="2014-12-05T17:45:57Z"
+ content="""
+I think this issue of multiple files in a torrent is another place that
+using an external special remote  (or maybe one built into git-annex)
+is better than just specifying a download command. A special remote for
+torrents could use a temp directory that accumulates all the files in the
+torrent, and then pluck out specific files as git-annex requests them.
+
+When git-annex exits, the special remote could clean up any unused files.
+"""]]

Webapp: When adding a new box.com remote, use the new style chunking. Thanks, Jon Ander Peñalba.
diff --git a/debian/changelog b/debian/changelog
index 754d504..0a28d9a 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,10 @@
+git-annex (5.20141204) UNRELEASED; urgency=medium
+
+  * Webapp: When adding a new box.com remote, use the new style chunking.
+    Thanks, Jon Ander Peñalba.
+
+ -- Joey Hess <id@joeyh.name>  Fri, 05 Dec 2014 13:42:08 -0400
+
 git-annex (5.20141203) unstable; urgency=medium
 
   * proxy: New command for direct mode repositories, allows bypassing
diff --git a/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn b/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn
index 4e5f357..7f00a2e 100644
--- a/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn
+++ b/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn
@@ -1,3 +1,5 @@
 Last week I added a new Box.com repository through the webapp with the latest git-annex version, and I've noticed that the remote uses the old chunksize param.
 
 Is this correct? AFAIK it should use the chunk param: http://git-annex.branchable.com/chunking/
+
+>  [[fixed|done]] --[[Joey]] 

fix language
diff --git a/doc/design/external_special_remote_protocol.mdwn b/doc/design/external_special_remote_protocol.mdwn
index 70ece65..4219f11 100644
--- a/doc/design/external_special_remote_protocol.mdwn
+++ b/doc/design/external_special_remote_protocol.mdwn
@@ -112,7 +112,7 @@ The following requests *must* all be supported by the special remote.
 * `CHECKPRESENT Key`  
   Requests the remote to check if a key is present in it.
 * `REMOVE Key`  
-  Requests the remote to remove key's contents.
+  Requests the remote to remove a key's contents.
 
 The following requests can optionally be supported. If not handled,
 replying with `UNSUPPORTED-REQUEST` is acceptable.

DOC: minor typos and rewording in few docs
diff --git a/doc/design/external_special_remote_protocol.mdwn b/doc/design/external_special_remote_protocol.mdwn
index 01ffe7f..70ece65 100644
--- a/doc/design/external_special_remote_protocol.mdwn
+++ b/doc/design/external_special_remote_protocol.mdwn
@@ -91,14 +91,14 @@ send one of the corresponding replies listed in the next section.
 The following requests *must* all be supported by the special remote.
 
 * `INITREMOTE`  
-  Request that the remote initialize itself. This is where any one-time
+  Requests the remote to initialize itself. This is where any one-time
   setup tasks can be done, for example creating an Amazon S3 bucket.  
   Note: This may be run repeatedly over time, as a remote is initialized in
   different repositories, or as the configuration of a remote is changed.
   (Both `git annex initremote` and `git-annex enableremote` run this.)
   So any one-time setup tasks should be done idempotently.
 * `PREPARE`  
-  Tells the special remote it's time to prepare itself to be used.  
+  Tells the remote that it's time to prepare itself to be used.  
   Only INITREMOTE can come before this.
 * `TRANSFER STORE|RETRIEVE Key File`  
   Requests the transfer of a key. For STORE, the File is the file to upload;
@@ -110,20 +110,20 @@ The following requests *must* all be supported by the special remote.
   Multiple transfers might be requested by git-annex, but it's fine for the 
   program to serialize them and only do one at a time.  
 * `CHECKPRESENT Key`  
-  Requests the remote check if a key is present in it.
+  Requests the remote to check if a key is present in it.
 * `REMOVE Key`  
-  Requests the remote remove a key's contents.
+  Requests the remote to remove key's contents.
 
 The following requests can optionally be supported. If not handled,
 replying with `UNSUPPORTED-REQUEST` is acceptable.
 
 * `GETCOST`  
-  Requests the remote return a use cost. Higher costs are more expensive.
+  Requests the remote to return a use cost. Higher costs are more expensive.
   (See Config/Cost.hs for some standard costs.)
 * `GETAVAILABILITY`
-  Requests the remote send back an `AVAILABILITY` reply.
+  Requests the remote to send back an `AVAILABILITY` reply.
   If the remote replies with `UNSUPPORTED-REQUEST`, its availability
-  is asssumed to be global. So, only remotes that are only reachable
+  is assumed to be global. So, only remotes that are only reachable
   locally need to worry about implementing this.
 
 More optional requests may be added, without changing the protocol version,
diff --git a/doc/internals.mdwn b/doc/internals.mdwn
index ca2b7b4..9970a0b 100644
--- a/doc/internals.mdwn
+++ b/doc/internals.mdwn
@@ -17,7 +17,7 @@ See [[hashing]] for details.
 Each subdirectory has the [[name_of_a_key|key_format]] in one of the
 [[key-value_backends|backends]]. The file inside also has the name of the key.
 This two-level structure is used because it allows the write bit to be removed
-from the subdirectories as well as from the files. That prevents accidentially
+from the subdirectories as well as from the files. That prevents accidentally
 deleting or changing the file contents. See [[lockdown]] for details.
 
 In [[direct_mode]], file contents are not stored in here, and instead
@@ -158,7 +158,7 @@ File format is identical to preferred-content.log.
 ## `group-preferred-content.log`
 
 Contains standard preferred content settings for groups. (Overriding or
-supplimenting the ones built into git-annex.)
+supplementing the ones built into git-annex.)
 
 The file format is one line per group, staring with a timestamp, then a
 space, then the group name followed by a space and then the preferred
@@ -205,7 +205,7 @@ values.
 Lines are timestamped, and record when values are added (`field +value`),
 but also when values are removed (`field -value`). Removed values
 are retained in the log so that when merging an old line that sets a value
-that was later unset, the value is not accidentially added back.
+that was later unset, the value is not accidentally added back.
 
 For example:
 
@@ -214,8 +214,8 @@ For example:
 
 The value can be completely arbitrary data, although it's typically
 reasonably short. If the value contains any whitespace
-(including \r or \r), it will be base64 encoded. Base64 encoded values
-are indicated by prefixing them with "!" 
+(including \r or \n), it will be base64 encoded. Base64 encoded values
+are indicated by prefixing them with "!".
 
 ## `aaa/bbb/*.log.cnk`
 
@@ -237,7 +237,7 @@ Used to record scheduled events, such as periodic fscks.
 The file format is simply one line per repository, with the uuid followed by a
 space and then its schedule, followed by a timestamp.
 
-There can be multiple events in the schedule, separated by "; "
+There can be multiple events in the schedule, separated by "; ".
 
 The format of the scheduled events is the same described in
 the SCHEDULED JOBS section of the man page.

Added a comment: allow multiple urls
diff --git a/doc/todo/extensible_addurl/comment_1_5dca2eb8ee9e8676d372cd4bc6934975._comment b/doc/todo/extensible_addurl/comment_1_5dca2eb8ee9e8676d372cd4bc6934975._comment
new file mode 100644
index 0000000..857df42
--- /dev/null
+++ b/doc/todo/extensible_addurl/comment_1_5dca2eb8ee9e8676d372cd4bc6934975._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnx8kHW66N3BqmkVpgtXDlYMvr8TJ5VvfY"
+ nickname="Yaroslav"
+ subject="allow multiple urls"
+ date="2014-12-04T21:59:25Z"
+ content="""
+Echoing https://github.com/datalad/datalad/issues/9 \"parallel download of a file from multiple URLs (and special remotes?)\",
+it might be worth adding e.g.
+
+git config annex.downloader.torrent.allowmultiple True
+
+and then using a downloader which could fetch from multiple originating URLs simultaneously. With respect to aria2 (which at the moment seems not to support specifying the output filename), see
+https://github.com/tatsuhiro-t/aria2/issues/190
+"""]]

Added a comment
diff --git a/doc/internals/hashing/comment_2_086ea37acf15e2a8694b8386222b73f6._comment b/doc/internals/hashing/comment_2_086ea37acf15e2a8694b8386222b73f6._comment
new file mode 100644
index 0000000..04a3523
--- /dev/null
+++ b/doc/internals/hashing/comment_2_086ea37acf15e2a8694b8386222b73f6._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnx8kHW66N3BqmkVpgtXDlYMvr8TJ5VvfY"
+ nickname="Yaroslav"
+ subject="comment 2"
+ date="2014-12-04T20:26:47Z"
+ content="""
+1c to support  Péter's statement:
+
+    $> git annex examinekey --format='${hashdirmixed}' \"SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\"
+    pX/ZJ/%  
+"""]]

Added a comment: bup-join local-arch/2014-12-03-235617
diff --git a/doc/special_remotes/bup/comment_12_fca579678edde073716f099c767767e1._comment b/doc/special_remotes/bup/comment_12_fca579678edde073716f099c767767e1._comment
new file mode 100644
index 0000000..0babee6
--- /dev/null
+++ b/doc/special_remotes/bup/comment_12_fca579678edde073716f099c767767e1._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkEYZEqLf3Aj_FGV7S0FvsMplmGqqb555M"
+ nickname="Sergiusz"
+ subject="bup-join local-arch/2014-12-03-235617 "
+ date="2014-12-04T19:38:07Z"
+ content="""
+How can I restore the previous commit from bup archives created with bup-split? Yes, I know I can use the bup-join local-arch~1 git notation, but I would like to use bup-join local-arch/2014-12-03-235617 (as listed by bup-ls local-arch) ... but this method does not work ...
+
+s.
+"""]]

design
diff --git a/doc/todo/extensible_addurl.mdwn b/doc/todo/extensible_addurl.mdwn
new file mode 100644
index 0000000..6eb0903
--- /dev/null
+++ b/doc/todo/extensible_addurl.mdwn
@@ -0,0 +1,35 @@
+`git annex addurl` supports regular urls, as well as detecting videos that
+quvi can download. We'd like to extend this to support extensible uri
+handling. 
+
+Use cases range from torrent download support, to pulling data
+from scientific data repositories that use their own APIs.
+
+The basic idea is to have external special remotes (or perhaps built-in
+ones in some cases), which addurl can use to download an object, referred
+to by some uri-like thing. The uri starts with "$downloader:"
+
+	git annex addurl torrent:$foo
+	git annex addurl CERN:$bar
+
+Problem: This requires mapping from the name of the downloader, which is
+probably the same as the git-annex-remote-$downloader program implementing
+the special remote protocol, to the UUID of a remote. That's assuming we
+want location tracking to be able to know that a file is both available
+from CERN and from a torrent, for example.
+
+It would also be nice to be able to easily configure a regexp that normal
+urls, if they match, are made to use a particular downloader. So, for
+torrents, this would make matching urls have torrent: prefixed to them.
+
+	git config annex.downloader.torrent.regexp '(^magnet:|\.torrent$)'
+
+It might also be useful to allow bypassing the complexity of the external
+special remote interface, and let a downloader be specified simply by:
+
+	git config annex.downloader.torrent.command 'aria2c %url $file'
+
+In this case, the UUID used would be the UUID of the web special remote, I
+suppose?
+
+Some other discussion at <https://github.com/datalad/datalad/issues/10>

diff --git a/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn b/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn
new file mode 100644
index 0000000..4e5f357
--- /dev/null
+++ b/doc/bugs/When_adding_a_box.com_repo_with_the_webapp_the_old-style_chunking_is_used.mdwn
@@ -0,0 +1,3 @@
+Last week I added a new Box.com repository through the webapp with the latest git-annex version, and I've noticed that the remote uses the old chunksize param.
+
+Is this correct? AFAIK it should use the chunk param: http://git-annex.branchable.com/chunking/

really docker, 31 character size limit on image names?
diff --git a/doc/install/Docker.mdwn b/doc/install/Docker.mdwn
index aae3959..d79f951 100644
--- a/doc/install/Docker.mdwn
+++ b/doc/install/Docker.mdwn
@@ -15,7 +15,7 @@ built, it is published. `docker pull joeyh/git-annex-android-builder`
 
 So's the armel autobuilder container. 
 `docker pull joeyh/git-annex-armel-builder`, and its companion container
-`docker pull joeyh/git-annex-armel-builder-companion`
+`docker pull joeyh/git-annex-armel-companion`
 
 # building autobuilder containers using Propellor
 

devblog
diff --git a/doc/devblog/day_236__release_day.mdwn b/doc/devblog/day_236__release_day.mdwn
new file mode 100644
index 0000000..48cbce0
--- /dev/null
+++ b/doc/devblog/day_236__release_day.mdwn
@@ -0,0 +1,10 @@
+Today's release has a month's accumulated changes, including several nice
+new features: `git annex undo`, `git annex proxy`, `git annex diffdriver`,
+and I was able to land the s3-aws branch in this release too, so lots of
+improvements to the S3 support.
+
+Spent several hours getting the autobuilders updated, with the haskell
+`aws` library installed. Android and armel builds are still out of date.
+
+Also fixed two Windows bugs related to the location of the bundled ssh
+program.

Added a comment: More details.
diff --git a/doc/bugs/Uninstalling_removes_libcrypto.dll/comment_5_4741efcc77b1bf1125ef230a0278ca51._comment b/doc/bugs/Uninstalling_removes_libcrypto.dll/comment_5_4741efcc77b1bf1125ef230a0278ca51._comment
new file mode 100644
index 0000000..06b160a
--- /dev/null
+++ b/doc/bugs/Uninstalling_removes_libcrypto.dll/comment_5_4741efcc77b1bf1125ef230a0278ca51._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawktFJRkdqZvsZB59DrY-N1988HkjSIIZ5w"
+ nickname="Michał"
+ subject="More details."
+ date="2014-12-03T21:56:35Z"
+ content="""
+Just to add more info: yes, I installed git-annex when git was already installed.
+"""]]