summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Assistant/Alert.hs12
-rw-r--r--Assistant/DaemonStatus.hs5
-rw-r--r--Assistant/Drop.hs19
-rw-r--r--Assistant/NetMessager.hs4
-rw-r--r--Assistant/Threads/ConfigMonitor.hs5
-rw-r--r--Assistant/Threads/TransferScanner.hs2
-rw-r--r--Assistant/Threads/TransferWatcher.hs8
-rw-r--r--Assistant/Threads/Transferrer.hs9
-rw-r--r--Assistant/Threads/Watcher.hs11
-rw-r--r--Assistant/Threads/WebApp.hs7
-rw-r--r--Assistant/Types/DaemonStatus.hs3
-rw-r--r--Assistant/WebApp.hs40
-rw-r--r--Assistant/WebApp/Common.hs17
-rw-r--r--Assistant/WebApp/Configurators.hs67
-rw-r--r--Assistant/WebApp/Configurators/AWS.hs164
-rw-r--r--Assistant/WebApp/Configurators/Edit.hs29
-rw-r--r--Assistant/WebApp/Configurators/Local.hs41
-rw-r--r--Assistant/WebApp/Configurators/Pairing.hs19
-rw-r--r--Assistant/WebApp/Configurators/S3.hs125
-rw-r--r--Assistant/WebApp/Configurators/Ssh.hs25
-rw-r--r--Assistant/WebApp/Configurators/WebDAV.hs114
-rw-r--r--Assistant/WebApp/Configurators/XMPP.hs34
-rw-r--r--Assistant/WebApp/DashBoard.hs17
-rw-r--r--Assistant/WebApp/Documentation.hs12
-rw-r--r--Assistant/WebApp/Notifications.hs22
-rw-r--r--Assistant/WebApp/Page.hs66
-rw-r--r--Assistant/WebApp/SideBar.hs10
-rw-r--r--Assistant/WebApp/Types.hs18
-rw-r--r--Assistant/WebApp/routes7
-rw-r--r--Assistant/XMPP/Buddies.hs4
-rw-r--r--Assistant/XMPP/Client.hs25
-rw-r--r--Assistant/XMPP/Git.hs24
-rw-r--r--CHANGELOG30
-rw-r--r--Command/Drop.hs21
-rw-r--r--Command/DropUnused.hs2
-rw-r--r--Command/Find.hs2
-rw-r--r--Command/Fix.hs2
-rw-r--r--Command/SendKey.hs1
-rw-r--r--Command/Unannex.hs1
-rw-r--r--Command/Uninit.hs6
-rw-r--r--Config.hs2
-rw-r--r--Creds.hs151
-rw-r--r--Crypto.hs62
-rw-r--r--GitAnnex.hs1
-rw-r--r--Locations.hs2
-rw-r--r--Makefile2
-rw-r--r--Messages.hs43
-rw-r--r--Meters.hs40
-rw-r--r--Remote/Bup.hs5
-rw-r--r--Remote/Directory.hs155
-rw-r--r--Remote/Glacier.hs254
-rw-r--r--Remote/Helper/AWS.hs21
-rw-r--r--Remote/Helper/Chunked.hs125
-rw-r--r--Remote/Helper/Encryptable.hs33
-rw-r--r--Remote/Hook.hs6
-rw-r--r--Remote/List.hs8
-rw-r--r--Remote/Rsync.hs6
-rw-r--r--Remote/S3.hs169
-rw-r--r--Remote/WebDAV.hs334
-rw-r--r--Seek.hs12
-rw-r--r--Types.hs4
-rw-r--r--Types/Messages.hs6
-rw-r--r--Types/Meters.hs12
-rw-r--r--Types/Remote.hs8
-rw-r--r--Types/StandardGroups.hs18
-rw-r--r--Utility/Gpg.hs18
-rw-r--r--Utility/OSX.hs1
-rw-r--r--Utility/Observed.hs43
-rw-r--r--Utility/Path.hs32
-rw-r--r--Utility/Process.hs2
-rw-r--r--Utility/TempFile.hs38
-rw-r--r--Utility/ThreadScheduler.hs15
-rw-r--r--debian/changelog30
-rw-r--r--debian/control5
-rw-r--r--doc/assistant.mdwn4
-rw-r--r--doc/assistant/archival_walkthrough.mdwn28
-rw-r--r--doc/assistant/release_notes.mdwn2
-rw-r--r--doc/assistant/repogroup.pngbin0 -> 10986 bytes
-rw-r--r--doc/bugs/3.20121112:_build_error_in_assistant.mdwn432
-rw-r--r--doc/bugs/3.20121112_build_fails_on_Ubuntu_12.04.mdwn97
-rw-r--r--doc/bugs/3.20121113_build_error___39__not_in_scope_getAddBoxComR__39__.mdwn33
-rw-r--r--doc/bugs/Detection_assumes_that_shell_is_bash.mdwn2
-rw-r--r--doc/bugs/Install_of_git-annex-3.20121112_fails.mdwn20
-rw-r--r--doc/bugs/Is_there_any_way_to_rate_limit_uploads_to_an_S3_backend__63__.mdwn19
-rw-r--r--doc/bugs/another_build_error_in_assistant.mdwn79
-rw-r--r--doc/bugs/com.branchable.git-annex.assistant.plist_is_invalid.mdwn15
-rw-r--r--doc/bugs/dropping_and_re-adding_from_web_remotes_doesn__39__t_work.mdwn131
-rw-r--r--doc/bugs/fat_support/comment_8_acc947643a635eb10a1bff92083a3506._comment10
-rw-r--r--doc/bugs/glacier_with_assistant_bugs.mdwn13
-rw-r--r--doc/bugs/map_not_respecting_annex_ssh_options__63__.mdwn37
-rw-r--r--doc/bugs/migrated_files_not_showing_up_in_unused_list.mdwn3
-rw-r--r--doc/bugs/unlock_fails_silently_with_directory_symlinks.mdwn53
-rw-r--r--doc/design/assistant.mdwn2
-rw-r--r--doc/design/assistant/OSX.mdwn4
-rw-r--r--doc/design/assistant/blog/day_129__release.mdwn4
-rw-r--r--doc/design/assistant/blog/day_130__what_now.mdwn36
-rw-r--r--doc/design/assistant/blog/day_131__webdav_groundwork.mdwn28
-rw-r--r--doc/design/assistant/blog/day_132__webdav_continued.mdwn22
-rw-r--r--doc/design/assistant/blog/day_133__webdav_working.mdwn31
-rw-r--r--doc/design/assistant/blog/day_134__box.com_configurator.mdwn8
-rw-r--r--doc/design/assistant/blog/day_135__progress_revisited.mdwn37
-rw-r--r--doc/design/assistant/blog/day_136__misc.mdwn14
-rw-r--r--doc/design/assistant/blog/day_137__Glacier.mdwn30
-rw-r--r--doc/design/assistant/blog/day_138__back.mdwn25
-rw-r--r--doc/design/assistant/blog/day_139__catch_up.mdwn11
-rw-r--r--doc/design/assistant/blog/day_36__minimal_test_case.mdwn2
-rw-r--r--doc/design/assistant/cloud.mdwn17
-rw-r--r--doc/design/assistant/more_cloud_providers.mdwn16
-rw-r--r--doc/design/assistant/polls/prioritizing_special_remotes.mdwn2
-rw-r--r--doc/design/assistant/progressbars.mdwn5
-rw-r--r--doc/design/assistant/transfer_control.mdwn3
-rw-r--r--doc/design/assistant/xmpp.mdwn4
-rw-r--r--doc/forum/Building_git-annex-3.20121112-19309.mdwn78
-rw-r--r--doc/forum/Default_text__47__html_handler.mdwn2
-rw-r--r--doc/forum/How_to_restore_symlinks.mdwn1
-rw-r--r--doc/forum/Push__47__Pull_with_the_Assistant.mdwn1
-rw-r--r--doc/forum/Setup_of_rsync_special_remote_with_non-standard_ssh_port.mdwn13
-rw-r--r--doc/forum/Special_remote_without_chmod.mdwn12
-rw-r--r--doc/forum/get_and_copy_with_bare_repositories.mdwn7
-rw-r--r--doc/git-annex.mdwn29
-rw-r--r--doc/install/Debian/comment_5_38e6399083e10a6a274f35bddc15d4ac._comment18
-rw-r--r--doc/install/Debian/comment_6_2e7bbdbaabbfb9d89de22e913066e822._comment8
-rw-r--r--doc/install/OSX/comment_10_4d15bfc4fc26e7249953bebfbb09e0aa._comment11
-rw-r--r--doc/install/OSX/comment_9_c6b1b31d16f2144ad08abd8c767b6ab9._comment23
-rw-r--r--doc/install/comment_2_fd560811c57df5cbc3976639642b8b19._comment8
-rw-r--r--doc/install/comment_3_08613b2e2318680508483d204a43da76._comment75
-rw-r--r--doc/install/fromscratch.mdwn3
-rw-r--r--doc/news/version_3.20121009.mdwn25
-rw-r--r--doc/news/version_3.20121126.mdwn27
-rw-r--r--doc/preferred_content.mdwn23
-rw-r--r--doc/special_remotes.mdwn3
-rw-r--r--doc/special_remotes/S3.mdwn28
-rw-r--r--doc/special_remotes/bup.mdwn8
-rw-r--r--doc/special_remotes/directory.mdwn12
-rw-r--r--doc/special_remotes/glacier.mdwn50
-rw-r--r--doc/special_remotes/hook.mdwn10
-rw-r--r--doc/special_remotes/rsync.mdwn12
-rw-r--r--doc/special_remotes/webdav.mdwn45
-rw-r--r--doc/tips/using_Amazon_Glacier.mdwn75
-rw-r--r--doc/tips/using_Amazon_S3.mdwn2
-rw-r--r--doc/tips/using_box.com_as_a_special_remote.mdwn15
-rw-r--r--doc/todo/resuming_encrypted_uploads.mdwn22
-rw-r--r--doc/todo/special_remote_for_amazon_glacier.mdwn5
-rw-r--r--git-annex.126
-rw-r--r--git-annex.cabal24
-rw-r--r--templates/bootstrap.hamlet8
-rw-r--r--templates/configurators/addbox.com.hamlet26
-rw-r--r--templates/configurators/adddrive.hamlet2
-rw-r--r--templates/configurators/addglacier.hamlet40
-rw-r--r--templates/configurators/addrsync.net.hamlet2
-rw-r--r--templates/configurators/adds3.hamlet2
-rw-r--r--templates/configurators/editrepository.hamlet2
-rw-r--r--templates/configurators/enableaws.hamlet (renamed from templates/configurators/enables3.hamlet)12
-rw-r--r--templates/configurators/enablewebdav.hamlet22
-rw-r--r--templates/configurators/needglaciercli.hamlet10
-rw-r--r--templates/configurators/pairing/local/prompt.hamlet2
-rw-r--r--templates/configurators/pairing/xmpp/end.hamlet39
-rw-r--r--templates/configurators/repositories.hamlet4
-rw-r--r--templates/configurators/repositories/cloud.hamlet16
-rw-r--r--templates/configurators/repositories/list.hamlet36
-rw-r--r--templates/configurators/repositories/table.hamlet25
-rw-r--r--templates/configurators/ssh/add.hamlet2
-rw-r--r--templates/configurators/ssh/enable.hamlet2
-rw-r--r--templates/configurators/xmpp.hamlet8
164 files changed, 3982 insertions, 951 deletions
diff --git a/Assistant/Alert.hs b/Assistant/Alert.hs
index 7e825d8..e953e1a 100644
--- a/Assistant/Alert.hs
+++ b/Assistant/Alert.hs
@@ -18,7 +18,6 @@ import qualified Data.Text as T
import Data.Text (Text)
import qualified Data.Map as M
import Data.String
-import Yesod
{- Different classes of alerts are displayed differently. -}
data AlertClass = Success | Message | Activity | Warning | Error
@@ -57,17 +56,6 @@ data Alert = Alert
data AlertIcon = ActivityIcon | SuccessIcon | ErrorIcon | InfoIcon | TheCloud
-htmlIcon :: AlertIcon -> GWidget sub master ()
-htmlIcon ActivityIcon = bootStrapIcon "refresh"
-htmlIcon InfoIcon = bootStrapIcon "info-sign"
-htmlIcon SuccessIcon = bootStrapIcon "ok"
-htmlIcon ErrorIcon = bootStrapIcon "exclamation-sign"
--- utf-8 umbrella (utf-8 cloud looks too stormy)
-htmlIcon TheCloud = [whamlet|☂|]
-
-bootStrapIcon :: Text -> GWidget sub master ()
-bootStrapIcon name = [whamlet|<i .icon-#{name}></i>|]
-
{- When clicked, a button always redirects to a URL
- It may also run an IO action in the background, which is useful
- to make the button close or otherwise change the alert. -}
diff --git a/Assistant/DaemonStatus.hs b/Assistant/DaemonStatus.hs
index 8a4a7a1..cb9133b 100644
--- a/Assistant/DaemonStatus.hs
+++ b/Assistant/DaemonStatus.hs
@@ -44,6 +44,7 @@ modifyDaemonStatus a = do
sendNotification $ changeNotifier s
return b
+
{- Returns a function that updates the lists of syncable remotes. -}
calcSyncRemotes :: Annex (DaemonStatus -> DaemonStatus)
calcSyncRemotes = do
@@ -60,7 +61,9 @@ calcSyncRemotes = do
{- Updates the sycRemotes list from the list of all remotes in Annex state. -}
updateSyncRemotes :: Assistant ()
-updateSyncRemotes = modifyDaemonStatus_ =<< liftAnnex calcSyncRemotes
+updateSyncRemotes = do
+ modifyDaemonStatus_ =<< liftAnnex calcSyncRemotes
+ liftIO . sendNotification =<< syncRemotesNotifier <$> getDaemonStatus
{- Load any previous daemon status file, and store it in a MVar for this
- process to use as its DaemonStatus. Also gets current transfer status. -}
diff --git a/Assistant/Drop.hs b/Assistant/Drop.hs
index 66e738a..4551f49 100644
--- a/Assistant/Drop.hs
+++ b/Assistant/Drop.hs
@@ -19,18 +19,19 @@ import Annex.Wanted
import Config
{- Drop from local and/or remote when allowed by the preferred content and
- - numcopies settings. -}
-handleDrops :: Bool -> Key -> AssociatedFile -> Assistant ()
-handleDrops _ _ Nothing = noop
-handleDrops fromhere key f = do
+ - numcopies settings. If it's known to be present on a particular remote,
+ - -}
+handleDrops :: Bool -> Key -> AssociatedFile -> Maybe Remote -> Assistant ()
+handleDrops _ _ Nothing _ = noop
+handleDrops fromhere key f knownpresentremote = do
syncrs <- syncDataRemotes <$> getDaemonStatus
liftAnnex $ do
locs <- loggedLocations key
- handleDrops' locs syncrs fromhere key f
+ handleDrops' locs syncrs fromhere key f knownpresentremote
-handleDrops' :: [UUID] -> [Remote] -> Bool -> Key -> AssociatedFile -> Annex ()
-handleDrops' _ _ _ _ Nothing = noop
-handleDrops' locs rs fromhere key (Just f)
+handleDrops' :: [UUID] -> [Remote] -> Bool -> Key -> AssociatedFile -> Maybe Remote -> Annex ()
+handleDrops' _ _ _ _ Nothing _ = noop
+handleDrops' locs rs fromhere key (Just f) knownpresentremote
| fromhere = do
n <- getcopies
if checkcopies n
@@ -59,7 +60,7 @@ handleDrops' locs rs fromhere key (Just f)
)
dropl n = checkdrop n Nothing $ \numcopies ->
- Command.Drop.startLocal f numcopies key
+ Command.Drop.startLocal f numcopies key knownpresentremote
dropr r n = checkdrop n (Just $ Remote.uuid r) $ \numcopies ->
Command.Drop.startRemote f numcopies key r
diff --git a/Assistant/NetMessager.hs b/Assistant/NetMessager.hs
index d9450ad..2191e06 100644
--- a/Assistant/NetMessager.hs
+++ b/Assistant/NetMessager.hs
@@ -9,15 +9,12 @@ module Assistant.NetMessager where
import Assistant.Common
import Assistant.Types.NetMessager
-import qualified Types.Remote as Remote
-import qualified Git
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.MSampleVar
import Control.Exception as E
import qualified Data.Set as S
-import qualified Data.Text as T
sendNetMessage :: NetMessage -> Assistant ()
sendNetMessage m =
@@ -95,3 +92,4 @@ queueNetPushMessage _ = return False
waitNetPushMessage :: PushSide -> Assistant (NetMessage)
waitNetPushMessage side = (atomically . readTChan)
<<~ (getSide side . netMessagesPush . netMessager)
+
diff --git a/Assistant/Threads/ConfigMonitor.hs b/Assistant/Threads/ConfigMonitor.hs
index 2d012ad..47e1971 100644
--- a/Assistant/Threads/ConfigMonitor.hs
+++ b/Assistant/Threads/ConfigMonitor.hs
@@ -72,8 +72,9 @@ reloadConfigs changedconfigs = do
sequence_ as
void preferredContentMapLoad
{- Changes to the remote log, or the trust log, can affect the
- - syncRemotes list -}
- when (Logs.Remote.remoteLog `elem` fs || Logs.Trust.trustLog `elem` fs) $
+ - syncRemotes list. Changes to the uuid log may affect its
+ - display so are also included. -}
+ when (any (`elem` fs) [remoteLog, trustLog, uuidLog]) $
updateSyncRemotes
where
(fs, as) = unzip $ filter (flip S.member changedfiles . fst)
diff --git a/Assistant/Threads/TransferScanner.hs b/Assistant/Threads/TransferScanner.hs
index 918a266..6cbb5cc 100644
--- a/Assistant/Threads/TransferScanner.hs
+++ b/Assistant/Threads/TransferScanner.hs
@@ -119,7 +119,7 @@ expensiveScan rs = unless onlyweb $ do
locs <- loggedLocations key
present <- inAnnex key
- handleDrops' locs syncrs present key (Just f)
+ handleDrops' locs syncrs present key (Just f) Nothing
let slocs = S.fromList locs
let use a = return $ catMaybes $ map (a key slocs) syncrs
diff --git a/Assistant/Threads/TransferWatcher.hs b/Assistant/Threads/TransferWatcher.hs
index 7deafb1..6f040ba 100644
--- a/Assistant/Threads/TransferWatcher.hs
+++ b/Assistant/Threads/TransferWatcher.hs
@@ -102,11 +102,11 @@ onDel file = case parseTransferFile file of
threadDelay 10000000 -- 10 seconds
finished t minfo
-{- Queue uploads of files we successfully downloaded, spreading them
+{- Queue uploads of files downloaded to us, spreading them
- out to other reachable remotes.
-
- Downloading a file may have caused a remote to not want it;
- - so drop it from the remote.
+ - so check for drops from remotes.
-
- Uploading a file may cause the local repo, or some other remote to not
- want it; handle that too.
@@ -115,9 +115,9 @@ finishedTransfer :: Transfer -> Maybe TransferInfo -> Assistant ()
finishedTransfer t (Just info)
| transferDirection t == Download =
whenM (liftAnnex $ inAnnex $ transferKey t) $ do
- handleDrops False (transferKey t) (associatedFile info)
+ handleDrops False (transferKey t) (associatedFile info) Nothing
queueTransfersMatching (/= transferUUID t) Later
(transferKey t) (associatedFile info) Upload
- | otherwise = handleDrops True (transferKey t) (associatedFile info)
+ | otherwise = handleDrops True (transferKey t) (associatedFile info) Nothing
finishedTransfer _ _ = noop
diff --git a/Assistant/Threads/Transferrer.hs b/Assistant/Threads/Transferrer.hs
index 1d23487..5c70565 100644
--- a/Assistant/Threads/Transferrer.hs
+++ b/Assistant/Threads/Transferrer.hs
@@ -13,6 +13,7 @@ import Assistant.TransferQueue
import Assistant.TransferSlots
import Assistant.Alert
import Assistant.Commits
+import Assistant.Drop
import Logs.Transfer
import Logs.Location
import Annex.Content
@@ -65,6 +66,10 @@ startTransfer program t info = case (transferRemote info, associatedFile info) o
- so there's no point in bothering the user about
- those. The assistant should recover.
-
+ - After a successful upload, handle dropping it from
+ - here, if desired. In this case, the remote it was
+ - uploaded to is known to have it.
+ -
- Also, after a successful transfer, the location
- log has changed. Indicate that a commit has been
- made, in order to queue a push of the git-annex
@@ -74,6 +79,10 @@ startTransfer program t info = case (transferRemote info, associatedFile info) o
whenM (liftIO $ (==) ExitSuccess <$> waitForProcess pid) $ do
void $ addAlert $ makeAlertFiller True $
transferFileAlert direction True file
+ unless isdownload $
+ handleDrops True (transferKey t)
+ (associatedFile info)
+ (Just remote)
recordCommit
where
params =
diff --git a/Assistant/Threads/Watcher.hs b/Assistant/Threads/Watcher.hs
index a74976d..f7e4e2d 100644
--- a/Assistant/Threads/Watcher.hs
+++ b/Assistant/Threads/Watcher.hs
@@ -178,13 +178,16 @@ onAddSymlink file filestatus = go =<< liftAnnex (Backend.lookupFile file)
madeChange file LinkChange
{- When a new link appears, or a link is changed, after the startup
- - scan, handle getting or dropping the key's content. -}
+ - scan, handle getting or dropping the key's content.
+ - Also, moving or copying a link may caused it be be transferred
+ - elsewhere, so check that too. -}
checkcontent key daemonstatus
| scanComplete daemonstatus = do
present <- liftAnnex $ inAnnex key
- unless present $
- queueTransfers Next key (Just file) Download
- handleDrops present key (Just file)
+ if present
+ then queueTransfers Next key (Just file) Upload
+ else queueTransfers Next key (Just file) Download
+ handleDrops present key (Just file) Nothing
| otherwise = noop
onDel :: Handler
diff --git a/Assistant/Threads/WebApp.hs b/Assistant/Threads/WebApp.hs
index be9a9a1..a91a198 100644
--- a/Assistant/Threads/WebApp.hs
+++ b/Assistant/Threads/WebApp.hs
@@ -5,7 +5,7 @@
- Licensed under the GNU GPL version 3 or higher.
-}
-{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes, CPP #-}
+{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Assistant.Threads.WebApp where
@@ -21,9 +21,8 @@ import Assistant.WebApp.Configurators.Edit
import Assistant.WebApp.Configurators.Local
import Assistant.WebApp.Configurators.Ssh
import Assistant.WebApp.Configurators.Pairing
-#ifdef WITH_S3
-import Assistant.WebApp.Configurators.S3
-#endif
+import Assistant.WebApp.Configurators.AWS
+import Assistant.WebApp.Configurators.WebDAV
import Assistant.WebApp.Configurators.XMPP
import Assistant.WebApp.Documentation
import Assistant.WebApp.OtherRepos
diff --git a/Assistant/Types/DaemonStatus.hs b/Assistant/Types/DaemonStatus.hs
index df0928d..7f868d9 100644
--- a/Assistant/Types/DaemonStatus.hs
+++ b/Assistant/Types/DaemonStatus.hs
@@ -47,6 +47,8 @@ data DaemonStatus = DaemonStatus
, transferNotifier :: NotificationBroadcaster
-- Broadcasts notifications when there's a change to the alerts
, alertNotifier :: NotificationBroadcaster
+ -- Broadcasts notifications when the syncRemotes change
+ , syncRemotesNotifier :: NotificationBroadcaster
}
type TransferMap = M.Map Transfer TransferInfo
@@ -70,3 +72,4 @@ newDaemonStatus = DaemonStatus
<*> newNotificationBroadcaster
<*> newNotificationBroadcaster
<*> newNotificationBroadcaster
+ <*> newNotificationBroadcaster
diff --git a/Assistant/WebApp.hs b/Assistant/WebApp.hs
index 325f27f..c311cb2 100644
--- a/Assistant/WebApp.hs
+++ b/Assistant/WebApp.hs
@@ -16,53 +16,13 @@ import Utility.Yesod
import Locations.UserConfig
import Yesod
-import Text.Hamlet
import Data.Text (Text)
import Control.Concurrent.STM
import Control.Concurrent
-data NavBarItem = DashBoard | Config | About
- deriving (Eq)
-
-navBarName :: NavBarItem -> Text
-navBarName DashBoard = "Dashboard"
-navBarName Config = "Configuration"
-navBarName About = "About"
-
-navBarRoute :: NavBarItem -> Route WebApp
-navBarRoute DashBoard = HomeR
-navBarRoute Config = ConfigR
-navBarRoute About = AboutR
-
-defaultNavBar :: [NavBarItem]
-defaultNavBar = [DashBoard, Config, About]
-
-firstRunNavBar :: [NavBarItem]
-firstRunNavBar = [Config, About]
-
-selectNavBar :: Handler [NavBarItem]
-selectNavBar = ifM (inFirstRun) (return firstRunNavBar, return defaultNavBar)
-
inFirstRun :: Handler Bool
inFirstRun = isNothing . relDir <$> getYesod
-{- Used instead of defaultContent; highlights the current page if it's
- - on the navbar. -}
-bootstrap :: Maybe NavBarItem -> Widget -> Handler RepHtml
-bootstrap navbaritem content = do
- webapp <- getYesod
- navbar <- map navdetails <$> selectNavBar
- page <- widgetToPageContent $ do
- addStylesheet $ StaticR css_bootstrap_css
- addStylesheet $ StaticR css_bootstrap_responsive_css
- addScript $ StaticR jquery_full_js
- addScript $ StaticR js_bootstrap_dropdown_js
- addScript $ StaticR js_bootstrap_modal_js
- $(widgetFile "page")
- hamletToRepHtml $(hamletFile $ hamletTemplate "bootstrap")
- where
- navdetails i = (navBarName i, navBarRoute i, Just i == navbaritem)
-
newWebAppState :: IO (TMVar WebAppState)
newWebAppState = do
otherrepos <- listOtherRepos
diff --git a/Assistant/WebApp/Common.hs b/Assistant/WebApp/Common.hs
new file mode 100644
index 0000000..b8f3782
--- /dev/null
+++ b/Assistant/WebApp/Common.hs
@@ -0,0 +1,17 @@
+{- git-annex assistant webapp, common imports
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU AGPL version 3 or higher.
+ -}
+
+module Assistant.WebApp.Common (module X) where
+
+import Assistant.Common as X
+import Assistant.WebApp as X
+import Assistant.WebApp.Page as X
+import Assistant.WebApp.Types as X
+import Utility.Yesod as X
+
+import Data.Text as X (Text)
+
diff --git a/Assistant/WebApp/Configurators.hs b/Assistant/WebApp/Configurators.hs
index 89ce503..eaf0212 100644
--- a/Assistant/WebApp/Configurators.hs
+++ b/Assistant/WebApp/Configurators.hs
@@ -9,14 +9,11 @@
module Assistant.WebApp.Configurators where
-import Assistant.Common
+import Assistant.WebApp.Common
import Assistant.DaemonStatus
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
+import Assistant.WebApp.Notifications
import Assistant.WebApp.Utility
import Assistant.WebApp.Configurators.Local
-import Utility.Yesod
import qualified Remote
import qualified Types.Remote as Remote
import Annex.UUID (getUUID)
@@ -29,21 +26,18 @@ import Assistant.XMPP.Client
#endif
import Yesod
-import Data.Text (Text)
import qualified Data.Map as M
{- The main configuration screen. -}
getConfigR :: Handler RepHtml
getConfigR = ifM (inFirstRun)
( getFirstRepositoryR
- , bootstrap (Just Config) $ do
+ , page "Configuration" (Just Config) $ do
#ifdef WITH_XMPP
xmppconfigured <- lift $ runAnnex False $ isJust <$> getXMPPCreds
#else
let xmppconfigured = False
#endif
- sideBarDisplay
- setTitle "Configuration"
$(widgetFile "configurators/main")
)
@@ -51,7 +45,11 @@ getConfigR = ifM (inFirstRun)
introDisplay :: Text -> Widget
introDisplay ident = do
webapp <- lift getYesod
- repolist <- lift $ repoList False True False
+ repolist <- lift $ repoList $ RepoSelector
+ { onlyCloud = False
+ , onlyConfigured = True
+ , includeHere = False
+ }
let n = length repolist
let numrepos = show n
$(widgetFile "configurators/intro")
@@ -65,10 +63,12 @@ makeCloudRepositories = $(widgetFile "configurators/repositories/cloud")
{- Lists known repositories, followed by options to add more. -}
getRepositoriesR :: Handler RepHtml
-getRepositoriesR = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Repositories"
- repolist <- lift $ repoList False False True
+getRepositoriesR = page "Repositories" (Just Config) $ do
+ let repolist = repoListDisplay $ RepoSelector
+ { onlyCloud = False
+ , onlyConfigured = False
+ , includeHere = True
+ }
$(widgetFile "configurators/repositories")
data Actions
@@ -103,16 +103,35 @@ notSyncing :: Actions -> Bool
notSyncing (SyncingRepoActions _ _) = False
notSyncing _ = True
-repoTable :: RepoList -> Widget
-repoTable repolist = $(widgetFile "configurators/repositories/table")
+{- Called by client to get a list of repos, that refreshes
+ - when new repos as added.
+ -
+ - Returns a div, which will be inserted into the calling page.
+ -}
+getRepoListR :: RepoListNotificationId -> Handler RepHtml
+getRepoListR (RepoListNotificationId nid reposelector) = do
+ waitNotifier getRepoListBroadcaster nid
+ p <- widgetToPageContent $ repoListDisplay reposelector
+ hamletToRepHtml $ [hamlet|^{pageBody p}|]
+
+repoListDisplay :: RepoSelector -> Widget
+repoListDisplay reposelector = do
+ autoUpdate ident (NotifierRepoListR reposelector) (10 :: Int) (10 :: Int)
+
+ repolist <- lift $ repoList reposelector
+
+ $(widgetFile "configurators/repositories/list")
+
+ where
+ ident = "repolist"
type RepoList = [(String, String, Actions)]
{- A numbered list of known repositories,
- with actions that can be taken on them. -}
-repoList :: Bool -> Bool -> Bool -> Handler RepoList
-repoList onlycloud onlyconfigured includehere
- | onlyconfigured = list =<< configured
+repoList :: RepoSelector -> Handler RepoList
+repoList reposelector
+ | onlyConfigured reposelector = list =<< configured
| otherwise = list =<< (++) <$> configured <*> rest
where
configured = do
@@ -121,7 +140,7 @@ repoList onlycloud onlyconfigured includehere
runAnnex [] $ do
u <- getUUID
let l = map Remote.uuid rs
- let l' = if includehere then u : l else l
+ let l' = if includeHere reposelector then u : l else l
return $ zip l' $ map mkSyncingRepoActions l'
rest = runAnnex [] $ do
m <- readRemoteLog
@@ -134,11 +153,11 @@ repoList onlycloud onlyconfigured includehere
return $ zip unsyncable (map mkNotSyncingRepoActions unsyncable) ++ unconfigured
wantedrepo r
| Remote.readonly r = False
- | onlycloud = Git.repoIsUrl (Remote.repo r) && not (isXMPPRemote r)
+ | onlyCloud reposelector = Git.repoIsUrl (Remote.repo r) && not (isXMPPRemote r)
| otherwise = True
wantedremote Nothing = False
wantedremote (Just (iscloud, _))
- | onlycloud = iscloud
+ | onlyCloud reposelector = iscloud
| otherwise = True
findinfo m u = case M.lookup u m of
Nothing -> Nothing
@@ -148,6 +167,10 @@ repoList onlycloud onlyconfigured includehere
#ifdef WITH_S3
Just "S3" -> val True EnableS3R
#endif
+ Just "glacier" -> val True EnableGlacierR
+#ifdef WITH_WEBDAV
+ Just "webdav" -> val True EnableWebDAVR
+#endif
_ -> Nothing
where
val iscloud r = Just (iscloud, (u, DisabledRepoActions $ r u))
diff --git a/Assistant/WebApp/Configurators/AWS.hs b/Assistant/WebApp/Configurators/AWS.hs
new file mode 100644
index 0000000..56a95d1
--- /dev/null
+++ b/Assistant/WebApp/Configurators/AWS.hs
@@ -0,0 +1,164 @@
+{- git-annex assistant webapp configurators for Amazon AWS services
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU AGPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP, TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
+
+module Assistant.WebApp.Configurators.AWS where
+
+import Assistant.WebApp.Common
+import Assistant.MakeRemote
+import Assistant.Sync
+#ifdef WITH_S3
+import qualified Remote.S3 as S3
+#endif
+import qualified Remote.Glacier as Glacier
+import qualified Remote.Helper.AWS as AWS
+import Logs.Remote
+import qualified Remote
+import Types.Remote (RemoteConfig)
+import Types.StandardGroups
+import Logs.PreferredContent
+
+import Yesod
+import qualified Data.Text as T
+import qualified Data.Map as M
+
+awsConfigurator :: Widget -> Handler RepHtml
+awsConfigurator = page "Add an Amazon repository" (Just Config)
+
+glacierConfigurator :: Widget -> Handler RepHtml
+glacierConfigurator a = do
+ ifM (liftIO $ inPath "glacier")
+ ( awsConfigurator a
+ , awsConfigurator needglaciercli
+ )
+ where
+ needglaciercli = $(widgetFile "configurators/needglaciercli")
+
+data StorageClass = StandardRedundancy | ReducedRedundancy
+ deriving (Eq, Enum, Bounded)
+
+instance Show StorageClass where
+ show StandardRedundancy = "STANDARD"
+ show ReducedRedundancy = "REDUCED_REDUNDANCY"
+
+data AWSInput = AWSInput
+ { accessKeyID :: Text
+ , secretAccessKey :: Text
+ -- Free form text for datacenter because Amazon adds new ones.
+ , datacenter :: Text
+ -- Only used for S3, not Glacier.
+ , storageClass :: StorageClass
+ , repoName :: Text
+ }
+
+data AWSCreds = AWSCreds Text Text
+
+extractCreds :: AWSInput -> AWSCreds
+extractCreds i = AWSCreds (accessKeyID i) (secretAccessKey i)
+
+s3InputAForm :: AForm WebApp WebApp AWSInput
+s3InputAForm = AWSInput
+ <$> areq textField "Access Key ID" Nothing
+ <*> areq passwordField "Secret Access Key" Nothing
+ <*> areq textField "Datacenter" (Just "US")
+ <*> areq (selectFieldList storageclasses) "Storage class" (Just StandardRedundancy)
+ <*> areq textField "Repository name" (Just "S3")
+ where
+ storageclasses :: [(Text, StorageClass)]
+ storageclasses =
+ [ ("Standard redundancy", StandardRedundancy)
+ , ("Reduced redundancy (costs less)", ReducedRedundancy)
+ ]
+
+glacierInputAForm :: AForm WebApp WebApp AWSInput
+glacierInputAForm = AWSInput
+ <$> areq textField "Access Key ID" Nothing
+ <*> areq passwordField "Secret Access Key" Nothing
+ <*> areq textField "Datacenter" (Just "us-east-1")
+ <*> pure StandardRedundancy
+ <*> areq textField "Repository name" (Just "glacier")
+
+awsCredsAForm :: AForm WebApp WebApp AWSCreds
+awsCredsAForm = AWSCreds
+ <$> areq textField "Access Key ID" Nothing
+ <*> areq passwordField "Secret Access Key" Nothing
+
+getAddS3R :: Handler RepHtml
+#ifdef WITH_S3
+getAddS3R = awsConfigurator $ do
+ ((result, form), enctype) <- lift $
+ runFormGet $ renderBootstrap s3InputAForm
+ case result of
+ FormSuccess input -> lift $ do
+ let name = T.unpack $ repoName input
+ makeAWSRemote S3.remote (extractCreds input) name setgroup $ M.fromList
+ [ ("encryption", "shared")
+ , ("type", "S3")
+ , ("datacenter", T.unpack $ datacenter input)
+ , ("storageclass", show $ storageClass input)
+ ]
+ _ -> $(widgetFile "configurators/adds3")
+ where
+ setgroup r = runAnnex () $
+ setStandardGroup (Remote.uuid r) TransferGroup
+#else
+getAddS3R = error "S3 not supported by this build"
+#endif
+
+getAddGlacierR :: Handler RepHtml
+getAddGlacierR = glacierConfigurator $ do
+ ((result, form), enctype) <- lift $
+ runFormGet $ renderBootstrap glacierInputAForm
+ case result of
+ FormSuccess input -> lift $ do
+ let name = T.unpack $ repoName input
+ makeAWSRemote Glacier.remote (extractCreds input) name setgroup $ M.fromList
+ [ ("encryption", "shared")
+ , ("type", "glacier")
+ , ("datacenter", T.unpack $ datacenter input)
+ ]
+ _ -> $(widgetFile "configurators/addglacier")
+ where
+ setgroup r = runAnnex () $
+ setStandardGroup (Remote.uuid r) SmallArchiveGroup
+
+getEnableS3R :: UUID -> Handler RepHtml
+#ifdef WITH_S3
+getEnableS3R = awsConfigurator . enableAWSRemote S3.remote
+#else
+getEnableS3R _ = error "S3 not supported by this build"
+#endif
+
+getEnableGlacierR :: UUID -> Handler RepHtml
+getEnableGlacierR = glacierConfigurator . enableAWSRemote Glacier.remote
+
+enableAWSRemote :: RemoteType -> UUID -> Widget
+enableAWSRemote remotetype uuid = do
+ ((result, form), enctype) <- lift $
+ runFormGet $ renderBootstrap awsCredsAForm
+ case result of
+ FormSuccess creds -> lift $ do
+ m <- runAnnex M.empty readRemoteLog
+ let name = fromJust $ M.lookup "name" $
+ fromJust $ M.lookup uuid m
+ makeAWSRemote remotetype creds name (const noop) M.empty
+ _ -> do
+ description <- lift $ runAnnex "" $
+ T.pack . concat <$> Remote.prettyListUUIDs [uuid]
+ $(widgetFile "configurators/enableaws")
+
+makeAWSRemote :: RemoteType -> AWSCreds -> String -> (Remote -> Handler ()) -> RemoteConfig -> Handler ()
+makeAWSRemote remotetype (AWSCreds ak sk) name setup config = do
+ remotename <- runAnnex name $ fromRepo $ uniqueRemoteName name 0
+ liftIO $ AWS.setCredsEnv (T.unpack ak, T.unpack sk)
+ r <- liftAssistant $ liftAnnex $ addRemote $ do
+ makeSpecialRemote name remotetype config
+ return remotename
+ setup r
+ liftAssistant $ syncNewRemote r
+ redirect $ EditNewCloudRepositoryR $ Remote.uuid r
diff --git a/Assistant/WebApp/Configurators/Edit.hs b/Assistant/WebApp/Configurators/Edit.hs
index e732123..ca939fe 100644
--- a/Assistant/WebApp/Configurators/Edit.hs
+++ b/Assistant/WebApp/Configurators/Edit.hs
@@ -9,15 +9,11 @@
module Assistant.WebApp.Configurators.Edit where
-import Assistant.Common
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
+import Assistant.WebApp.Common
import Assistant.WebApp.Utility
import Assistant.DaemonStatus
import Assistant.MakeRemote (uniqueRemoteName)
import Assistant.WebApp.Configurators.XMPP (xmppNeeded)
-import Utility.Yesod
import qualified Remote
import qualified Remote.List as Remote
import Logs.UUID
@@ -30,7 +26,6 @@ import qualified Git.Command
import qualified Git.Config
import Yesod
-import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Data.Set as S
@@ -116,21 +111,33 @@ getEditNewCloudRepositoryR :: UUID -> Handler RepHtml
getEditNewCloudRepositoryR uuid = xmppNeeded >> editForm True uuid
editForm :: Bool -> UUID -> Handler RepHtml
-editForm new uuid = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Configure repository"
-
+editForm new uuid = page "Configure repository" (Just Config) $ do
(repo, mremote) <- lift $ runAnnex undefined $ Remote.repoFromUUID uuid
curr <- lift $ runAnnex undefined $ getRepoConfig uuid repo mremote
+ lift $ checkarchivedirectory curr
((result, form), enctype) <- lift $
runFormGet $ renderBootstrap $ editRepositoryAForm curr
case result of
FormSuccess input -> lift $ do
+ checkarchivedirectory input
setRepoConfig uuid mremote curr input
redirect RepositoriesR
_ -> showform form enctype curr
where
showform form enctype curr = do
let istransfer = repoGroup curr == RepoGroupStandard TransferGroup
- let authtoken = webAppFormAuthToken
$(widgetFile "configurators/editrepository")
+
+ {- Makes a toplevel archive directory, so the user can get on with
+ - using it. This is done both when displaying the form, as well
+ - as after it's posted, because the user may not post the form,
+ - but may see that the repo is set up to use the archive
+ - directory. -}
+ checkarchivedirectory cfg
+ | repoGroup cfg == RepoGroupStandard SmallArchiveGroup = go
+ | repoGroup cfg == RepoGroupStandard FullArchiveGroup = go
+ | otherwise = noop
+ where
+ go = runAnnex undefined $ inRepo $ \g ->
+ createDirectoryIfMissing True $
+ Git.repoPath g </> "archive"
diff --git a/Assistant/WebApp/Configurators/Local.hs b/Assistant/WebApp/Configurators/Local.hs
index f146504..2468da5 100644
--- a/Assistant/WebApp/Configurators/Local.hs
+++ b/Assistant/WebApp/Configurators/Local.hs
@@ -5,17 +5,13 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
-{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
+{-# LANGUAGE CPP, TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.Configurators.Local where
-import Assistant.Common
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
+import Assistant.WebApp.Common
import Assistant.WebApp.Utility
import Assistant.MakeRemote
-import Utility.Yesod
import Init
import qualified Git
import qualified Git.Construct
@@ -35,7 +31,6 @@ import Logs.PreferredContent
import Utility.UserInfo
import Yesod
-import Data.Text (Text)
import qualified Data.Text as T
import Data.Char
import System.Posix.Directory
@@ -49,7 +44,17 @@ data RepositoryPath = RepositoryPath Text
- Validates that the path entered is not empty, and is a safe value
- to use as a repository. -}
repositoryPathField :: forall sub. Bool -> Field sub WebApp Text
-repositoryPathField autofocus = Field { fieldParse = parse, fieldView = view }
+repositoryPathField autofocus = Field
+#ifdef WITH_OLD_YESOD
+ { fieldParse = parse
+#else
+ { fieldParse = \l _ -> parse l
+#endif
+ , fieldView = view
+#ifndef WITH_OLD_YESOD
+ , fieldEnctype = UrlEncoded
+#endif
+ }
where
view idAttr nameAttr attrs val isReq =
[whamlet|<input type="text" *{attrs} id="#{idAttr}" name="#{nameAttr}" :isReq:required :autofocus:autofocus value="#{either id id val}">|]
@@ -123,9 +128,7 @@ newRepositoryForm defpath msg = do
{- Making the first repository, when starting the webapp for the first time. -}
getFirstRepositoryR :: Handler RepHtml
-getFirstRepositoryR = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Getting started"
+getFirstRepositoryR = page "Getting started" (Just Config) $ do
path <- liftIO . defaultRepositoryPath =<< lift inFirstRun
((res, form), enctype) <- lift $ runFormGet $ newRepositoryForm path
case res of
@@ -135,9 +138,7 @@ getFirstRepositoryR = bootstrap (Just Config) $ do
{- Adding a new, separate repository. -}
getNewRepositoryR :: Handler RepHtml
-getNewRepositoryR = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Add another repository"
+getNewRepositoryR = page "Add another repository" (Just Config) $ do
home <- liftIO myHomeDir
((res, form), enctype) <- lift $ runFormGet $ newRepositoryForm home
case res of
@@ -174,9 +175,7 @@ selectDriveForm drives def = renderBootstrap $ RemovableDrive
{- Adding a removable drive. -}
getAddDriveR :: Handler RepHtml
-getAddDriveR = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Add a removable drive"
+getAddDriveR = page "Add a removable drive" (Just Config) $ do
removabledrives <- liftIO $ driveList
writabledrives <- liftIO $
filterM (canWrite . T.unpack . mountPoint) removabledrives
@@ -185,9 +184,7 @@ getAddDriveR = bootstrap (Just Config) $ do
case res of
FormSuccess (RemovableDrive { mountPoint = d }) -> lift $
make (T.unpack d) >>= redirect . EditNewRepositoryR
- _ -> do
- let authtoken = webAppFormAuthToken
- $(widgetFile "configurators/adddrive")
+ _ -> $(widgetFile "configurators/adddrive")
where
make mountpoint = do
liftIO $ makerepo dir
@@ -216,9 +213,7 @@ getAddDriveR = bootstrap (Just Config) $ do
addRemote $ makeGitRemote name dir
getEnableDirectoryR :: UUID -> Handler RepHtml
-getEnableDirectoryR uuid = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Enable a repository"
+getEnableDirectoryR uuid = page "Enable a repository" (Just Config) $ do
description <- lift $ runAnnex "" $
T.pack . concat <$> prettyListUUIDs [uuid]
$(widgetFile "configurators/enabledirectory")
diff --git a/Assistant/WebApp/Configurators/Pairing.hs b/Assistant/WebApp/Configurators/Pairing.hs
index c6e9874..38ba475 100644
--- a/Assistant/WebApp/Configurators/Pairing.hs
+++ b/Assistant/WebApp/Configurators/Pairing.hs
@@ -11,13 +11,9 @@
module Assistant.WebApp.Configurators.Pairing where
import Assistant.Pairing
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
+import Assistant.WebApp.Common
import Assistant.Types.Buddies
-import Utility.Yesod
#ifdef WITH_PAIRING
-import Assistant.Common
import Assistant.Pairing.Network
import Assistant.Pairing.MakeRemote
import Assistant.Ssh
@@ -42,7 +38,6 @@ import Utility.UserInfo
import Git
import Yesod
-import Data.Text (Text)
#ifdef WITH_PAIRING
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
@@ -158,7 +153,11 @@ getFinishXMPPPairR _ = noXMPPPairing
xmppPairEnd :: Bool -> Maybe JID -> Handler RepHtml
xmppPairEnd inprogress theirjid = pairPage $ do
let friend = buddyName <$> theirjid
- cloudrepolist <- lift $ repoList True False False
+ let cloudrepolist = repoListDisplay $ RepoSelector
+ { onlyCloud = True
+ , onlyConfigured = False
+ , includeHere = False
+ }
$(widgetFile "configurators/pairing/xmpp/end")
#endif
@@ -260,7 +259,6 @@ promptSecret msg cont = pairPage $ do
(verifiableVal . fromPairMsg <$> msg)
u <- T.pack <$> liftIO myUserName
let sameusername = username == u
- let authtoken = webAppFormAuthToken
$(widgetFile "configurators/pairing/local/prompt")
{- This counts unicode characters as more than one character,
@@ -289,10 +287,7 @@ sampleQuote = T.unwords
#endif
pairPage :: Widget -> Handler RepHtml
-pairPage w = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Pairing"
- w
+pairPage = page "Pairing" (Just Config)
noPairing :: Text -> Handler RepHtml
noPairing pairingtype = pairPage $
diff --git a/Assistant/WebApp/Configurators/S3.hs b/Assistant/WebApp/Configurators/S3.hs
deleted file mode 100644
index 42355ea..0000000
--- a/Assistant/WebApp/Configurators/S3.hs
+++ /dev/null
@@ -1,125 +0,0 @@
-{- git-annex assistant webapp configurator for Amazon S3
- -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU AGPL version 3 or higher.
- -}
-
-{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
-
-module Assistant.WebApp.Configurators.S3 where
-
-import Assistant.Common
-import Assistant.MakeRemote
-import Assistant.Sync
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
-import Utility.Yesod
-import qualified Remote.S3 as S3
-import Logs.Remote
-import qualified Remote
-import Types.Remote (RemoteConfig)
-import Types.StandardGroups
-import Logs.PreferredContent
-
-import Yesod
-import Data.Text (Text)
-import qualified Data.Text as T
-import qualified Data.Map as M
-
-s3Configurator :: Widget -> Handler RepHtml
-s3Configurator a = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Add an Amazon S3 repository"
- a
-
-data StorageClass = StandardRedundancy | ReducedRedundancy
- deriving (Eq, Enum, Bounded)
-
-instance Show StorageClass where
- show StandardRedundancy = "STANDARD"
- show ReducedRedundancy = "REDUCED_REDUNDANCY"
-
-data S3Input = S3Input
- { accessKeyID :: Text
- , secretAccessKey :: Text
- -- Free form text for datacenter because Amazon adds new ones.
- , datacenter :: Text
- , storageClass :: StorageClass
- , repoName :: Text
- }
-
-data S3Creds = S3Creds Text Text
-
-extractCreds :: S3Input -> S3Creds
-extractCreds i = S3Creds (accessKeyID i) (secretAccessKey i)
-
-s3InputAForm :: AForm WebApp WebApp S3Input
-s3InputAForm = S3Input
- <$> areq textField "Access Key ID" Nothing
- <*> areq passwordField "Secret Access Key" Nothing
- <*> areq textField "Datacenter" (Just "US")
- <*> areq (selectFieldList storageclasses) "Storage class" (Just StandardRedundancy)
- <*> areq textField "Repository name" (Just "S3")
- where
- storageclasses :: [(Text, StorageClass)]
- storageclasses =
- [ ("Standard redundancy", StandardRedundancy)
- , ("Reduced redundancy (costs less)", ReducedRedundancy)
- ]
-
-s3CredsAForm :: AForm WebApp WebApp S3Creds
-s3CredsAForm = S3Creds
- <$> areq textField "Access Key ID" Nothing
- <*> areq passwordField "Secret Access Key" Nothing
-
-getAddS3R :: Handler RepHtml
-getAddS3R = s3Configurator $ do
- ((result, form), enctype) <- lift $
- runFormGet $ renderBootstrap s3InputAForm
- case result of
- FormSuccess s3input -> lift $ do
- let name = T.unpack $ repoName s3input
- makeS3Remote (extractCreds s3input) name setgroup $ M.fromList
- [ ("encryption", "shared")
- , ("type", "S3")
- , ("datacenter", T.unpack $ datacenter s3input)
- , ("storageclass", show $ storageClass s3input)
- ]
- _ -> showform form enctype
- where
- showform form enctype = do
- let authtoken = webAppFormAuthToken
- $(widgetFile "configurators/adds3")
- setgroup r = runAnnex () $
- setStandardGroup (Remote.uuid r) TransferGroup
-
-getEnableS3R :: UUID -> Handler RepHtml
-getEnableS3R uuid = s3Configurator $ do
- ((result, form), enctype) <- lift $
- runFormGet $ renderBootstrap s3CredsAForm
- case result of
- FormSuccess s3creds -> lift $ do
- m <- runAnnex M.empty readRemoteLog
- let name = fromJust $ M.lookup "name" $
- fromJust $ M.lookup uuid m
- makeS3Remote s3creds name (const noop) M.empty
- _ -> showform form enctype
- where
- showform form enctype = do
- let authtoken = webAppFormAuthToken
- description <- lift $ runAnnex "" $
- T.pack . concat <$> Remote.prettyListUUIDs [uuid]
- $(widgetFile "configurators/enables3")
-
-makeS3Remote :: S3Creds -> String -> (Remote -> Handler ()) -> RemoteConfig -> Handler ()
-makeS3Remote (S3Creds ak sk) name setup config = do
- remotename <- runAnnex name $ fromRepo $ uniqueRemoteName name 0
- liftIO $ S3.s3SetCredsEnv ( T.unpack ak, T.unpack sk)
- r <- liftAssistant $ liftAnnex $ addRemote $ do
- makeSpecialRemote name S3.remote config
- return remotename
- setup r
- liftAssistant $ syncNewRemote r
- redirect $ EditNewCloudRepositoryR $ Remote.uuid r
diff --git a/Assistant/WebApp/Configurators/Ssh.hs b/Assistant/WebApp/Configurators/Ssh.hs
index 7353f61..5253b4f 100644
--- a/Assistant/WebApp/Configurators/Ssh.hs
+++ b/Assistant/WebApp/Configurators/Ssh.hs
@@ -9,13 +9,9 @@
module Assistant.WebApp.Configurators.Ssh where
-import Assistant.Common
+import Assistant.WebApp.Common
import Assistant.Ssh
import Assistant.MakeRemote
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
-import Utility.Yesod
import Utility.Rsync (rsyncUrlIsShell)
import Logs.Remote
import Remote
@@ -24,16 +20,12 @@ import Types.StandardGroups
import Utility.UserInfo
import Yesod
-import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Map as M
import Network.Socket
sshConfigurator :: Widget -> Handler RepHtml
-sshConfigurator a = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Add a remote server"
- a
+sshConfigurator = page "Add a remote server" (Just Config)
data SshInput = SshInput
{ hostname :: Maybe Text
@@ -108,9 +100,7 @@ getAddSshR = sshConfigurator $ do
Right sshdata -> lift $ redirect $ ConfirmSshR sshdata
_ -> showform form enctype UntestedServer
where
- showform form enctype status = do
- let authtoken = webAppFormAuthToken
- $(widgetFile "configurators/ssh/add")
+ showform form enctype status = $(widgetFile "configurators/ssh/add")
{- To enable an existing rsync special remote, parse the SshInput from
- its rsyncurl, and display a form whose only real purpose is to check
@@ -143,7 +133,6 @@ getEnableRsyncR u = do
showform form enctype status = do
description <- lift $ runAnnex "" $
T.pack . concat <$> prettyListUUIDs [u]
- let authtoken = webAppFormAuthToken
$(widgetFile "configurators/ssh/enable")
enable sshdata = lift $ redirect $ ConfirmSshR $
sshdata { rsyncOnly = True }
@@ -245,8 +234,7 @@ showSshErr msg = sshConfigurator $
$(widgetFile "configurators/ssh/error")
getConfirmSshR :: SshData -> Handler RepHtml
-getConfirmSshR sshdata = sshConfigurator $ do
- let authtoken = webAppFormAuthToken
+getConfirmSshR sshdata = sshConfigurator $
$(widgetFile "configurators/ssh/confirm")
getMakeSshGitR :: SshData -> Handler RepHtml
@@ -291,10 +279,7 @@ getAddRsyncNetR = do
((result, form), enctype) <- runFormGet $
renderBootstrap $ sshInputAForm $
SshInput Nothing Nothing Nothing
- let showform status = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Add a Rsync.net repository"
- let authtoken = webAppFormAuthToken
+ let showform status = page "Add a Rsync.net repository" (Just Config) $
$(widgetFile "configurators/addrsync.net")
case result of
FormSuccess sshinput
diff --git a/Assistant/WebApp/Configurators/WebDAV.hs b/Assistant/WebApp/Configurators/WebDAV.hs
new file mode 100644
index 0000000..c16abeb
--- /dev/null
+++ b/Assistant/WebApp/Configurators/WebDAV.hs
@@ -0,0 +1,114 @@
+{- git-annex assistant webapp configurators for WebDAV remotes
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU AGPL version 3 or higher.
+ -}
+
+{-# LANGUAGE CPP, TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
+
+module Assistant.WebApp.Configurators.WebDAV where
+
+import Assistant.WebApp.Common
+import Assistant.MakeRemote
+import Assistant.Sync
+#ifdef WITH_WEBDAV
+import qualified Remote.WebDAV as WebDAV
+#endif
+import qualified Remote
+import Types.Remote (RemoteConfig)
+import Types.StandardGroups
+import Logs.PreferredContent
+import Logs.Remote
+
+import Yesod
+import qualified Data.Text as T
+import qualified Data.Map as M
+
+webDAVConfigurator :: Widget -> Handler RepHtml
+webDAVConfigurator = page "Add a WebDAV repository" (Just Config)
+
+boxConfigurator :: Widget -> Handler RepHtml
+boxConfigurator = page "Add a Box.com repository" (Just Config)
+
+data WebDAVInput = WebDAVInput
+ { user :: Text
+ , password :: Text
+ , directory :: Text
+ }
+
+boxComAForm :: AForm WebApp WebApp WebDAVInput
+boxComAForm = WebDAVInput
+ <$> areq textField "Username or Email" Nothing
+ <*> areq passwordField "Box.com Password" Nothing
+ <*> areq textField "Directory" (Just "annex")
+
+webDAVCredsAForm :: AForm WebApp WebApp WebDAVInput
+webDAVCredsAForm = WebDAVInput
+ <$> areq textField "Username or Email" Nothing
+ <*> areq passwordField "Password" Nothing
+ <*> pure (T.empty)
+
+getAddBoxComR :: Handler RepHtml
+#ifdef WITH_WEBDAV
+getAddBoxComR = boxConfigurator $ do
+ ((result, form), enctype) <- lift $
+ runFormGet $ renderBootstrap boxComAForm
+ case result of
+ FormSuccess input -> lift $
+ makeWebDavRemote "box.com" input setgroup $ M.fromList
+ [ ("encryption", "shared")
+ , ("type", "webdav")
+ , ("url", "https://www.box.com/dav/" ++ T.unpack (directory input))
+ -- Box.com has a max file size of 100 mb, but
+ -- using smaller chunks has better memory
+ -- performance.
+ , ("chunksize", "10mb")
+ ]
+ _ -> $(widgetFile "configurators/addbox.com")
+ where
+ setgroup r = runAnnex () $
+ setStandardGroup (Remote.uuid r) TransferGroup
+#else
+getAddBoxComR = error "WebDAV not supported by this build"
+#endif
+
+getEnableWebDAVR :: UUID -> Handler RepHtml
+#ifdef WITH_WEBDAV
+getEnableWebDAVR uuid = do
+ m <- runAnnex M.empty readRemoteLog
+ let c = fromJust $ M.lookup uuid m
+ let name = fromJust $ M.lookup "name" c
+ let url = fromJust $ M.lookup "url" c
+ go name url
+ where
+ go name url
+ | "box.com/" `isInfixOf` url = boxConfigurator $ enable name url
+ | otherwise = webDAVConfigurator $ enable name url
+
+ enable name url = do
+ ((result, form), enctype) <- lift $
+ runFormGet $ renderBootstrap webDAVCredsAForm
+ case result of
+ FormSuccess creds -> lift $
+ makeWebDavRemote name creds (const noop) M.empty
+ _ -> do
+ description <- lift $ runAnnex "" $
+ T.pack . concat <$> Remote.prettyListUUIDs [uuid]
+ $(widgetFile "configurators/enablewebdav")
+#else
+getEnableWebDAVR _ = error "WebDAV not supported by this build"
+#endif
+
+#ifdef WITH_WEBDAV
+makeWebDavRemote :: String -> WebDAVInput -> (Remote -> Handler ()) -> RemoteConfig -> Handler ()
+makeWebDavRemote name input setup config = do
+ remotename <- runAnnex name $ fromRepo $ uniqueRemoteName name 0
+ liftIO $ WebDAV.setCredsEnv (T.unpack $ user input, T.unpack $ password input)
+ r <- liftAssistant $ liftAnnex $ addRemote $ do
+ makeSpecialRemote name WebDAV.remote config
+ return remotename
+ setup r
+ liftAssistant $ syncNewRemote r
+ redirect $ EditNewCloudRepositoryR $ Remote.uuid r
+#endif
diff --git a/Assistant/WebApp/Configurators/XMPP.hs b/Assistant/WebApp/Configurators/XMPP.hs
index 9d41a85..d526880 100644
--- a/Assistant/WebApp/Configurators/XMPP.hs
+++ b/Assistant/WebApp/Configurators/XMPP.hs
@@ -10,14 +10,10 @@
module Assistant.WebApp.Configurators.XMPP where
-import Assistant.WebApp
-import Assistant.WebApp.Types
+import Assistant.WebApp.Common
import Assistant.WebApp.Notifications
-import Assistant.WebApp.SideBar
-import Utility.Yesod
import Utility.NotificationBroadcaster
#ifdef WITH_XMPP
-import Assistant.Common
import Assistant.XMPP.Client
import Assistant.XMPP.Buddies
import Assistant.Types.Buddies
@@ -31,8 +27,8 @@ import Yesod
#ifdef WITH_XMPP
import Network
import Network.Protocol.XMPP
-import Data.Text (Text)
import qualified Data.Text as T
+import Control.Exception (SomeException)
#endif
{- Displays an alert suggesting to configure XMPP, with a button. -}
@@ -74,13 +70,11 @@ getXMPPR' redirto = xmppPage $ do
oldcreds <- runAnnex Nothing getXMPPCreds
runFormGet $ renderBootstrap $ xmppAForm $
creds2Form <$> oldcreds
- let showform problem = do
- let authtoken = webAppFormAuthToken
- $(widgetFile "configurators/xmpp")
+ let showform problem = $(widgetFile "configurators/xmpp")
case result of
- FormSuccess f -> maybe (showform True) (lift . storecreds)
+ FormSuccess f -> either (showform . Just . show) (lift . storecreds)
=<< liftIO (validateForm f)
- _ -> showform False
+ _ -> showform Nothing
where
storecreds creds = do
void $ runAnnex undefined $ setXMPPCreds creds
@@ -96,8 +90,8 @@ getBuddyListR :: NotificationId -> Handler RepHtml
getBuddyListR nid = do
waitNotifier getBuddyListBroadcaster nid
- page <- widgetToPageContent $ buddyListDisplay
- hamletToRepHtml $ [hamlet|^{pageBody page}|]
+ p <- widgetToPageContent buddyListDisplay
+ hamletToRepHtml $ [hamlet|^{pageBody p}|]
buddyListDisplay :: Widget
buddyListDisplay = do
@@ -133,7 +127,7 @@ jidField = checkBool (isJust . parseJID) bad textField
bad :: Text
bad = "This should look like an email address.."
-validateForm :: XMPPForm -> IO (Maybe XMPPCreds)
+validateForm :: XMPPForm -> IO (Either SomeException XMPPCreds)
validateForm f = do
let jid = fromMaybe (error "bad JID") $ parseJID (formJID f)
let domain = T.unpack $ strDomain $ jidDomain jid
@@ -155,15 +149,11 @@ validateForm f = do
, xmppJID = formJID f
}
-testXMPP :: XMPPCreds -> IO (Maybe XMPPCreds)
-testXMPP creds = either (const $ return Nothing)
- (const $ return $ Just creds)
- =<< connectXMPP creds (const noop)
+testXMPP :: XMPPCreds -> IO (Either SomeException XMPPCreds)
+testXMPP creds = either Left (const $ Right creds)
+ <$> connectXMPP creds (const noop)
#endif
xmppPage :: Widget -> Handler RepHtml
-xmppPage w = bootstrap (Just Config) $ do
- sideBarDisplay
- setTitle "Jabber"
- w
+xmppPage = page "Jabber" (Just Config)
diff --git a/Assistant/WebApp/DashBoard.hs b/Assistant/WebApp/DashBoard.hs
index 44e6461..7713b34 100644
--- a/Assistant/WebApp/DashBoard.hs
+++ b/Assistant/WebApp/DashBoard.hs
@@ -9,16 +9,12 @@
module Assistant.WebApp.DashBoard where
-import Assistant.Common
-import Assistant.WebApp
-import Assistant.WebApp.Types
+import Assistant.WebApp.Common
import Assistant.WebApp.Utility
-import Assistant.WebApp.SideBar
import Assistant.WebApp.Notifications
import Assistant.WebApp.Configurators
import Assistant.TransferQueue
import Utility.NotificationBroadcaster
-import Utility.Yesod
import Logs.Transfer
import Utility.Percentage
import Utility.DataUnits
@@ -73,20 +69,19 @@ getTransfersR :: NotificationId -> Handler RepHtml
getTransfersR nid = do
waitNotifier getTransferBroadcaster nid
- page <- widgetToPageContent $ transfersDisplay False
- hamletToRepHtml $ [hamlet|^{pageBody page}|]
+ p <- widgetToPageContent $ transfersDisplay False
+ hamletToRepHtml $ [hamlet|^{pageBody p}|]
{- The main dashboard. -}
dashboard :: Bool -> Widget
dashboard warnNoScript = do
- sideBarDisplay
let content = transfersDisplay warnNoScript
$(widgetFile "dashboard/main")
getHomeR :: Handler RepHtml
getHomeR = ifM (inFirstRun)
( redirect ConfigR
- , bootstrap (Just DashBoard) $ dashboard True
+ , page "" (Just DashBoard) $ dashboard True
)
{- Used to test if the webapp is running. -}
@@ -95,11 +90,11 @@ headHomeR = noop
{- Same as HomeR, except no autorefresh at all (and no noscript warning). -}
getNoScriptR :: Handler RepHtml
-getNoScriptR = bootstrap (Just DashBoard) $ dashboard False
+getNoScriptR = page "" (Just DashBoard) $ dashboard False
{- Same as HomeR, except with autorefreshing via meta refresh. -}
getNoScriptAutoR :: Handler RepHtml
-getNoScriptAutoR = bootstrap (Just DashBoard) $ do
+getNoScriptAutoR = page "" (Just DashBoard) $ do
let ident = NoScriptR
let delayseconds = 3 :: Int
let this = NoScriptAutoR
diff --git a/Assistant/WebApp/Documentation.hs b/Assistant/WebApp/Documentation.hs
index 6cccea4..f5f222d 100644
--- a/Assistant/WebApp/Documentation.hs
+++ b/Assistant/WebApp/Documentation.hs
@@ -9,12 +9,8 @@
module Assistant.WebApp.Documentation where
-import Assistant.Common
-import Assistant.WebApp
-import Assistant.WebApp.Types
-import Assistant.WebApp.SideBar
+import Assistant.WebApp.Common
import Assistant.Install (standaloneAppBase)
-import Utility.Yesod
import Build.SysConfig (packageversion)
import Yesod
@@ -27,9 +23,7 @@ licenseFile = do
return $ (</> "LICENSE") <$> base
getAboutR :: Handler RepHtml
-getAboutR = bootstrap (Just About) $ do
- sideBarDisplay
- setTitle "About git-annex"
+getAboutR = page "About git-annex" (Just About) $ do
builtinlicense <- isJust <$> liftIO licenseFile
$(widgetFile "documentation/about")
@@ -38,7 +32,7 @@ getLicenseR = do
v <- liftIO licenseFile
case v of
Nothing -> redirect AboutR
- Just f -> bootstrap (Just About) $ do
+ Just f -> customPage (Just About) $ do
-- no sidebar, just pages of legalese..
setTitle "License"
license <- liftIO $ readFile f
diff --git a/Assistant/WebApp/Notifications.hs b/Assistant/WebApp/Notifications.hs
index c841049..5eb2283 100644
--- a/Assistant/WebApp/Notifications.hs
+++ b/Assistant/WebApp/Notifications.hs
@@ -5,7 +5,7 @@
- Licensed under the GNU AGPL version 3 or higher.
-}
-{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
+{-# LANGUAGE CPP, TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
module Assistant.WebApp.Notifications where
@@ -20,6 +20,9 @@ import Utility.Yesod
import Yesod
import Data.Text (Text)
import qualified Data.Text as T
+#ifndef WITH_OLD_YESOD
+import qualified Data.Aeson.Types as Aeson
+#endif
{- Add to any widget to make it auto-update using long polling.
-
@@ -32,9 +35,16 @@ import qualified Data.Text as T
- ms_startdelay is how long to delay before updating with AJAX at the start
-}
autoUpdate :: Text -> Route WebApp -> Int -> Int -> Widget
-autoUpdate ident geturl ms_delay ms_startdelay = do
+autoUpdate tident geturl ms_delay ms_startdelay = do
+#ifdef WITH_OLD_YESOD
let delay = show ms_delay
let startdelay = show ms_startdelay
+ let ident = tident
+#else
+ let delay = Aeson.String (T.pack (show ms_delay))
+ let startdelay = Aeson.String (T.pack (show ms_startdelay))
+ let ident = Aeson.String tident
+#endif
addScript $ StaticR longpolling_js
$(widgetFile "notifications/longpolling")
@@ -62,6 +72,11 @@ getNotifierSideBarR = notifierUrl SideBarR getAlertBroadcaster
getNotifierBuddyListR :: Handler RepPlain
getNotifierBuddyListR = notifierUrl BuddyListR getBuddyListBroadcaster
+getNotifierRepoListR :: RepoSelector -> Handler RepPlain
+getNotifierRepoListR reposelector = notifierUrl route getRepoListBroadcaster
+ where
+ route nid = RepoListR $ RepoListNotificationId nid reposelector
+
getTransferBroadcaster :: Assistant NotificationBroadcaster
getTransferBroadcaster = transferNotifier <$> getDaemonStatus
@@ -70,3 +85,6 @@ getAlertBroadcaster = alertNotifier <$> getDaemonStatus
getBuddyListBroadcaster :: Assistant NotificationBroadcaster
getBuddyListBroadcaster = getBuddyBroadcaster <$> getAssistant buddyList
+
+getRepoListBroadcaster :: Assistant NotificationBroadcaster
+getRepoListBroadcaster = syncRemotesNotifier <$> getDaemonStatus
diff --git a/Assistant/WebApp/Page.hs b/Assistant/WebApp/Page.hs
new file mode 100644
index 0000000..5787b83
--- /dev/null
+++ b/Assistant/WebApp/Page.hs
@@ -0,0 +1,66 @@
+{- git-annex assistant webapp page display
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU AGPL version 3 or higher.
+ -}
+
+{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses, TemplateHaskell, OverloadedStrings, RankNTypes #-}
+
+module Assistant.WebApp.Page where
+
+import Assistant.Common
+import Assistant.WebApp
+import Assistant.WebApp.Types
+import Assistant.WebApp.SideBar
+import Utility.Yesod
+
+import Yesod
+import Text.Hamlet
+import Data.Text (Text)
+
+data NavBarItem = DashBoard | Config | About
+ deriving (Eq)
+
+navBarName :: NavBarItem -> Text
+navBarName DashBoard = "Dashboard"
+navBarName Config = "Configuration"
+navBarName About = "About"
+
+navBarRoute :: NavBarItem -> Route WebApp
+navBarRoute DashBoard = HomeR
+navBarRoute Config = ConfigR
+navBarRoute About = AboutR
+
+defaultNavBar :: [NavBarItem]
+defaultNavBar = [DashBoard, Config, About]
+
+firstRunNavBar :: [NavBarItem]
+firstRunNavBar = [Config, About]
+
+selectNavBar :: Handler [NavBarItem]
+selectNavBar = ifM (inFirstRun) (return firstRunNavBar, return defaultNavBar)
+
+{- A standard page of the webapp, with a title, a sidebar, and that may
+ - be highlighted on the navbar. -}
+page :: Html -> Maybe NavBarItem -> Widget -> Handler RepHtml
+page title navbaritem content = customPage navbaritem $ do
+ setTitle title
+ sideBarDisplay
+ content
+
+{- A custom page, with no title or sidebar set. -}
+customPage :: Maybe NavBarItem -> Widget -> Handler RepHtml
+customPage navbaritem content = do
+ webapp <- getYesod
+ navbar <- map navdetails <$> selectNavBar
+ pageinfo <- widgetToPageContent $ do
+ addStylesheet $ StaticR css_bootstrap_css
+ addStylesheet $ StaticR css_bootstrap_responsive_css
+ addScript $ StaticR jquery_full_js
+ addScript $ StaticR js_bootstrap_dropdown_js
+ addScript $ StaticR js_bootstrap_modal_js
+ $(widgetFile "page")
+ hamletToRepHtml $(hamletFile $ hamletTemplate "bootstrap")
+ where
+ navdetails i = (navBarName i, navBarRoute i, Just i == navbaritem)
diff --git a/Assistant/WebApp/SideBar.hs b/Assistant/WebApp/SideBar.hs
index c8ccbed..a87065f 100644
--- a/Assistant/WebApp/SideBar.hs
+++ b/Assistant/WebApp/SideBar.hs
@@ -88,3 +88,13 @@ getClickAlert i = do
redirect $ buttonUrl b
_ -> redirectBack
+htmlIcon :: AlertIcon -> GWidget sub master ()
+htmlIcon ActivityIcon = bootstrapIcon "refresh"
+htmlIcon InfoIcon = bootstrapIcon "info-sign"
+htmlIcon SuccessIcon = bootstrapIcon "ok"
+htmlIcon ErrorIcon = bootstrapIcon "exclamation-sign"
+-- utf-8 umbrella (utf-8 cloud looks too stormy)
+htmlIcon TheCloud = [whamlet|&#9730;|]
+
+bootstrapIcon :: Text -> GWidget sub master ()
+bootstrapIcon name = [whamlet|<i .icon-#{name}></i>|]
diff --git a/Assistant/WebApp/Types.hs b/Assistant/WebApp/Types.hs
index b95b683..320438b 100644
--- a/Assistant/WebApp/Types.hs
+++ b/Assistant/WebApp/Types.hs
@@ -62,6 +62,16 @@ data WebAppState = WebAppState
, otherRepos :: [(String, String)] -- name and path to other repos
}
+data RepoSelector = RepoSelector
+ { onlyCloud :: Bool
+ , onlyConfigured :: Bool
+ , includeHere :: Bool
+ }
+ deriving (Read, Show, Eq)
+
+data RepoListNotificationId = RepoListNotificationId NotificationId RepoSelector
+ deriving (Read, Show, Eq)
+
instance PathPiece SshData where
toPathPiece = pack . show
fromPathPiece = readish . unpack
@@ -97,3 +107,11 @@ instance PathPiece BuddyKey where
instance PathPiece PairKey where
toPathPiece = pack . show
fromPathPiece = readish . unpack
+
+instance PathPiece RepoListNotificationId where
+ toPathPiece = pack . show
+ fromPathPiece = readish . unpack
+
+instance PathPiece RepoSelector where
+ toPathPiece = pack . show
+ fromPathPiece = readish . unpack
diff --git a/Assistant/WebApp/routes b/Assistant/WebApp/routes
index 2d64672..7fa63c1 100644
--- a/Assistant/WebApp/routes
+++ b/Assistant/WebApp/routes
@@ -25,6 +25,8 @@
/config/repository/add/ssh/make/rsync/#SshData MakeSshRsyncR GET
/config/repository/add/cloud/rsync.net AddRsyncNetR GET
/config/repository/add/cloud/S3 AddS3R GET
+/config/repository/add/cloud/glacier AddGlacierR GET
+/config/repository/add/cloud/box.com AddBoxComR GET
/config/repository/pair/local/start StartLocalPairR GET
/config/repository/pair/local/running/#SecretReminder RunningLocalPairR GET
@@ -37,6 +39,8 @@
/config/repository/enable/rsync/#UUID EnableRsyncR GET
/config/repository/enable/directory/#UUID EnableDirectoryR GET
/config/repository/enable/S3/#UUID EnableS3R GET
+/config/repository/enable/glacier/#UUID EnableGlacierR GET
+/config/repository/enable/webdav/#UUID EnableWebDAVR GET
/transfers/#NotificationId TransfersR GET
/notifier/transfers NotifierTransfersR GET
@@ -47,6 +51,9 @@
/buddylist/#NotificationId BuddyListR GET
/notifier/buddylist NotifierBuddyListR GET
+/repolist/#RepoListNotificationId RepoListR GET
+/notifier/repolist/#RepoSelector NotifierRepoListR GET
+
/alert/close/#AlertId CloseAlert GET
/alert/click/#AlertId ClickAlert GET
/filebrowser FileBrowserR GET POST
diff --git a/Assistant/XMPP/Buddies.hs b/Assistant/XMPP/Buddies.hs
index 7383c38..0c466e5 100644
--- a/Assistant/XMPP/Buddies.hs
+++ b/Assistant/XMPP/Buddies.hs
@@ -23,6 +23,10 @@ genBuddyKey j = BuddyKey $ formatJID $ baseJID j
buddyName :: JID -> Text
buddyName j = maybe (T.pack "") strNode (jidNode j)
+ucFirst :: Text -> Text
+ucFirst s = let (first, rest) = T.splitAt 1 s
+ in T.concat [T.toUpper first, rest]
+
{- Summary of info about a buddy.
-
- If the buddy has no clients at all anymore, returns Nothing. -}
diff --git a/Assistant/XMPP/Client.hs b/Assistant/XMPP/Client.hs
index 8ab0c28..c2a86cb 100644
--- a/Assistant/XMPP/Client.hs
+++ b/Assistant/XMPP/Client.hs
@@ -8,8 +8,8 @@
module Assistant.XMPP.Client where
import Assistant.Common
-import Utility.FileMode
import Utility.SRV
+import Creds
import Network.Protocol.XMPP
import Network
@@ -63,23 +63,12 @@ runClientError :: Server -> JID -> T.Text -> T.Text -> XMPP a -> IO a
runClientError s j u p x = either (error . show) return =<< runClient s j u p x
getXMPPCreds :: Annex (Maybe XMPPCreds)
-getXMPPCreds = do
- f <- xmppCredsFile
- s <- liftIO $ catchMaybeIO $ readFile f
- return $ readish =<< s
+getXMPPCreds = parse <$> readCacheCreds xmppCredsFile
+ where
+ parse s = readish =<< s
setXMPPCreds :: XMPPCreds -> Annex ()
-setXMPPCreds creds = do
- f <- xmppCredsFile
- liftIO $ do
- createDirectoryIfMissing True (parentDir f)
- h <- openFile f WriteMode
- modifyFileMode f $ removeModes
- [groupReadMode, otherReadMode]
- hPutStr h (show creds)
- hClose h
+setXMPPCreds creds = writeCacheCreds (show creds) xmppCredsFile
-xmppCredsFile :: Annex FilePath
-xmppCredsFile = do
- dir <- fromRepo gitAnnexCredsDir
- return $ dir </> "xmpp"
+xmppCredsFile :: FilePath
+xmppCredsFile = "xmpp"
diff --git a/Assistant/XMPP/Git.hs b/Assistant/XMPP/Git.hs
index da143ea..d3c8343 100644
--- a/Assistant/XMPP/Git.hs
+++ b/Assistant/XMPP/Git.hs
@@ -25,7 +25,6 @@ import qualified Git.Branch
import Locations.UserConfig
import qualified Types.Remote as Remote
import Utility.FileMode
-import Utility.ThreadScheduler
import Network.Protocol.XMPP
import qualified Data.Text as T
@@ -33,6 +32,7 @@ import System.Posix.Env
import System.Posix.Types
import System.Process (std_in, std_out, std_err)
import Control.Concurrent
+import System.Timeout
import qualified Data.ByteString as B
import qualified Data.Map as M
@@ -119,16 +119,16 @@ xmppPush cid gitpush = runPush SendPack cid handleDeferred $ do
then liftIO $ killThread =<< myThreadId
else sendNetMessage $ Pushing cid $ SendPackOutput b
fromxmpp outh controlh = forever $ do
- m <- runTimeout xmppTimeout <~> waitNetPushMessage SendPack
+ m <- timeout xmppTimeout <~> waitNetPushMessage SendPack
case m of
- (Right (Pushing _ (ReceivePackOutput b))) ->
+ (Just (Pushing _ (ReceivePackOutput b))) ->
liftIO $ writeChunk outh b
- (Right (Pushing _ (ReceivePackDone exitcode))) ->
+ (Just (Pushing _ (ReceivePackDone exitcode))) ->
liftIO $ do
hPrint controlh exitcode
hFlush controlh
- (Right _) -> noop
- (Left _) -> do
+ (Just _) -> noop
+ Nothing -> do
debug ["timeout waiting for git receive-pack output via XMPP"]
-- Send a synthetic exit code to git-annex
-- xmppgit, which will exit and cause git push
@@ -220,12 +220,12 @@ xmppReceivePack cid = runPush ReceivePack cid handleDeferred $ do
sendNetMessage $ Pushing cid $ ReceivePackOutput b
relaytoxmpp outh
relayfromxmpp inh = forever $ do
- m <- runTimeout xmppTimeout <~> waitNetPushMessage ReceivePack
+ m <- timeout xmppTimeout <~> waitNetPushMessage ReceivePack
case m of
- (Right (Pushing _ (SendPackOutput b))) ->
+ (Just (Pushing _ (SendPackOutput b))) ->
liftIO $ writeChunk inh b
- (Right _) -> noop
- (Left _) -> do
+ (Just _) -> noop
+ Nothing -> do
debug ["timeout waiting for git send-pack output via XMPP"]
-- closing the handle will make
-- git receive-pack exit
@@ -291,5 +291,5 @@ chunkSize = 4096
- delayed for running until the timeout is reached, so it should not be
- excessive.
-}
-xmppTimeout :: Seconds
-xmppTimeout = Seconds 120
+xmppTimeout :: Int
+xmppTimeout = 120000000 -- 120 seconds
diff --git a/CHANGELOG b/CHANGELOG
index c05d529..b24b102 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,33 @@
+git-annex (3.20121126) unstable; urgency=low
+
+ * New webdav and Amazon glacier special remotes.
+ * Display a warning when a non-existing file or directory is specified.
+ * webapp: Added configurator for Box.com.
+ * webapp: Show error messages to user when testing XMPP creds.
+ * Fix build of assistant without yesod.
+ * webapp: The list of repositories refreshes when new repositories are
+ added, including when new repository configurations are pushed in from
+ remotes.
+ * OSX: Fix RunAtLoad value in plist file.
+ * Getting a file from chunked directory special remotes no longer buffers
+ it all in memory.
+ * S3: Added progress display for uploading and downloading.
+ * directory special remote: Made more efficient and robust.
+ * Bugfix: directory special remote could loop forever storing a key
+ when a too small chunksize was configured.
+ * Allow controlling whether login credentials for S3 and webdav are
+ committed to the repository, by setting embedcreds=yes|no when running
+ initremote.
+ * Added smallarchive repository group, that only archives files that are
+ in archive directories. Used by default for glacier when set up in the
+ webapp.
+ * assistant: Fixed handling of toplevel archive directory and
+ client repository group.
+ * assistant: Apply preferred content settings when a new symlink
+ is created, or a symlink gets renamed. Made archive directories work.
+
+ -- Joey Hess <joeyh@debian.org> Mon, 26 Nov 2012 11:37:49 -0400
+
git-annex (3.20121112) unstable; urgency=low
* assistant: Can use XMPP to notify other nodes about pushes made to other
diff --git a/Command/Drop.hs b/Command/Drop.hs
index 6c210b1..e7b5212 100644
--- a/Command/Drop.hs
+++ b/Command/Drop.hs
@@ -34,29 +34,32 @@ start :: Maybe Remote -> FilePath -> (Key, Backend) -> CommandStart
start from file (key, _) = autoCopiesWith file key (>) $ \numcopies ->
stopUnless (checkAuto $ wantDrop (Remote.uuid <$> from) (Just file)) $
case from of
- Nothing -> startLocal file numcopies key
+ Nothing -> startLocal file numcopies key Nothing
Just remote -> do
u <- getUUID
if Remote.uuid remote == u
- then startLocal file numcopies key
+ then startLocal file numcopies key Nothing
else startRemote file numcopies key remote
-startLocal :: FilePath -> Maybe Int -> Key -> CommandStart
-startLocal file numcopies key = stopUnless (inAnnex key) $ do
+startLocal :: FilePath -> Maybe Int -> Key -> Maybe Remote -> CommandStart
+startLocal file numcopies key knownpresentremote = stopUnless (inAnnex key) $ do
showStart "drop" file
- next $ performLocal key numcopies
+ next $ performLocal key numcopies knownpresentremote
startRemote :: FilePath -> Maybe Int -> Key -> Remote -> CommandStart
startRemote file numcopies key remote = do
showStart ("drop " ++ Remote.name remote) file
next $ performRemote key numcopies remote
-performLocal :: Key -> Maybe Int -> CommandPerform
-performLocal key numcopies = lockContent key $ do
+performLocal :: Key -> Maybe Int -> Maybe Remote -> CommandPerform
+performLocal key numcopies knownpresentremote = lockContent key $ do
(remotes, trusteduuids) <- Remote.keyPossibilitiesTrusted key
+ let trusteduuids' = case knownpresentremote of
+ Nothing -> trusteduuids
+ Just r -> nub (Remote.uuid r:trusteduuids)
untrusteduuids <- trustGet UnTrusted
- let tocheck = Remote.remotesWithoutUUID remotes (trusteduuids++untrusteduuids)
- stopUnless (canDropKey key numcopies trusteduuids tocheck []) $ do
+ let tocheck = Remote.remotesWithoutUUID remotes (trusteduuids'++untrusteduuids)
+ stopUnless (canDropKey key numcopies trusteduuids' tocheck []) $ do
whenM (inAnnex key) $ removeAnnex key
next $ cleanupLocal key
diff --git a/Command/DropUnused.hs b/Command/DropUnused.hs
index 00c0eec..95af062 100644
--- a/Command/DropUnused.hs
+++ b/Command/DropUnused.hs
@@ -34,7 +34,7 @@ perform key = maybe droplocal dropremote =<< Remote.byName =<< from
showAction $ "from " ++ Remote.name r
ok <- Remote.removeKey r key
next $ Command.Drop.cleanupRemote key r ok
- droplocal = Command.Drop.performLocal key (Just 0) -- force drop
+ droplocal = Command.Drop.performLocal key (Just 0) Nothing -- force drop
from = Annex.getField $ Option.name Command.Drop.fromOption
performOther :: (Key -> Git.Repo -> FilePath) -> Key -> CommandPerform
diff --git a/Command/Find.hs b/Command/Find.hs
index 1e509d1..96f47ec 100644
--- a/Command/Find.hs
+++ b/Command/Find.hs
@@ -20,7 +20,7 @@ import Types.Key
import qualified Option
def :: [Command]
-def = [withOptions [formatOption, print0Option] $
+def = [noCommit $ withOptions [formatOption, print0Option] $
command "find" paramPaths seek "lists available files"]
formatOption :: Option
diff --git a/Command/Fix.hs b/Command/Fix.hs
index 227e08c..38c8ac6 100644
--- a/Command/Fix.hs
+++ b/Command/Fix.hs
@@ -13,7 +13,7 @@ import qualified Annex.Queue
import Annex.Content
def :: [Command]
-def = [command "fix" paramPaths seek
+def = [noCommit $ command "fix" paramPaths seek
"fix up symlinks to point to annexed content"]
seek :: [CommandSeek]
diff --git a/Command/SendKey.hs b/Command/SendKey.hs
index 2aae1ab..ccbfa90 100644
--- a/Command/SendKey.hs
+++ b/Command/SendKey.hs
@@ -12,7 +12,6 @@ import Command
import Annex.Content
import Utility.Rsync
import Logs.Transfer
-import Types.Remote
import qualified Fields
def :: [Command]
diff --git a/Command/Unannex.hs b/Command/Unannex.hs
index 67d81be..89134bb 100644
--- a/Command/Unannex.hs
+++ b/Command/Unannex.hs
@@ -41,7 +41,6 @@ cleanup file key = do
-- git as a normal, non-annexed file.
(s, clean) <- inRepo $ LsFiles.staged [file]
when (not $ null s) $ do
- showOutput
inRepo $ Git.Command.run "commit" [
Param "-q",
Params "-m", Param "content removed from git annex",
diff --git a/Command/Uninit.hs b/Command/Uninit.hs
index b365e8c..a860444 100644
--- a/Command/Uninit.hs
+++ b/Command/Uninit.hs
@@ -36,9 +36,9 @@ check = do
[Params "rev-parse --abbrev-ref HEAD"]
seek :: [CommandSeek]
-seek = [
- withFilesNotInGit $ whenAnnexed startCheckIncomplete,
- withFilesInGit $ whenAnnexed startUnannex
+seek =
+ [ withFilesNotInGit $ whenAnnexed startCheckIncomplete
+ , withFilesInGit $ whenAnnexed startUnannex
, withNothing start
]
diff --git a/Config.hs b/Config.hs
index 1077730..11b5f4c 100644
--- a/Config.hs
+++ b/Config.hs
@@ -65,6 +65,8 @@ semiCheapRemoteCost :: Int
semiCheapRemoteCost = 110
expensiveRemoteCost :: Int
expensiveRemoteCost = 200
+veryExpensiveRemoteCost :: Int
+veryExpensiveRemoteCost = 1000
{- Adjusts a remote's cost to reflect it being encrypted. -}
encryptedRemoteCostAdj :: Int
diff --git a/Creds.hs b/Creds.hs
new file mode 100644
index 0000000..f5ea550
--- /dev/null
+++ b/Creds.hs
@@ -0,0 +1,151 @@
+{- Credentials storage
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Creds where
+
+import Common.Annex
+import Annex.Perms
+import Utility.FileMode
+import Crypto
+import Types.Remote (RemoteConfig, RemoteConfigKey)
+import Remote.Helper.Encryptable (remoteCipher, embedCreds)
+
+import System.Environment
+import System.Posix.Env (setEnv)
+import qualified Data.ByteString.Lazy.Char8 as L
+import qualified Data.Map as M
+import Utility.Base64
+
+type Creds = String -- can be any data
+type CredPair = (String, String) -- login, password
+
+{- A CredPair can be stored in a file, or in the environment, or perhaps
+ - in a remote's configuration. -}
+data CredPairStorage = CredPairStorage
+ { credPairFile :: FilePath
+ , credPairEnvironment :: (String, String)
+ , credPairRemoteKey :: Maybe RemoteConfigKey
+ }
+
+{- Stores creds in a remote's configuration, if the remote allows
+ - that. Otherwise, caches them locally. -}
+setRemoteCredPair :: RemoteConfig -> CredPairStorage -> Annex RemoteConfig
+setRemoteCredPair c storage = go =<< getRemoteCredPair' c storage
+ where
+ go (Just creds)
+ | embedCreds c = case credPairRemoteKey storage of
+ Nothing -> localcache creds
+ Just key -> storeconfig creds key =<< remoteCipher c
+ | otherwise = localcache creds
+ go Nothing = return c
+
+ localcache creds = do
+ writeCacheCredPair creds storage
+ return c
+
+ storeconfig creds key (Just cipher) = do
+ s <- liftIO $ encrypt cipher
+ (feedBytes $ L.pack $ encodeCredPair creds)
+ (readBytes $ return . L.unpack)
+ return $ M.insert key (toB64 s) c
+ storeconfig creds key Nothing =
+ return $ M.insert key (toB64 $ encodeCredPair creds) c
+
+{- Gets a remote's credpair, from the environment if set, otherwise
+ - from the cache in gitAnnexCredsDir, or failing that, from the
+ - value in RemoteConfig. -}
+getRemoteCredPair :: String -> RemoteConfig -> CredPairStorage -> Annex (Maybe CredPair)
+getRemoteCredPair this c storage = maybe missing (return . Just) =<< getRemoteCredPair' c storage
+ where
+ (loginvar, passwordvar) = credPairEnvironment storage
+ missing = do
+ warning $ unwords
+ [ "Set both", loginvar
+ , "and", passwordvar
+ , "to use", this
+ ]
+ return Nothing
+
+getRemoteCredPair' :: RemoteConfig -> CredPairStorage -> Annex (Maybe CredPair)
+getRemoteCredPair' c storage = maybe fromcache (return . Just) =<< fromenv
+ where
+ fromenv = liftIO $ getEnvCredPair storage
+ fromcache = maybe fromconfig (return . Just) =<< readCacheCredPair storage
+ fromconfig = case credPairRemoteKey storage of
+ Just key -> do
+ mcipher <- remoteCipher c
+ case (M.lookup key c, mcipher) of
+ (Nothing, _) -> return Nothing
+ (Just enccreds, Just cipher) -> do
+ creds <- liftIO $ decrypt cipher
+ (feedBytes $ L.pack $ fromB64 enccreds)
+ (readBytes $ return . L.unpack)
+ fromcreds creds
+ (Just bcreds, Nothing) ->
+ fromcreds $ fromB64 bcreds
+ Nothing -> return Nothing
+ fromcreds creds = case decodeCredPair creds of
+ Just credpair -> do
+ writeCacheCredPair credpair storage
+ return $ Just credpair
+ _ -> do error $ "bad creds"
+
+{- Gets a CredPair from the environment. -}
+getEnvCredPair :: CredPairStorage -> IO (Maybe CredPair)
+getEnvCredPair storage = liftM2 (,)
+ <$> get uenv
+ <*> get penv
+ where
+ (uenv, penv) = credPairEnvironment storage
+ get = catchMaybeIO . getEnv
+
+
+
+{- Stores a CredPair in the environment. -}
+setEnvCredPair :: CredPair -> CredPairStorage -> IO ()
+setEnvCredPair (l, p) storage = do
+ set uenv l
+ set penv p
+ where
+ (uenv, penv) = credPairEnvironment storage
+ set var val = setEnv var val True
+
+writeCacheCredPair :: CredPair -> CredPairStorage -> Annex ()
+writeCacheCredPair credpair storage =
+ writeCacheCreds (encodeCredPair credpair) (credPairFile storage)
+
+{- Stores the creds in a file inside gitAnnexCredsDir that only the user
+ - can read. -}
+writeCacheCreds :: Creds -> FilePath -> Annex ()
+writeCacheCreds creds file = do
+ d <- fromRepo gitAnnexCredsDir
+ createAnnexDirectory d
+ liftIO $ do
+ let f = d </> file
+ h <- openFile f WriteMode
+ modifyFileMode f $ removeModes
+ [groupReadMode, otherReadMode]
+ hPutStr h creds
+ hClose h
+
+readCacheCredPair :: CredPairStorage -> Annex (Maybe CredPair)
+readCacheCredPair storage = maybe Nothing decodeCredPair
+ <$> readCacheCreds (credPairFile storage)
+
+readCacheCreds :: FilePath -> Annex (Maybe Creds)
+readCacheCreds file = do
+ d <- fromRepo gitAnnexCredsDir
+ let f = d </> file
+ liftIO $ catchMaybeIO $ readFile f
+
+encodeCredPair :: CredPair -> Creds
+encodeCredPair (l, p) = unlines [l, p]
+
+decodeCredPair :: Creds -> Maybe CredPair
+decodeCredPair creds = case lines creds of
+ l:p:[] -> Just (l, p)
+ _ -> Nothing
diff --git a/Crypto.hs b/Crypto.hs
index 071fb7a..fe6c6d5 100644
--- a/Crypto.hs
+++ b/Crypto.hs
@@ -18,10 +18,11 @@ module Crypto (
describeCipher,
decryptCipher,
encryptKey,
- withEncryptedHandle,
- withDecryptedHandle,
- withEncryptedContent,
- withDecryptedContent,
+ feedFile,
+ feedBytes,
+ readBytes,
+ encrypt,
+ decrypt,
prop_hmacWithCipher_sane
) where
@@ -90,10 +91,9 @@ describeCipher (EncryptedCipher _ (KeyIds ks)) =
encryptCipher :: Cipher -> KeyIds -> IO StorableCipher
encryptCipher (Cipher c) (KeyIds ks) = do
let ks' = nub $ sort ks -- gpg complains about duplicate recipient keyids
- encipher <- Gpg.pipeStrict (encrypt++recipients ks') c
+ encipher <- Gpg.pipeStrict ([ Params "--encrypt" ] ++ recipients ks') c
return $ EncryptedCipher encipher (KeyIds ks')
where
- encrypt = [ Params "--encrypt" ]
recipients l = force_recipients :
concatMap (\k -> [Param "--recipient", Param k]) l
-- Force gpg to only encrypt to the specified
@@ -103,9 +103,7 @@ encryptCipher (Cipher c) (KeyIds ks) = do
{- Decrypting an EncryptedCipher is expensive; the Cipher should be cached. -}
decryptCipher :: StorableCipher -> IO Cipher
decryptCipher (SharedCipher t) = return $ Cipher t
-decryptCipher (EncryptedCipher t _) = Cipher <$> Gpg.pipeStrict decrypt t
- where
- decrypt = [ Param "--decrypt" ]
+decryptCipher (EncryptedCipher t _) = Cipher <$> Gpg.pipeStrict [ Param "--decrypt" ] t
{- Generates an encrypted form of a Key. The encryption does not need to be
- reversable, nor does it need to be the same type of encryption used
@@ -118,31 +116,27 @@ encryptKey c k = Key
, keyMtime = Nothing -- to avoid leaking data
}
-{- Runs an action, passing it a handle from which it can
- - stream encrypted content. -}
-withEncryptedHandle :: Cipher -> IO L.ByteString -> (Handle -> IO a) -> IO a
-withEncryptedHandle = Gpg.passphraseHandle [Params "--symmetric --force-mdc"] . cipherPassphrase
-
-{- Runs an action, passing it a handle from which it can
- - stream decrypted content. -}
-withDecryptedHandle :: Cipher -> IO L.ByteString -> (Handle -> IO a) -> IO a
-withDecryptedHandle = Gpg.passphraseHandle [Param "--decrypt"] . cipherPassphrase
-
-{- Streams encrypted content to an action. -}
-withEncryptedContent :: Cipher -> IO L.ByteString -> (L.ByteString -> IO a) -> IO a
-withEncryptedContent = pass withEncryptedHandle
-
-{- Streams decrypted content to an action. -}
-withDecryptedContent :: Cipher -> IO L.ByteString -> (L.ByteString -> IO a) -> IO a
-withDecryptedContent = pass withDecryptedHandle
-
-pass
- :: (Cipher -> IO L.ByteString -> (Handle -> IO a) -> IO a)
- -> Cipher
- -> IO L.ByteString
- -> (L.ByteString -> IO a)
- -> IO a
-pass to n s a = to n s $ a <=< L.hGetContents
+type Feeder = Handle -> IO ()
+type Reader a = Handle -> IO a
+
+feedFile :: FilePath -> Feeder
+feedFile f h = L.hPut h =<< L.readFile f
+
+feedBytes :: L.ByteString -> Feeder
+feedBytes = flip L.hPut
+
+readBytes :: (L.ByteString -> IO a) -> Reader a
+readBytes a h = L.hGetContents h >>= a
+
+{- Runs a Feeder action, that generates content that is encrypted with the
+ - Cipher, and read by the Reader action. -}
+encrypt :: Cipher -> Feeder -> Reader a -> IO a
+encrypt = Gpg.feedRead [Params "--symmetric --force-mdc"] . cipherPassphrase
+
+{- Runs a Feeder action, that generates content that is decrypted with the
+ - Cipher, and read by the Reader action. -}
+decrypt :: Cipher -> Feeder -> Reader a -> IO a
+decrypt = Gpg.feedRead [Param "--decrypt"] . cipherPassphrase
hmacWithCipher :: Cipher -> String -> String
hmacWithCipher c = hmacWithCipher' (cipherHmac c)
diff --git a/GitAnnex.hs b/GitAnnex.hs
index 81667ee..57f3b98 100644
--- a/GitAnnex.hs
+++ b/GitAnnex.hs
@@ -163,6 +163,7 @@ options = Option.common ++
"skip files smaller than a size"
, Option ['T'] ["time-limit"] (ReqArg Limit.addTimeLimit paramTime)
"stop after the specified amount of time"
+ , Option [] ["trust-glacier"] (NoArg (Annex.setFlag "trustglacier")) "Trust Amazon Glacier inventory"
] ++ Option.matcher
where
setnumcopies v = Annex.changeState $
diff --git a/Locations.hs b/Locations.hs
index 3a7c89e..db97bbe 100644
--- a/Locations.hs
+++ b/Locations.hs
@@ -9,8 +9,10 @@ module Locations (
keyFile,
fileKey,
keyPaths,
+ keyPath,
gitAnnexLocation,
annexLocations,
+ annexLocation,
gitAnnexDir,
gitAnnexObjectDir,
gitAnnexTmpDir,
diff --git a/Makefile b/Makefile
index a98949e..7a75598 100644
--- a/Makefile
+++ b/Makefile
@@ -7,7 +7,7 @@ BASEFLAGS=-Wall -outputdir $(GIT_ANNEX_TMP_BUILD_DIR) -IUtility
#
# If you're using an old version of yesod, enable -DWITH_OLD_YESOD
# Or with an old version of the uri library, enable -DWITH_OLD_URI
-FEATURES?=$(GIT_ANNEX_LOCAL_FEATURES) -DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBAPP -DWITH_PAIRING -DWITH_XMPP -DWITH_DNS
+FEATURES?=$(GIT_ANNEX_LOCAL_FEATURES) -DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBDAV -DWITH_WEBAPP -DWITH_PAIRING -DWITH_XMPP -DWITH_DNS
bins=git-annex
mans=git-annex.1 git-annex-shell.1
diff --git a/Messages.hs b/Messages.hs
index f3cd9fc..d75fe67 100644
--- a/Messages.hs
+++ b/Messages.hs
@@ -11,6 +11,7 @@ module Messages (
showAction,
showProgress,
metered,
+ meteredBytes,
showSideAction,
doSideAction,
doQuietSideAction,
@@ -22,6 +23,7 @@ module Messages (
showEndResult,
showErr,
warning,
+ fileNotFound,
indent,
maybeShowJSON,
showFullJSON,
@@ -41,9 +43,9 @@ import Common
import Types
import Types.Messages
import Types.Key
-import Types.Remote
import qualified Annex
import qualified Messages.JSON as JSON
+import qualified Data.Set as S
showStart :: String -> String -> Annex ()
showStart command file = handle (JSON.start command $ Just file) $
@@ -64,29 +66,38 @@ showProgress = handle q $
{- Shows a progress meter while performing a transfer of a key.
- The action is passed a callback to use to update the meter. -}
metered :: (Maybe MeterUpdate) -> Key -> (MeterUpdate -> Annex a) -> Annex a
-metered combinemeterupdate key a = withOutputType $ go (keySize key)
+metered combinemeterupdate key a = go (keySize key)
where
- go (Just size) NormalOutput = do
+ go (Just size) = meteredBytes combinemeterupdate size a
+ go _ = a (const noop)
+
+{- Shows a progress meter while performing an action on a given number
+ - of bytes. -}
+meteredBytes :: (Maybe MeterUpdate) -> Integer -> (MeterUpdate -> Annex a) -> Annex a
+meteredBytes combinemeterupdate size a = withOutputType go
+ where
+ go NormalOutput = do
progress <- liftIO $ newProgress "" size
meter <- liftIO $ newMeter progress "B" 25 (renderNums binaryOpts 1)
showOutput
- liftIO $ displayMeter stdout meter
r <- a $ \n -> liftIO $ do
incrP progress n
displayMeter stdout meter
maybe noop (\m -> m n) combinemeterupdate
liftIO $ clearMeter stdout meter
return r
- go _ _ = a (const noop)
+ go _ = a (const noop)
showSideAction :: String -> Annex ()
showSideAction m = Annex.getState Annex.output >>= go
where
- go (MessageState v StartBlock) = do
- p
- Annex.changeState $ \s -> s { Annex.output = MessageState v InBlock }
- go (MessageState _ InBlock) = return ()
- go _ = p
+ go st
+ | sideActionBlock st == StartBlock = do
+ p
+ let st' = st { sideActionBlock = InBlock }
+ Annex.changeState $ \s -> s { Annex.output = st' }
+ | sideActionBlock st == InBlock = return ()
+ | otherwise = p
p = handle q $ putStrLn $ "(" ++ m ++ "...)"
showStoringStateAction :: Annex ()
@@ -143,6 +154,18 @@ warning' w = do
hFlush stdout
hPutStrLn stderr w
+{- Displays a warning one time about a file the user specified not existing. -}
+fileNotFound :: FilePath -> Annex ()
+fileNotFound file = do
+ st <- Annex.getState Annex.output
+ let shown = fileNotFoundShown st
+ when (S.notMember file shown) $ do
+ let shown' = S.insert file shown
+ let st' = st { fileNotFoundShown = shown' }
+ Annex.changeState $ \s -> s { Annex.output = st' }
+ liftIO $ hPutStrLn stderr $ unwords
+ [ "git-annex:", file, "not found" ]
+
indent :: String -> String
indent = join "\n" . map (\l -> " " ++ l) . lines
diff --git a/Meters.hs b/Meters.hs
new file mode 100644
index 0000000..378e570
--- /dev/null
+++ b/Meters.hs
@@ -0,0 +1,40 @@
+{- git-annex meters
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Meters where
+
+import Common
+import Types.Meters
+import Utility.Observed
+
+import qualified Data.ByteString.Lazy as L
+import qualified Data.ByteString as S
+
+{- Sends the content of a file to an action, updating the meter as it's
+ - consumed. -}
+withMeteredFile :: FilePath -> MeterUpdate -> (L.ByteString -> IO a) -> IO a
+withMeteredFile f meterupdate a = withBinaryFile f ReadMode $ \h ->
+ hGetContentsObserved h (meterupdate . toInteger) >>= a
+
+{- Sends the content of a file to a Handle, updating the meter as it's
+ - written. -}
+streamMeteredFile :: FilePath -> MeterUpdate -> Handle -> IO ()
+streamMeteredFile f meterupdate h = withMeteredFile f meterupdate $ L.hPut h
+
+{- Writes a ByteString to a Handle, updating a meter as it's written. -}
+meteredWrite :: MeterUpdate -> Handle -> L.ByteString -> IO ()
+meteredWrite meterupdate h = go . L.toChunks
+ where
+ go [] = return ()
+ go (c:cs) = do
+ S.hPut h c
+ meterupdate $ toInteger $ S.length c
+ go cs
+
+meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
+meteredWriteFile meterupdate f b = withBinaryFile f WriteMode $ \h ->
+ meteredWrite meterupdate h b
diff --git a/Remote/Bup.hs b/Remote/Bup.hs
index f5bcc4f..62db01a 100644
--- a/Remote/Bup.hs
+++ b/Remote/Bup.hs
@@ -125,7 +125,7 @@ storeEncrypted r buprepo (cipher, enck) k _p = do
src <- inRepo $ gitAnnexLocation k
params <- bupSplitParams r buprepo enck []
liftIO $ catchBoolIO $
- withEncryptedHandle cipher (L.readFile src) $ \h ->
+ encrypt cipher (feedFile src) $ \h ->
pipeBup params (Just h) Nothing
retrieve :: BupRepo -> Key -> AssociatedFile -> FilePath -> Annex Bool
@@ -141,7 +141,8 @@ retrieveCheap _ _ _ = return False
retrieveEncrypted :: BupRepo -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted buprepo (cipher, enck) _ f = liftIO $ catchBoolIO $
withHandle StdoutHandle createProcessSuccess p $ \h -> do
- withDecryptedContent cipher (L.hGetContents h) $ L.writeFile f
+ decrypt cipher (\toh -> L.hPut toh =<< L.hGetContents h) $
+ readBytes $ L.writeFile f
return True
where
params = bupParams "join" buprepo [Param $ bupRef enck]
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index 006638a..737ae63 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -11,6 +11,7 @@ import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as S
import qualified Data.Map as M
import qualified Control.Exception as E
+import Data.Int
import Common.Annex
import Types.Remote
@@ -19,10 +20,10 @@ import Config
import Utility.FileMode
import Remote.Helper.Special
import Remote.Helper.Encryptable
+import Remote.Helper.Chunked
import Crypto
-import Utility.DataUnits
-import Data.Int
import Annex.Content
+import Meters
remote :: RemoteType
remote = RemoteType {
@@ -47,7 +48,7 @@ gen r u c = do
storeKey = store dir chunksize,
retrieveKeyFile = retrieve dir chunksize,
retrieveKeyFileCheap = retrieveCheap dir chunksize,
- removeKey = remove dir chunksize,
+ removeKey = remove dir,
hasKey = checkPresent dir chunksize,
hasKeyCheap = True,
whereisKey = Nothing,
@@ -58,19 +59,6 @@ gen r u c = do
remotetype = remote
}
-type ChunkSize = Maybe Int64
-
-chunkSize :: Maybe RemoteConfig -> ChunkSize
-chunkSize Nothing = Nothing
-chunkSize (Just m) =
- case M.lookup "chunksize" m of
- Nothing -> Nothing
- Just v -> case readSize dataUnits v of
- Nothing -> error "bad chunksize"
- Just size
- | size <= 0 -> error "bad chunksize"
- | otherwise -> Just $ fromInteger size
-
directorySetup :: UUID -> RemoteConfig -> Annex RemoteConfig
directorySetup u c = do
-- verify configuration is sane
@@ -85,17 +73,18 @@ directorySetup u c = do
gitConfigSpecialRemote u c' "directory" dir
return $ M.delete "directory" c'
-{- Locations to try to access a given Key in the Directory. -}
+{- Locations to try to access a given Key in the Directory.
+ - We try more than one since we used to write to different hash directories. -}
locations :: FilePath -> Key -> [FilePath]
locations d k = map (d </>) (keyPaths k)
-{- An infinite stream of chunks to use for a given file. -}
-chunkStream :: FilePath -> [FilePath]
-chunkStream f = map (\n -> f ++ ".chunk" ++ show n) [1 :: Integer ..]
+{- Directory where the file(s) for a key are stored. -}
+storeDir :: FilePath -> Key -> FilePath
+storeDir d k = addTrailingPathSeparator $ d </> hashDirLower k </> keyFile k
-{- A file that records the number of chunks used. -}
-chunkCount :: FilePath -> FilePath
-chunkCount f = f ++ ".chunkcount"
+{- Where we store temporary data for a key as it's being uploaded. -}
+tmpDir :: FilePath -> Key -> FilePath
+tmpDir d k = addTrailingPathSeparator $ d </> "tmp" </> keyFile k
withCheckedFiles :: (FilePath -> IO Bool) -> ChunkSize -> FilePath -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withCheckedFiles _ _ [] _ _ = return False
@@ -107,18 +96,14 @@ withCheckedFiles check (Just _) d k a = go $ locations d k
where
go [] = return False
go (f:fs) = do
- let chunkcount = chunkCount f
+ let chunkcount = f ++ chunkCount
ifM (check chunkcount)
( do
- count <- readcount chunkcount
- let chunks = take count $ chunkStream f
+ chunks <- listChunks f <$> readFile chunkcount
ifM (all id <$> mapM check chunks)
( a chunks , return False )
, go fs
)
- readcount f = fromMaybe (error $ "cannot parse " ++ f)
- . (readish :: String -> Maybe Int)
- <$> readFile f
withStoredFiles :: ChunkSize -> FilePath -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withStoredFiles = withCheckedFiles doesFileExist
@@ -143,13 +128,13 @@ storeEncrypted d chunksize (cipher, enck) k p = do
src <- inRepo $ gitAnnexLocation k
metered (Just p) k $ \meterupdate ->
storeHelper d chunksize enck $ \dests ->
- withEncryptedContent cipher (L.readFile src) $ \s ->
+ encrypt cipher (feedFile src) $ readBytes $ \b ->
case chunksize of
Nothing -> do
let dest = Prelude.head dests
- meteredWriteFile meterupdate dest s
+ meteredWriteFile meterupdate dest b
return [dest]
- Just _ -> storeSplit meterupdate chunksize dests s
+ Just _ -> storeSplit meterupdate chunksize dests b
{- Splits a ByteString into chunks and writes to dests, obeying configured
- chunk size (not to be confused with the L.ByteString chunk size).
@@ -173,95 +158,54 @@ storeSplit' meterupdate chunksize (d:dests) bs c = do
feed _ [] _ = return []
feed sz (l:ls) h = do
let s = fromIntegral $ S.length l
- if s <= sz
+ if s <= sz || sz == chunksize
then do
S.hPut h l
meterupdate $ toInteger s
feed (sz - s) ls h
else return (l:ls)
-{- Write a L.ByteString to a file, updating a progress meter
- - after each chunk of the L.ByteString, typically every 64 kb or so. -}
-meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
-meteredWriteFile meterupdate dest b =
- meteredWriteFile' meterupdate dest (L.toChunks b) feeder
- where
- feeder chunks = return ([], chunks)
-
-{- Writes a series of S.ByteString chunks to a file, updating a progress
- - meter after each chunk. The feeder is called to get more chunks. -}
-meteredWriteFile' :: MeterUpdate -> FilePath -> s -> (s -> IO (s, [S.ByteString])) -> IO ()
-meteredWriteFile' meterupdate dest startstate feeder =
- E.bracket (openFile dest WriteMode) hClose (feed startstate [])
- where
- feed state [] h = do
- (state', cs) <- feeder state
- unless (null cs) $
- feed state' cs h
- feed state (c:cs) h = do
- S.hPut h c
- meterupdate $ toInteger $ S.length c
- feed state cs h
-
-{- Generates a list of destinations to write to in order to store a key.
- - When chunksize is specified, this list will be a list of chunks.
- - The action should store the file, and return a list of the destinations
- - it stored it to, or [] on error.
- - The stored files are only put into their final place once storage is
- - complete.
- -}
storeHelper :: FilePath -> ChunkSize -> Key -> ([FilePath] -> IO [FilePath]) -> Annex Bool
-storeHelper d chunksize key a = prep <&&> check <&&> go
+storeHelper d chunksize key storer = check <&&> go
where
- desttemplate = Prelude.head $ locations d key
- dir = parentDir desttemplate
- tmpdests = case chunksize of
- Nothing -> [desttemplate ++ tmpprefix]
- Just _ -> map (++ tmpprefix) (chunkStream desttemplate)
- tmpprefix = ".tmp"
- detmpprefix f = take (length f - tmpprefixlen) f
- tmpprefixlen = length tmpprefix
- prep = liftIO $ catchBoolIO $ do
- createDirectoryIfMissing True dir
- allowWrite dir
- return True
+ tmpdir = tmpDir d key
+ destdir = storeDir d key
{- The size is not exactly known when encrypting the key;
- this assumes that at least the size of the key is
- needed as free space. -}
- check = checkDiskSpace (Just dir) key 0
- go = liftIO $ catchBoolIO $ do
- stored <- a tmpdests
- forM_ stored $ \f -> do
- let dest = detmpprefix f
- renameFile f dest
- preventWrite dest
- when (chunksize /= Nothing) $ do
- let chunkcount = chunkCount desttemplate
- _ <- tryIO $ allowWrite chunkcount
- writeFile chunkcount (show $ length stored)
- preventWrite chunkcount
- preventWrite dir
- return (not $ null stored)
+ check = do
+ liftIO $ createDirectoryIfMissing True tmpdir
+ checkDiskSpace (Just tmpdir) key 0
+ go = liftIO $ catchBoolIO $
+ storeChunks key tmpdir destdir chunksize storer recorder finalizer
+ finalizer tmp dest = do
+ void $ tryIO $ allowWrite dest -- may already exist
+ void $ tryIO $ removeDirectoryRecursive dest -- or not exist
+ createDirectoryIfMissing True (parentDir dest)
+ renameDirectory tmp dest
+ mapM_ preventWrite =<< dirContents dest
+ preventWrite dest
+ recorder f s = do
+ void $ tryIO $ allowWrite f
+ writeFile f s
+ preventWrite f
retrieve :: FilePath -> ChunkSize -> Key -> AssociatedFile -> FilePath -> Annex Bool
retrieve d chunksize k _ f = metered Nothing k $ \meterupdate ->
liftIO $ withStoredFiles chunksize d k $ \files ->
catchBoolIO $ do
- meteredWriteFile' meterupdate f files feeder
+ meteredWriteFileChunks meterupdate f files $ L.readFile
return True
- where
- feeder [] = return ([], [])
- feeder (x:xs) = do
- chunks <- L.toChunks <$> L.readFile x
- return (xs, chunks)
retrieveEncrypted :: FilePath -> ChunkSize -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted d chunksize (cipher, enck) k f = metered Nothing k $ \meterupdate ->
liftIO $ withStoredFiles chunksize d enck $ \files ->
catchBoolIO $ do
- withDecryptedContent cipher (L.concat <$> mapM L.readFile files) $
- meteredWriteFile meterupdate f
+ decrypt cipher (feeder files) $
+ readBytes $ meteredWriteFile meterupdate f
return True
+ where
+ feeder files h = forM_ files $ \file -> L.hPut h =<< L.readFile file
retrieveCheap :: FilePath -> ChunkSize -> Key -> FilePath -> Annex Bool
retrieveCheap _ (Just _) _ _ = return False -- no cheap retrieval for chunks
@@ -270,16 +214,13 @@ retrieveCheap d _ k f = liftIO $ withStoredFiles Nothing d k go
go [file] = catchBoolIO $ createSymbolicLink file f >> return True
go _files = return False
-remove :: FilePath -> ChunkSize -> Key -> Annex Bool
-remove d chunksize k = liftIO $ withStoredFiles chunksize d k go
+remove :: FilePath -> Key -> Annex Bool
+remove d k = liftIO $ catchBoolIO $ do
+ allowWrite dir
+ removeDirectoryRecursive dir
+ return True
where
- go = all id <$$> mapM removefile
- removefile file = catchBoolIO $ do
- let dir = parentDir file
- allowWrite dir
- removeFile file
- _ <- tryIO $ removeDirectory dir
- return True
+ dir = storeDir d k
checkPresent :: FilePath -> ChunkSize -> Key -> Annex (Either String Bool)
checkPresent d chunksize k = liftIO $ catchMsgIO $ withStoredFiles chunksize d k $
diff --git a/Remote/Glacier.hs b/Remote/Glacier.hs
new file mode 100644
index 0000000..f960c51
--- /dev/null
+++ b/Remote/Glacier.hs
@@ -0,0 +1,254 @@
+{- Amazon Glacier remotes.
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Remote.Glacier (remote) where
+
+import qualified Data.Map as M
+import System.Environment
+
+import Common.Annex
+import Types.Remote
+import Types.Key
+import qualified Git
+import Config
+import Remote.Helper.Special
+import Remote.Helper.Encryptable
+import qualified Remote.Helper.AWS as AWS
+import Crypto
+import Creds
+import Meters
+import qualified Annex
+
+import System.Process
+
+type Vault = String
+type Archive = FilePath
+
+remote :: RemoteType
+remote = RemoteType {
+ typename = "glacier",
+ enumerate = findSpecialRemotes "glacier",
+ generate = gen,
+ setup = glacierSetup
+}
+
+gen :: Git.Repo -> UUID -> Maybe RemoteConfig -> Annex Remote
+gen r u c = do
+ cst <- remoteCost r veryExpensiveRemoteCost
+ return $ gen' r u c cst
+gen' :: Git.Repo -> UUID -> Maybe RemoteConfig -> Int -> Remote
+gen' r u c cst =
+ encryptableRemote c
+ (storeEncrypted this)
+ (retrieveEncrypted this)
+ this
+ where
+ this = Remote {
+ uuid = u,
+ cost = cst,
+ name = Git.repoDescribe r,
+ storeKey = store this,
+ retrieveKeyFile = retrieve this,
+ retrieveKeyFileCheap = retrieveCheap this,
+ removeKey = remove this,
+ hasKey = checkPresent this,
+ hasKeyCheap = False,
+ whereisKey = Nothing,
+ config = c,
+ repo = r,
+ localpath = Nothing,
+ readonly = False,
+ remotetype = remote
+ }
+
+glacierSetup :: UUID -> RemoteConfig -> Annex RemoteConfig
+glacierSetup u c = do
+ c' <- encryptionSetup c
+ let fullconfig = c' `M.union` defaults
+ genVault fullconfig u
+ gitConfigSpecialRemote u fullconfig "glacier" "true"
+ setRemoteCredPair fullconfig (AWS.creds u)
+ where
+ remotename = fromJust (M.lookup "name" c)
+ defvault = remotename ++ "-" ++ fromUUID u
+ defaults = M.fromList
+ [ ("datacenter", "us-east-1")
+ , ("vault", defvault)
+ ]
+
+store :: Remote -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+store r k _f m
+ | keySize k == Just 0 = do
+ warning "Cannot store empty files in Glacier."
+ return False
+ | otherwise = do
+ src <- inRepo $ gitAnnexLocation k
+ metered (Just m) k $ \meterupdate ->
+ storeHelper r k $ streamMeteredFile src meterupdate
+
+storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
+storeEncrypted r (cipher, enck) k m = do
+ f <- inRepo $ gitAnnexLocation k
+ metered (Just m) k $ \meterupdate ->
+ storeHelper r enck $ \h ->
+ encrypt cipher (feedFile f)
+ (readBytes $ meteredWrite meterupdate h)
+
+retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
+retrieve r k _f d = metered Nothing k $ \meterupdate ->
+ retrieveHelper r k $
+ readBytes $ meteredWriteFile meterupdate d
+
+retrieveCheap :: Remote -> Key -> FilePath -> Annex Bool
+retrieveCheap _ _ _ = return False
+
+retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
+retrieveEncrypted r (cipher, enck) k d = metered Nothing k $ \meterupdate ->
+ retrieveHelper r enck $ readBytes $ \b ->
+ decrypt cipher (feedBytes b) $
+ readBytes $ meteredWriteFile meterupdate d
+
+storeHelper :: Remote -> Key -> (Handle -> IO ()) -> Annex Bool
+storeHelper r k feeder = go =<< glacierEnv c u
+ where
+ c = fromJust $ config r
+ u = uuid r
+ params = glacierParams c
+ [ Param "archive"
+ , Param "upload"
+ , Param "--name", Param $ archive r k
+ , Param $ remoteVault r
+ , Param "-"
+ ]
+ go Nothing = return False
+ go (Just e) = do
+ let p = (proc "glacier" (toCommand params)) { env = Just e }
+ liftIO $ catchBoolIO $
+ withHandle StdinHandle createProcessSuccess p $ \h -> do
+ feeder h
+ return True
+
+retrieveHelper :: Remote -> Key -> (Handle -> IO ()) -> Annex Bool
+retrieveHelper r k reader = go =<< glacierEnv c u
+ where
+ c = fromJust $ config r
+ u = uuid r
+ params = glacierParams c
+ [ Param "archive"
+ , Param "retrieve"
+ , Param "-o-"
+ , Param $ remoteVault r
+ , Param $ archive r k
+ ]
+ go Nothing = return False
+ go (Just e) = do
+ let p = (proc "glacier" (toCommand params)) { env = Just e }
+ ok <- liftIO $ catchBoolIO $
+ withHandle StdoutHandle createProcessSuccess p $ \h ->
+ ifM (hIsEOF h)
+ ( return False
+ , do
+ reader h
+ return True
+ )
+ unless ok later
+ return ok
+ later = showLongNote "Recommend you wait up to 4 hours, and then run this command again."
+
+remove :: Remote -> Key -> Annex Bool
+remove r k = glacierAction r
+ [ Param "archive"
+ , Param "delete"
+ , Param $ remoteVault r
+ , Param $ archive r k
+ ]
+
+checkPresent :: Remote -> Key -> Annex (Either String Bool)
+checkPresent r k = do
+ showAction $ "checking " ++ name r
+ go =<< glacierEnv (fromJust $ config r) (uuid r)
+ where
+ go Nothing = return $ Left "cannot check glacier"
+ go (Just e) = do
+ {- glacier checkpresent outputs the archive name to stdout if
+ - it's present. -}
+ v <- liftIO $ catchMsgIO $
+ readProcessEnv "glacier" (toCommand params) (Just e)
+ case v of
+ Right s -> do
+ let probablypresent = key2file k `elem` lines s
+ if probablypresent
+ then ifM (Annex.getFlag "trustglacier")
+ ( return $ Right True, untrusted )
+ else return $ Right False
+ Left err -> return $ Left err
+
+ params =
+ [ Param "archive"
+ , Param "checkpresent"
+ , Param $ remoteVault r
+ , Param "--quiet"
+ , Param $ archive r k
+ ]
+
+ untrusted = do
+ showLongNote $ unlines
+ [ "Glacier's inventory says it has a copy."
+ , "However, the inventory could be out of date, if it was recently removed."
+ , "(Use --trust-glacier if you're sure it's still in Glacier.)"
+ , ""
+ ]
+ return $ Right False
+
+glacierAction :: Remote -> [CommandParam] -> Annex Bool
+glacierAction r params = runGlacier (fromJust $ config r) (uuid r) params
+
+runGlacier :: RemoteConfig -> UUID -> [CommandParam] -> Annex Bool
+runGlacier c u params = go =<< glacierEnv c u
+ where
+ go Nothing = return False
+ go (Just e) = liftIO $
+ boolSystemEnv "glacier" (glacierParams c params) (Just e)
+
+glacierParams :: RemoteConfig -> [CommandParam] -> [CommandParam]
+glacierParams c params = datacenter:params
+ where
+ datacenter = Param $ "--region=" ++
+ (fromJust $ M.lookup "datacenter" c)
+
+glacierEnv :: RemoteConfig -> UUID -> Annex (Maybe [(String, String)])
+glacierEnv c u = go =<< getRemoteCredPair "glacier" c creds
+ where
+ go Nothing = return Nothing
+ go (Just (user, pass)) = do
+ e <- liftIO getEnvironment
+ return $ Just $ (uk, user):(pk, pass):e
+
+ creds = AWS.creds u
+ (uk, pk) = credPairEnvironment creds
+
+remoteVault :: Remote -> Vault
+remoteVault = vault . fromJust . config
+
+vault :: RemoteConfig -> Vault
+vault = fromJust . M.lookup "vault"
+
+archive :: Remote -> Key -> Archive
+archive r k = fileprefix ++ key2file k
+ where
+ fileprefix = M.findWithDefault "" "fileprefix" $ fromJust $ config r
+
+-- glacier vault create will succeed even if the vault already exists.
+genVault :: RemoteConfig -> UUID -> Annex ()
+genVault c u = unlessM (runGlacier c u params) $
+ error "Failed creating glacier vault."
+ where
+ params =
+ [ Param "vault"
+ , Param "create"
+ , Param $ vault c
+ ]
diff --git a/Remote/Helper/AWS.hs b/Remote/Helper/AWS.hs
new file mode 100644
index 0000000..a988a0b
--- /dev/null
+++ b/Remote/Helper/AWS.hs
@@ -0,0 +1,21 @@
+{- Amazon Web Services common infrastructure.
+ -
+ - Copyright 2011,2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Remote.Helper.AWS where
+
+import Common.Annex
+import Creds
+
+creds :: UUID -> CredPairStorage
+creds u = CredPairStorage
+ { credPairFile = fromUUID u
+ , credPairEnvironment = ("AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")
+ , credPairRemoteKey = Just "s3creds"
+ }
+
+setCredsEnv :: CredPair -> IO ()
+setCredsEnv p = setEnvCredPair p $ creds undefined
diff --git a/Remote/Helper/Chunked.hs b/Remote/Helper/Chunked.hs
new file mode 100644
index 0000000..4f04a1c
--- /dev/null
+++ b/Remote/Helper/Chunked.hs
@@ -0,0 +1,125 @@
+{- git-annex chunked remotes
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Remote.Helper.Chunked where
+
+import Common.Annex
+import Utility.DataUnits
+import Types.Remote
+import Meters
+
+import qualified Data.Map as M
+import qualified Data.ByteString.Lazy as L
+import Data.Int
+import qualified Control.Exception as E
+
+type ChunkSize = Maybe Int64
+
+{- Gets a remote's configured chunk size. -}
+chunkSize :: Maybe RemoteConfig -> ChunkSize
+chunkSize Nothing = Nothing
+chunkSize (Just m) =
+ case M.lookup "chunksize" m of
+ Nothing -> Nothing
+ Just v -> case readSize dataUnits v of
+ Nothing -> error "bad chunksize"
+ Just size
+ | size <= 0 -> error "bad chunksize"
+ | otherwise -> Just $ fromInteger size
+
+{- This is an extension that's added to the usual file (or whatever)
+ - where the remote stores a key. -}
+type ChunkExt = String
+
+{- A record of the number of chunks used.
+ -
+ - While this can be guessed at based on the size of the key, encryption
+ - makes that larger. Also, using this helps deal with changes to chunksize
+ - over the life of a remote.
+ -}
+chunkCount :: ChunkExt
+chunkCount = ".chunkcount"
+
+{- Parses the String from the chunkCount file, and returns the files that
+ - are used to store the chunks. -}
+listChunks :: FilePath -> String -> [FilePath]
+listChunks basedest chunkcount = take count $ map (basedest ++) chunkStream
+ where
+ count = fromMaybe 0 $ readish chunkcount
+
+{- An infinite stream of extensions to use for chunks. -}
+chunkStream :: [ChunkExt]
+chunkStream = map (\n -> ".chunk" ++ show n) [1 :: Integer ..]
+
+{- Given the base destination to use to store a value,
+ - generates a stream of temporary destinations (just one when not chunking)
+ - and passes it to an action, which should chunk and store the data,
+ - and return the destinations it stored to, or [] on error. Then
+ - calls the storer to write the chunk count (if chunking). Finally, the
+ - finalizer is called to rename the tmp into the dest
+ - (and do any other cleanup).
+ -}
+storeChunks :: Key -> FilePath -> FilePath -> ChunkSize -> ([FilePath] -> IO [FilePath]) -> (FilePath -> String -> IO ()) -> (FilePath -> FilePath -> IO ()) -> IO Bool
+storeChunks key tmp dest chunksize storer recorder finalizer =
+ either (const $ return False) return
+ =<< (E.try go :: IO (Either E.SomeException Bool))
+ where
+ go = do
+ stored <- storer tmpdests
+ when (chunksize /= Nothing) $ do
+ let chunkcount = basef ++ chunkCount
+ recorder chunkcount (show $ length stored)
+ finalizer tmp dest
+ return (not $ null stored)
+
+ basef = tmp ++ keyFile key
+ tmpdests
+ | chunksize == Nothing = [basef]
+ | otherwise = map (basef ++ ) chunkStream
+
+{- Given a list of destinations to use, chunks the data according to the
+ - ChunkSize, and runs the storer action to store each chunk. Returns
+ - the destinations where data was stored, or [] on error.
+ -
+ - This buffers each chunk in memory.
+ - More optimal versions of this can be written, that rely
+ - on L.toChunks to split the lazy bytestring into chunks (typically
+ - smaller than the ChunkSize), and eg, write those chunks to a Handle.
+ - But this is the best that can be done with the storer interface that
+ - writes a whole L.ByteString at a time.
+ -}
+storeChunked :: ChunkSize -> [FilePath] -> (FilePath -> L.ByteString -> IO ()) -> L.ByteString -> IO [FilePath]
+storeChunked chunksize dests storer content =
+ either (const $ return []) return
+ =<< (E.try (go chunksize dests) :: IO (Either E.SomeException [FilePath]))
+ where
+ go _ [] = return [] -- no dests!?
+
+ go Nothing (d:_) = do
+ storer d content
+ return [d]
+
+ go (Just sz) _
+ -- always write a chunk, even if the data is 0 bytes
+ | L.null content = go Nothing dests
+ | otherwise = storechunks sz [] dests content
+
+ storechunks _ _ [] _ = return [] -- ran out of dests
+ storechunks sz useddests (d:ds) b
+ | L.null b = return $ reverse useddests
+ | otherwise = do
+ let (chunk, b') = L.splitAt sz b
+ storer d chunk
+ storechunks sz (d:useddests) ds b'
+
+{- Writes a series of chunks to a file. The feeder is called to get
+ - each chunk. -}
+meteredWriteFileChunks :: MeterUpdate -> FilePath -> [v] -> (v -> IO L.ByteString) -> IO ()
+meteredWriteFileChunks meterupdate dest chunks feeder =
+ withBinaryFile dest WriteMode $ \h ->
+ forM_ chunks $ \c ->
+ meteredWrite meterupdate h =<< feeder c
diff --git a/Remote/Helper/Encryptable.hs b/Remote/Helper/Encryptable.hs
index 12c7d37..05aca9a 100644
--- a/Remote/Helper/Encryptable.hs
+++ b/Remote/Helper/Encryptable.hs
@@ -81,24 +81,33 @@ remoteCipher c = go $ extractCipher c
cache <- Annex.getState Annex.ciphers
case M.lookup encipher cache of
Just cipher -> return $ Just cipher
- Nothing -> decrypt encipher cache
- decrypt encipher cache = do
- showNote "gpg"
- cipher <- liftIO $ decryptCipher encipher
- Annex.changeState (\s -> s { Annex.ciphers = M.insert encipher cipher cache })
- return $ Just cipher
+ Nothing -> do
+ showNote "gpg"
+ cipher <- liftIO $ decryptCipher encipher
+ Annex.changeState (\s -> s { Annex.ciphers = M.insert encipher cipher cache })
+ return $ Just cipher
-{- Checks if there is a trusted (non-shared) cipher. -}
-isTrustedCipher :: RemoteConfig -> Bool
-isTrustedCipher c =
- isJust (M.lookup "cipherkeys" c) && isJust (M.lookup "cipher" c)
+{- Checks if the remote's config allows storing creds in the remote's config.
+ -
+ - embedcreds=yes allows this, and embedcreds=no prevents it.
+ -
+ - If not set, the default is to only store creds when it's surely safe:
+ - When gpg encryption is used, in which case the creds will be encrypted
+ - using it. Not when a shared cipher is used.
+ -}
+embedCreds :: RemoteConfig -> Bool
+embedCreds c
+ | M.lookup "embedcreds" c == Just "yes" = True
+ | M.lookup "embedcreds" c == Just "no" = False
+ | isJust (M.lookup "cipherkeys" c) && isJust (M.lookup "cipher" c) = True
+ | otherwise = False
{- Gets encryption Cipher, and encrypted version of Key. -}
cipherKey :: Maybe RemoteConfig -> Key -> Annex (Maybe (Cipher, Key))
cipherKey Nothing _ = return Nothing
-cipherKey (Just c) k = maybe Nothing encrypt <$> remoteCipher c
+cipherKey (Just c) k = maybe Nothing make <$> remoteCipher c
where
- encrypt ciphertext = Just (ciphertext, encryptKey ciphertext k)
+ make ciphertext = Just (ciphertext, encryptKey ciphertext k)
{- Stores an StorableCipher in a remote's configuration. -}
storeCipher :: RemoteConfig -> StorableCipher -> RemoteConfig
diff --git a/Remote/Hook.hs b/Remote/Hook.hs
index f9a143c..7173a5b 100644
--- a/Remote/Hook.hs
+++ b/Remote/Hook.hs
@@ -108,7 +108,8 @@ store h k _f _p = do
storeEncrypted :: String -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
storeEncrypted h (cipher, enck) k _p = withTmp enck $ \tmp -> do
src <- inRepo $ gitAnnexLocation k
- liftIO $ withEncryptedContent cipher (L.readFile src) $ L.writeFile tmp
+ liftIO $ encrypt cipher (feedFile src) $
+ readBytes $ L.writeFile tmp
runHook h "store" enck (Just tmp) $ return True
retrieve :: String -> Key -> AssociatedFile -> FilePath -> Annex Bool
@@ -120,7 +121,8 @@ retrieveCheap _ _ _ = return False
retrieveEncrypted :: String -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted h (cipher, enck) _ f = withTmp enck $ \tmp ->
runHook h "retrieve" enck (Just tmp) $ liftIO $ catchBoolIO $ do
- withDecryptedContent cipher (L.readFile tmp) $ L.writeFile f
+ decrypt cipher (feedFile tmp) $
+ readBytes $ L.writeFile f
return True
remove :: String -> Key -> Annex Bool
diff --git a/Remote/List.hs b/Remote/List.hs
index ea1d61c..3179456 100644
--- a/Remote/List.hs
+++ b/Remote/List.hs
@@ -29,6 +29,10 @@ import qualified Remote.Bup
import qualified Remote.Directory
import qualified Remote.Rsync
import qualified Remote.Web
+#ifdef WITH_WEBDAV
+import qualified Remote.WebDAV
+#endif
+import qualified Remote.Glacier
import qualified Remote.Hook
remoteTypes :: [RemoteType]
@@ -41,6 +45,10 @@ remoteTypes =
, Remote.Directory.remote
, Remote.Rsync.remote
, Remote.Web.remote
+#ifdef WITH_WEBDAV
+ , Remote.WebDAV.remote
+#endif
+ , Remote.Glacier.remote
, Remote.Hook.remote
]
diff --git a/Remote/Rsync.hs b/Remote/Rsync.hs
index 1d5f2d2..c48a9c1 100644
--- a/Remote/Rsync.hs
+++ b/Remote/Rsync.hs
@@ -110,7 +110,8 @@ store o k _f p = rsyncSend o p k <=< inRepo $ gitAnnexLocation k
storeEncrypted :: RsyncOpts -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
storeEncrypted o (cipher, enck) k p = withTmp enck $ \tmp -> do
src <- inRepo $ gitAnnexLocation k
- liftIO $ withEncryptedContent cipher (L.readFile src) $ L.writeFile tmp
+	liftIO $ encrypt cipher (feedFile src) $
+ readBytes $ L.writeFile tmp
rsyncSend o p enck tmp
retrieve :: RsyncOpts -> Key -> AssociatedFile -> FilePath -> Annex Bool
@@ -128,7 +129,8 @@ retrieveEncrypted :: RsyncOpts -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted o (cipher, enck) _ f = withTmp enck $ \tmp -> do
ifM (retrieve o enck undefined tmp)
( liftIO $ catchBoolIO $ do
- withDecryptedContent cipher (L.readFile tmp) $ L.writeFile f
+ decrypt cipher (feedFile tmp) $
+ readBytes $ L.writeFile f
return True
, return False
)
diff --git a/Remote/S3.hs b/Remote/S3.hs
index 0c9d523..400f3e0 100644
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -5,7 +5,7 @@
- Licensed under the GNU GPL version 3 or higher.
-}
-module Remote.S3 (remote, s3SetCredsEnv) where
+module Remote.S3 (remote) where
import Network.AWS.AWSConnection
import Network.AWS.S3Object
@@ -14,8 +14,6 @@ import Network.AWS.AWSResult
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Map as M
import Data.Char
-import System.Environment
-import System.Posix.Env (setEnv)
import Common.Annex
import Types.Remote
@@ -24,11 +22,11 @@ import qualified Git
import Config
import Remote.Helper.Special
import Remote.Helper.Encryptable
+import qualified Remote.Helper.AWS as AWS
import Crypto
+import Creds
+import Meters
import Annex.Content
-import Utility.Base64
-import Annex.Perms
-import Utility.FileMode
remote :: RemoteType
remote = RemoteType {
@@ -87,7 +85,7 @@ s3Setup u c = handlehost $ M.lookup "host" c
use fullconfig = do
gitConfigSpecialRemote u fullconfig "s3" "true"
- s3SetCreds fullconfig u
+ setRemoteCredPair fullconfig (AWS.creds u)
defaulthost = do
c' <- encryptionSetup c
@@ -115,62 +113,69 @@ s3Setup u c = handlehost $ M.lookup "host" c
M.delete "bucket" defaults
store :: Remote -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
-store r k _f _p = s3Action r False $ \(conn, bucket) -> do
- dest <- inRepo $ gitAnnexLocation k
- res <- liftIO $ storeHelper (conn, bucket) r k dest
+store r k _f p = s3Action r False $ \(conn, bucket) -> do
+ src <- inRepo $ gitAnnexLocation k
+ res <- storeHelper (conn, bucket) r k p src
s3Bool res
storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
-storeEncrypted r (cipher, enck) k _p = s3Action r False $ \(conn, bucket) ->
+storeEncrypted r (cipher, enck) k p = s3Action r False $ \(conn, bucket) ->
-- To get file size of the encrypted content, have to use a temp file.
-- (An alternative would be chunking to to a constant size.)
withTmp enck $ \tmp -> do
f <- inRepo $ gitAnnexLocation k
- liftIO $ withEncryptedContent cipher (L.readFile f) $ \s -> L.writeFile tmp s
- res <- liftIO $ storeHelper (conn, bucket) r enck tmp
+ liftIO $ encrypt cipher (feedFile f) $
+ readBytes $ L.writeFile tmp
+ res <- storeHelper (conn, bucket) r enck p tmp
s3Bool res
-storeHelper :: (AWSConnection, String) -> Remote -> Key -> FilePath -> IO (AWSResult ())
-storeHelper (conn, bucket) r k file = do
- content <- liftIO $ L.readFile file
- -- size is provided to S3 so the whole content does not need to be
- -- buffered to calculate it
+storeHelper :: (AWSConnection, String) -> Remote -> Key -> MeterUpdate -> FilePath -> Annex (AWSResult ())
+storeHelper (conn, bucket) r k p file = do
size <- maybe getsize (return . fromIntegral) $ keySize k
- let object = setStorageClass storageclass $
- S3Object bucket (bucketFile r k) ""
- (("Content-Length", show size) : xheaders) content
- sendObject conn object
+ meteredBytes (Just p) size $ \meterupdate ->
+ liftIO $ withMeteredFile file meterupdate $ \content -> do
+ -- size is provided to S3 so the whole content
+ -- does not need to be buffered to calculate it
+ let object = setStorageClass storageclass $ S3Object
+ bucket (bucketFile r k) ""
+ (("Content-Length", show size) : xheaders)
+ content
+ sendObject conn object
where
storageclass =
case fromJust $ M.lookup "storageclass" $ fromJust $ config r of
"REDUCED_REDUNDANCY" -> REDUCED_REDUNDANCY
_ -> STANDARD
- getsize = fileSize <$> (liftIO $ getFileStatus file)
+
+ getsize = liftIO $ fromIntegral . fileSize <$> getFileStatus file
xheaders = filter isxheader $ M.assocs $ fromJust $ config r
isxheader (h, _) = "x-amz-" `isPrefixOf` h
retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieve r k _f d = s3Action r False $ \(conn, bucket) -> do
- res <- liftIO $ getObject conn $ bucketKey r bucket k
- case res of
- Right o -> do
- liftIO $ L.writeFile d $ obj_data o
- return True
- Left e -> s3Warning e
+retrieve r k _f d = s3Action r False $ \(conn, bucket) ->
+ metered Nothing k $ \meterupdate -> do
+ res <- liftIO $ getObject conn $ bucketKey r bucket k
+ case res of
+ Right o -> do
+ liftIO $ meteredWriteFile meterupdate d $
+ obj_data o
+ return True
+ Left e -> s3Warning e
retrieveCheap :: Remote -> Key -> FilePath -> Annex Bool
retrieveCheap _ _ _ = return False
retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
-retrieveEncrypted r (cipher, enck) _ f = s3Action r False $ \(conn, bucket) -> do
- res <- liftIO $ getObject conn $ bucketKey r bucket enck
- case res of
- Right o -> liftIO $
- withDecryptedContent cipher (return $ obj_data o) $ \content -> do
- L.writeFile f content
- return True
- Left e -> s3Warning e
+retrieveEncrypted r (cipher, enck) k d = s3Action r False $ \(conn, bucket) ->
+ metered Nothing k $ \meterupdate -> do
+ res <- liftIO $ getObject conn $ bucketKey r bucket enck
+ case res of
+ Right o -> liftIO $ decrypt cipher (\h -> meteredWrite meterupdate h $ obj_data o) $
+ readBytes $ \content -> do
+ L.writeFile d content
+ return True
+ Left e -> s3Warning e
remove :: Remote -> Key -> Annex Bool
remove r k = s3Action r False $ \(conn, bucket) -> do
@@ -257,93 +262,13 @@ s3ConnectionRequired c u =
maybe (error "Cannot connect to S3") return =<< s3Connection c u
s3Connection :: RemoteConfig -> UUID -> Annex (Maybe AWSConnection)
-s3Connection c u = do
- creds <- s3GetCreds c u
- case creds of
- Just (ak, sk) -> return $ Just $ AWSConnection host port ak sk
- _ -> do
- warning $ "Set both " ++ s3AccessKey ++ " and " ++ s3SecretKey ++ " to use S3"
- return Nothing
+s3Connection c u = go =<< getRemoteCredPair "S3" c (AWS.creds u)
where
+ go Nothing = return Nothing
+ go (Just (ak, sk)) = return $ Just $ AWSConnection host port ak sk
+
host = fromJust $ M.lookup "host" c
port = let s = fromJust $ M.lookup "port" c in
case reads s of
[(p, _)] -> p
_ -> error $ "bad S3 port value: " ++ s
-
-{- S3 creds come from the environment if set, otherwise from the cache
- - in gitAnnexCredsDir, or failing that, might be stored encrypted in
- - the remote's config. -}
-s3GetCreds :: RemoteConfig -> UUID -> Annex (Maybe (String, String))
-s3GetCreds c u = maybe fromcache (return . Just) =<< liftIO getenv
- where
- getenv = liftM2 (,)
- <$> get s3AccessKey
- <*> get s3SecretKey
- where
- get = catchMaybeIO . getEnv
- fromcache = do
- d <- fromRepo gitAnnexCredsDir
- let f = d </> fromUUID u
- v <- liftIO $ catchMaybeIO $ readFile f
- case lines <$> v of
- Just (ak:sk:[]) -> return $ Just (ak, sk)
- _ -> fromconfig
- fromconfig = do
- mcipher <- remoteCipher c
- case (M.lookup "s3creds" c, mcipher) of
- (Just s3creds, Just cipher) -> do
- creds <- liftIO $ decrypt s3creds cipher
- case creds of
- [ak, sk] -> do
- s3CacheCreds (ak, sk) u
- return $ Just (ak, sk)
- _ -> do error "bad s3creds"
- _ -> return Nothing
- decrypt s3creds cipher = lines
- <$> withDecryptedContent cipher
- (return $ L.pack $ fromB64 s3creds)
- (return . L.unpack)
-
-{- Stores S3 creds encrypted in the remote's config if possible to do so
- - securely, and otherwise locally in gitAnnexCredsDir. -}
-s3SetCreds :: RemoteConfig -> UUID -> Annex RemoteConfig
-s3SetCreds c u = do
- creds <- s3GetCreds c u
- case creds of
- Just (ak, sk) -> do
- mcipher <- remoteCipher c
- case mcipher of
- Just cipher | isTrustedCipher c -> do
- s <- liftIO $ withEncryptedContent cipher
- (return $ L.pack $ unlines [ak, sk])
- (return . L.unpack)
- return $ M.insert "s3creds" (toB64 s) c
- _ -> do
- s3CacheCreds (ak, sk) u
- return c
- _ -> return c
-
-{- The S3 creds are cached in gitAnnexCredsDir. -}
-s3CacheCreds :: (String, String) -> UUID -> Annex ()
-s3CacheCreds (ak, sk) u = do
- d <- fromRepo gitAnnexCredsDir
- createAnnexDirectory d
- liftIO $ do
- let f = d </> fromUUID u
- h <- openFile f WriteMode
- modifyFileMode f $ removeModes
- [groupReadMode, otherReadMode]
- hPutStr h $ unlines [ak, sk]
- hClose h
-
-{- Sets the S3 creds in the environment. -}
-s3SetCredsEnv :: (String, String) -> IO ()
-s3SetCredsEnv (ak, sk) = do
- setEnv s3AccessKey ak True
- setEnv s3SecretKey sk True
-
-s3AccessKey :: String
-s3AccessKey = "AWS_ACCESS_KEY_ID"
-s3SecretKey :: String
-s3SecretKey = "AWS_SECRET_ACCESS_KEY"
diff --git a/Remote/WebDAV.hs b/Remote/WebDAV.hs
new file mode 100644
index 0000000..b303dbe
--- /dev/null
+++ b/Remote/WebDAV.hs
@@ -0,0 +1,334 @@
+{- WebDAV remotes.
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module Remote.WebDAV (remote, setCredsEnv) where
+
+import Network.Protocol.HTTP.DAV
+import qualified Data.Map as M
+import qualified Data.ByteString.UTF8 as B8
+import qualified Data.ByteString.Lazy.UTF8 as L8
+import qualified Data.ByteString.Lazy as L
+import qualified Data.Text.Lazy as LT
+import qualified Text.XML as XML
+import Network.URI (normalizePathSegments)
+import qualified Control.Exception as E
+import Network.HTTP.Conduit (HttpException(..))
+import Network.HTTP.Types
+import System.IO.Error
+
+import Common.Annex
+import Types.Remote
+import qualified Git
+import Config
+import Remote.Helper.Special
+import Remote.Helper.Encryptable
+import Remote.Helper.Chunked
+import Crypto
+import Creds
+import Meters
+
+type DavUrl = String
+type DavUser = B8.ByteString
+type DavPass = B8.ByteString
+
+remote :: RemoteType
+remote = RemoteType {
+ typename = "webdav",
+ enumerate = findSpecialRemotes "webdav",
+ generate = gen,
+ setup = webdavSetup
+}
+
+gen :: Git.Repo -> UUID -> Maybe RemoteConfig -> Annex Remote
+gen r u c = do
+ cst <- remoteCost r expensiveRemoteCost
+ return $ gen' r u c cst
+gen' :: Git.Repo -> UUID -> Maybe RemoteConfig -> Int -> Remote
+gen' r u c cst =
+ encryptableRemote c
+ (storeEncrypted this)
+ (retrieveEncrypted this)
+ this
+ where
+ this = Remote {
+ uuid = u,
+ cost = cst,
+ name = Git.repoDescribe r,
+ storeKey = store this,
+ retrieveKeyFile = retrieve this,
+ retrieveKeyFileCheap = retrieveCheap this,
+ removeKey = remove this,
+ hasKey = checkPresent this,
+ hasKeyCheap = False,
+ whereisKey = Nothing,
+ config = c,
+ repo = r,
+ localpath = Nothing,
+ readonly = False,
+ remotetype = remote
+ }
+
+webdavSetup :: UUID -> RemoteConfig -> Annex RemoteConfig
+webdavSetup u c = do
+ let url = fromMaybe (error "Specify url=") $
+ M.lookup "url" c
+ c' <- encryptionSetup c
+ creds <- getCreds c' u
+ testDav url creds
+ gitConfigSpecialRemote u c' "webdav" "true"
+ setRemoteCredPair c' (davCreds u)
+
+store :: Remote -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
+store r k _f p = metered (Just p) k $ \meterupdate ->
+ davAction r False $ \(baseurl, user, pass) -> do
+ f <- inRepo $ gitAnnexLocation k
+ liftIO $ withMeteredFile f meterupdate $
+ storeHelper r k baseurl user pass
+
+storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
+storeEncrypted r (cipher, enck) k p = metered (Just p) k $ \meterupdate ->
+ davAction r False $ \(baseurl, user, pass) -> do
+ f <- inRepo $ gitAnnexLocation k
+ liftIO $ encrypt cipher (streamMeteredFile f meterupdate) $
+ readBytes $ storeHelper r enck baseurl user pass
+
+storeHelper :: Remote -> Key -> DavUrl -> DavUser -> DavPass -> L.ByteString -> IO Bool
+storeHelper r k baseurl user pass b = catchBoolIO $ do
+ davMkdir tmpurl user pass
+ storeChunks k tmpurl keyurl chunksize storer recorder finalizer
+ where
+ tmpurl = tmpLocation baseurl k
+ keyurl = davLocation baseurl k
+ chunksize = chunkSize $ config r
+ storer urls = storeChunked chunksize urls storehttp b
+ recorder url s = storehttp url (L8.fromString s)
+ finalizer srcurl desturl = do
+ void $ catchMaybeHttp (deleteContent desturl user pass)
+ davMkdir (urlParent desturl) user pass
+ moveContent srcurl (B8.fromString desturl) user pass
+ storehttp url v = putContentAndProps url user pass
+ (noProps, (contentType, v))
+
+retrieveCheap :: Remote -> Key -> FilePath -> Annex Bool
+retrieveCheap _ _ _ = return False
+
+retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
+retrieve r k _f d = metered Nothing k $ \meterupdate ->
+ davAction r False $ \(baseurl, user, pass) -> liftIO $ catchBoolIO $
+ withStoredFiles r k baseurl user pass onerr $ \urls -> do
+ meteredWriteFileChunks meterupdate d urls $ \url -> do
+ mb <- davGetUrlContent url user pass
+ case mb of
+ Nothing -> throwIO "download failed"
+ Just b -> return b
+ return True
+ where
+ onerr _ = return False
+
+retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
+retrieveEncrypted r (cipher, enck) k d = metered Nothing k $ \meterupdate ->
+ davAction r False $ \(baseurl, user, pass) -> liftIO $ catchBoolIO $
+ withStoredFiles r enck baseurl user pass onerr $ \urls -> do
+ decrypt cipher (feeder user pass urls) $
+ readBytes $ meteredWriteFile meterupdate d
+ return True
+ where
+ onerr _ = return False
+
+ feeder _ _ [] _ = noop
+ feeder user pass (url:urls) h = do
+ mb <- davGetUrlContent url user pass
+ case mb of
+ Nothing -> throwIO "download failed"
+ Just b -> do
+ L.hPut h b
+ feeder user pass urls h
+
+remove :: Remote -> Key -> Annex Bool
+remove r k = davAction r False $ \(baseurl, user, pass) -> liftIO $ do
+ -- Delete the key's whole directory, including any chunked
+ -- files, etc, in a single action.
+ let url = davLocation baseurl k
+ isJust <$> catchMaybeHttp (deleteContent url user pass)
+
+checkPresent :: Remote -> Key -> Annex (Either String Bool)
+checkPresent r k = davAction r noconn go
+ where
+ noconn = Left $ error $ name r ++ " not configured"
+
+ go (baseurl, user, pass) = do
+ showAction $ "checking " ++ name r
+ liftIO $ withStoredFiles r k baseurl user pass onerr check
+ where
+ check [] = return $ Right True
+ check (url:urls) = do
+ v <- davUrlExists url user pass
+ if v == Right True
+ then check urls
+ else return v
+
+ {- Failed to read the chunkcount file; see if it's missing,
+ - or if there's a problem accessing it,
+ - or perhaps this was an intermittent error. -}
+ onerr url = do
+ v <- davUrlExists url user pass
+ if v == Right True
+ then return $ Left $ "failed to read " ++ url
+ else return v
+
+withStoredFiles
+ :: Remote
+ -> Key
+ -> DavUrl
+ -> DavUser
+ -> DavPass
+ -> (DavUrl -> IO a)
+ -> ([DavUrl] -> IO a)
+ -> IO a
+withStoredFiles r k baseurl user pass onerr a
+ | isJust $ chunkSize $ config r = do
+ let chunkcount = keyurl ++ chunkCount
+ maybe (onerr chunkcount) (a . listChunks keyurl . L8.toString)
+ =<< davGetUrlContent chunkcount user pass
+ | otherwise = a [keyurl]
+ where
+ keyurl = davLocation baseurl k ++ keyFile k
+
+davAction :: Remote -> a -> ((DavUrl, DavUser, DavPass) -> Annex a) -> Annex a
+davAction r unconfigured action = case config r of
+ Nothing -> return unconfigured
+ Just c -> do
+ mcreds <- getCreds c (uuid r)
+ case (mcreds, M.lookup "url" c) of
+ (Just (user, pass), Just url) ->
+ action (url, toDavUser user, toDavPass pass)
+ _ -> return unconfigured
+
+toDavUser :: String -> DavUser
+toDavUser = B8.fromString
+
+toDavPass :: String -> DavPass
+toDavPass = B8.fromString
+
+{- The directory where files(s) for a key are stored. -}
+davLocation :: DavUrl -> Key -> DavUrl
+davLocation baseurl k = addTrailingPathSeparator $
+ davUrl baseurl $ hashDirLower k </> keyFile k
+
+{- Where we store temporary data for a key as it's being uploaded. -}
+tmpLocation :: DavUrl -> Key -> DavUrl
+tmpLocation baseurl k = addTrailingPathSeparator $
+ davUrl baseurl $ "tmp" </> keyFile k
+
+davUrl :: DavUrl -> FilePath -> DavUrl
+davUrl baseurl file = baseurl </> file
+
+davUrlExists :: DavUrl -> DavUser -> DavPass -> IO (Either String Bool)
+davUrlExists url user pass = decode <$> catchHttp (getProps url user pass)
+ where
+ decode (Right _) = Right True
+ decode (Left (Left (StatusCodeException status _)))
+ | statusCode status == statusCode notFound404 = Right False
+ decode (Left e) = Left $ showEitherException e
+
+davGetUrlContent :: DavUrl -> DavUser -> DavPass -> IO (Maybe L.ByteString)
+davGetUrlContent url user pass = fmap (snd . snd) <$>
+ catchMaybeHttp (getPropsAndContent url user pass)
+
+{- Creates a directory in WebDAV, if not already present; also creating
+ - any missing parent directories. -}
+davMkdir :: DavUrl -> DavUser -> DavPass -> IO ()
+davMkdir url user pass = go url
+ where
+ make u = makeCollection u user pass
+
+ go u = do
+ r <- E.try (make u) :: IO (Either E.SomeException Bool)
+ case r of
+ {- Parent directory is missing. Recurse to create
+ - it, and try once more to create the directory. -}
+ Right False -> do
+ go (urlParent u)
+ void $ make u
+ {- Directory created successfully -}
+ Right True -> return ()
+ {- Directory already exists, or some other error
+ - occurred. In the latter case, whatever wanted
+ - to use this directory will fail. -}
+ Left _ -> return ()
+
+{- Catches HTTP and IO exceptions. -}
+catchMaybeHttp :: IO a -> IO (Maybe a)
+catchMaybeHttp a = (Just <$> a) `E.catches`
+ [ E.Handler $ \(_e :: HttpException) -> return Nothing
+ , E.Handler $ \(_e :: E.IOException) -> return Nothing
+ ]
+
+{- Catches HTTP and IO exceptions -}
+catchHttp :: IO a -> IO (Either EitherException a)
+catchHttp a = (Right <$> a) `E.catches`
+ [ E.Handler $ \(e :: HttpException) -> return $ Left $ Left e
+ , E.Handler $ \(e :: E.IOException) -> return $ Left $ Right e
+ ]
+
+type EitherException = Either HttpException E.IOException
+
+showEitherException :: EitherException -> String
+showEitherException (Left (StatusCodeException status _)) = show $ statusMessage status
+showEitherException (Left httpexception) = show httpexception
+showEitherException (Right ioexception) = show ioexception
+
+throwIO :: String -> IO a
+throwIO msg = ioError $ mkIOError userErrorType msg Nothing Nothing
+
+urlParent :: DavUrl -> DavUrl
+urlParent url = dropTrailingPathSeparator $
+ normalizePathSegments (dropTrailingPathSeparator url ++ "/..")
+ where
+
+{- Test if a WebDAV store is usable, by writing to a test file, and then
+ - deleting the file. Exits with an IO error if not. -}
+testDav :: String -> Maybe CredPair -> Annex ()
+testDav baseurl (Just (u, p)) = do
+ showSideAction "testing WebDAV server"
+ liftIO $ either (throwIO . showEitherException) (const noop)
+ =<< catchHttp go
+ where
+ go = do
+ davMkdir baseurl user pass
+ putContentAndProps testurl user pass
+ (noProps, (contentType, L.empty))
+ deleteContent testurl user pass
+
+ user = toDavUser u
+ pass = toDavPass p
+ testurl = davUrl baseurl "git-annex-test"
+testDav _ Nothing = error "Need to configure webdav username and password."
+
+{- Content-Type to use for files uploaded to WebDAV. -}
+contentType :: Maybe B8.ByteString
+contentType = Just $ B8.fromString "application/octet-stream"
+
+{- The DAV library requires that properties be specified when storing a file.
+ - This just omits any real properties. -}
+noProps :: XML.Document
+noProps = XML.parseText_ XML.def $ LT.pack "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<propertyupdate/>"
+
+getCreds :: RemoteConfig -> UUID -> Annex (Maybe CredPair)
+getCreds c u = getRemoteCredPair "webdav" c (davCreds u)
+
+davCreds :: UUID -> CredPairStorage
+davCreds u = CredPairStorage
+ { credPairFile = fromUUID u
+ , credPairEnvironment = ("WEBDAV_USERNAME", "WEBDAV_PASSWORD")
+ , credPairRemoteKey = Just "davcreds"
+ }
+
+setCredsEnv :: (String, String) -> IO ()
+setCredsEnv creds = setEnvCredPair creds $ davCreds undefined
diff --git a/Seek.hs b/Seek.hs
index cd30986..959255c 100644
--- a/Seek.hs
+++ b/Seek.hs
@@ -22,8 +22,14 @@ import qualified Limit
import qualified Option
seekHelper :: ([FilePath] -> Git.Repo -> IO ([FilePath], IO Bool)) -> [FilePath] -> Annex [FilePath]
-seekHelper a params = inRepo $ \g ->
- runPreserveOrder (\fs -> Git.Command.leaveZombie <$> a fs g) params
+seekHelper a params = do
+ ll <- inRepo $ \g ->
+ runSegmentPaths (\fs -> Git.Command.leaveZombie <$> a fs g) params
+ {- Show warnings only for files/directories that do not exist. -}
+ forM_ (map fst $ filter (null . snd) $ zip params ll) $ \p ->
+ unlessM (liftIO $ doesFileExist p <||> doesDirectoryExist p) $
+ fileNotFound p
+ return $ concat ll
withFilesInGit :: (FilePath -> CommandStart) -> CommandSeek
withFilesInGit a params = prepFiltered a $ seekHelper LsFiles.inRepo params
@@ -34,7 +40,7 @@ withFilesNotInGit a params = do
files <- filter (not . dotfile) <$>
seekunless (null ps && not (null params)) ps
dotfiles <- seekunless (null dotps) dotps
- prepFiltered a $ return $ preserveOrder params (files++dotfiles)
+ prepFiltered a $ return $ concat $ segmentPaths params (files++dotfiles)
where
(dotps, ps) = partition dotfile params
seekunless True _ = return []
diff --git a/Types.hs b/Types.hs
index 4c16fb8..eb77826 100644
--- a/Types.hs
+++ b/Types.hs
@@ -12,7 +12,8 @@ module Types (
UUID(..),
Remote,
RemoteType,
- Option
+ Option,
+ MeterUpdate
) where
import Annex
@@ -21,6 +22,7 @@ import Types.Key
import Types.UUID
import Types.Remote
import Types.Option
+import Types.Meters
type Backend = BackendA Annex
type Remote = RemoteA Annex
diff --git a/Types/Messages.hs b/Types/Messages.hs
index 75653d5..4fcce79 100644
--- a/Types/Messages.hs
+++ b/Types/Messages.hs
@@ -7,14 +7,18 @@
module Types.Messages where
+import qualified Data.Set as S
+
data OutputType = NormalOutput | QuietOutput | JSONOutput
data SideActionBlock = NoBlock | StartBlock | InBlock
+ deriving (Eq)
data MessageState = MessageState
{ outputType :: OutputType
, sideActionBlock :: SideActionBlock
+ , fileNotFoundShown :: S.Set FilePath
}
defaultMessageState :: MessageState
-defaultMessageState = MessageState NormalOutput NoBlock
+defaultMessageState = MessageState NormalOutput NoBlock S.empty
diff --git a/Types/Meters.hs b/Types/Meters.hs
new file mode 100644
index 0000000..ef304d1
--- /dev/null
+++ b/Types/Meters.hs
@@ -0,0 +1,12 @@
+{- git-annex meter types
+ -
+ - Copyright 2012 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+module Types.Meters where
+
+{- An action that can be run repeatedly, feeding it the number of
+ - bytes sent or retrieved so far. -}
+type MeterUpdate = (Integer -> IO ())
diff --git a/Types/Remote.hs b/Types/Remote.hs
index d31d9a7..271676d 100644
--- a/Types/Remote.hs
+++ b/Types/Remote.hs
@@ -15,8 +15,10 @@ import Data.Ord
import qualified Git
import Types.Key
import Types.UUID
+import Types.Meters
-type RemoteConfig = M.Map String String
+type RemoteConfigKey = String
+type RemoteConfig = M.Map RemoteConfigKey String
{- There are different types of remotes. -}
data RemoteTypeA a = RemoteType {
@@ -36,10 +38,6 @@ instance Eq (RemoteTypeA a) where
{- A filename associated with a Key, for display to user. -}
type AssociatedFile = Maybe FilePath
-{- An action that can be run repeatedly, feeding it the number of
- - bytes sent or retrieved so far. -}
-type MeterUpdate = (Integer -> IO ())
-
{- An individual remote. -}
data RemoteA a = Remote {
-- each Remote has a unique uuid
diff --git a/Types/StandardGroups.hs b/Types/StandardGroups.hs
index 1739c20..c1ea1fd 100644
--- a/Types/StandardGroups.hs
+++ b/Types/StandardGroups.hs
@@ -7,31 +7,35 @@
module Types.StandardGroups where
-data StandardGroup = ClientGroup | TransferGroup | ArchiveGroup | BackupGroup
+data StandardGroup = ClientGroup | TransferGroup | BackupGroup | SmallArchiveGroup | FullArchiveGroup
deriving (Eq, Ord, Enum, Bounded, Show)
fromStandardGroup :: StandardGroup -> String
fromStandardGroup ClientGroup = "client"
fromStandardGroup TransferGroup = "transfer"
-fromStandardGroup ArchiveGroup = "archive"
fromStandardGroup BackupGroup = "backup"
+fromStandardGroup SmallArchiveGroup = "smallarchive"
+fromStandardGroup FullArchiveGroup = "archive"
toStandardGroup :: String -> Maybe StandardGroup
toStandardGroup "client" = Just ClientGroup
toStandardGroup "transfer" = Just TransferGroup
-toStandardGroup "archive" = Just ArchiveGroup
toStandardGroup "backup" = Just BackupGroup
+toStandardGroup "smallarchive" = Just SmallArchiveGroup
+toStandardGroup "archive" = Just FullArchiveGroup
toStandardGroup _ = Nothing
descStandardGroup :: StandardGroup -> String
descStandardGroup ClientGroup = "client: a repository on your computer"
descStandardGroup TransferGroup = "transfer: distributes files to clients"
-descStandardGroup ArchiveGroup = "archive: collects files that are not archived elsewhere"
-descStandardGroup BackupGroup = "backup: collects all files"
+descStandardGroup BackupGroup = "backup: backs up all files"
+descStandardGroup SmallArchiveGroup = "small archive: archives files located in \"archive\" directories"
+descStandardGroup FullArchiveGroup = "full archive: archives all files not archived elsewhere"
{- See doc/preferred_content.mdwn for explanations of these expressions. -}
preferredContent :: StandardGroup -> String
-preferredContent ClientGroup = "exclude=*/archive/*"
+preferredContent ClientGroup = "exclude=*/archive/* and exclude=archive/*"
preferredContent TransferGroup = "not (inallgroup=client and copies=client:2) and " ++ preferredContent ClientGroup
-preferredContent ArchiveGroup = "not copies=archive:1"
preferredContent BackupGroup = "" -- all content is preferred
+preferredContent SmallArchiveGroup = "(include=*/archive/* or include=archive/*) and " ++ preferredContent FullArchiveGroup
+preferredContent FullArchiveGroup = "not (copies=archive:1 or copies=smallarchive:1)"
diff --git a/Utility/Gpg.hs b/Utility/Gpg.hs
index b5cffb1..054e6ca 100644
--- a/Utility/Gpg.hs
+++ b/Utility/Gpg.hs
@@ -7,7 +7,6 @@
module Utility.Gpg where
-import qualified Data.ByteString.Lazy as L
import System.Posix.Types
import Control.Applicative
import Control.Concurrent
@@ -54,14 +53,15 @@ pipeStrict params input = do
hClose to
hGetContentsStrict from
-{- Runs gpg with some parameters, first feeding it a passphrase via
- - --passphrase-fd, then feeding it an input, and passing a handle
- - to its output to an action.
+{- Runs gpg with some parameters. First sends it a passphrase via
+ - --passphrase-fd. Then runs a feeder action that is passed a handle and
+ - should write to it all the data to input to gpg. Finally, runs
+ - a reader action that is passed a handle to gpg's output.
-
- Note that to avoid deadlock with the cleanup stage,
- - the action must fully consume gpg's input before returning. -}
-passphraseHandle :: [CommandParam] -> String -> IO L.ByteString -> (Handle -> IO a) -> IO a
-passphraseHandle params passphrase a b = do
+ - the reader must fully consume gpg's input before returning. -}
+feedRead :: [CommandParam] -> String -> (Handle -> IO ()) -> (Handle -> IO a) -> IO a
+feedRead params passphrase feeder reader = do
-- pipe the passphrase into gpg on a fd
(frompipe, topipe) <- createPipe
void $ forkIO $ do
@@ -77,9 +77,9 @@ passphraseHandle params passphrase a b = do
where
go (to, from) = do
void $ forkIO $ do
- L.hPut to =<< a
+ feeder to
hClose to
- b from
+ reader from
{- Finds gpg public keys matching some string. (Could be an email address,
- a key id, or a name. -}
diff --git a/Utility/OSX.hs b/Utility/OSX.hs
index cf4a6e8..769d073 100644
--- a/Utility/OSX.hs
+++ b/Utility/OSX.hs
@@ -38,6 +38,7 @@ genOSXAutoStartFile label command params = unlines
, unlines $ map (\v -> "<string>" ++ v ++ "</string>") (command:params)
, "</array>"
, "<key>RunAtLoad</key>"
+ , "<true/>"
, "</dict>"
, "</plist>"
]
diff --git a/Utility/Observed.hs b/Utility/Observed.hs
new file mode 100644
index 0000000..3ee9734
--- /dev/null
+++ b/Utility/Observed.hs
@@ -0,0 +1,43 @@
+module Utility.Observed where
+
+import qualified Data.ByteString.Lazy as L
+import qualified Data.ByteString as S
+import System.IO
+import System.IO.Unsafe
+import Foreign.Storable (Storable(sizeOf))
+
+{- This is like L.hGetContents, but after each chunk is read, an action
+ - is run to observe the size of the chunk.
+ -
+ - Note that the observer is run in unsafeInterleaveIO, which means that
+ - it can be run at any time. It's even possible for observers to run out
+ - of order, as different parts of the ByteString are consumed.
+ -
+ - All the usual caveats about using unsafeInterleaveIO apply to the observers,
+ - so use caution.
+ -}
+hGetContentsObserved :: Handle -> (Int -> IO ()) -> IO L.ByteString
+hGetContentsObserved h observe = lazyRead
+ where
+ lazyRead = unsafeInterleaveIO loop
+
+ loop = do
+ c <- S.hGetSome h defaultChunkSize
+ if S.null c
+ then do
+ hClose h
+ return $ L.empty
+ else do
+ observe $ S.length c
+ {- unsafeInterleaveIO causes this to be
+ - deferred until the data is read from the
+ - ByteString. -}
+ cs <- lazyRead
+ return $ L.append (L.fromChunks [c]) cs
+
+{- Same default chunk size Lazy ByteStrings use. -}
+defaultChunkSize :: Int
+defaultChunkSize = 32 * k - chunkOverhead
+ where
+ k = 1024
+ chunkOverhead = 2 * sizeOf (undefined :: Int) -- GHC specific
diff --git a/Utility/Path.hs b/Utility/Path.hs
index f4c2843..272d2e8 100644
--- a/Utility/Path.hs
+++ b/Utility/Path.hs
@@ -104,29 +104,25 @@ prop_relPathDirToFile_regressionTest = same_dir_shortcurcuits_at_difference
same_dir_shortcurcuits_at_difference =
relPathDirToFile "/tmp/r/lll/xxx/yyy/18" "/tmp/r/.git/annex/objects/18/gk/SHA256-foo/SHA256-foo" == "../../../../.git/annex/objects/18/gk/SHA256-foo/SHA256-foo"
-{- Given an original list of files, and an expanded list derived from it,
- - ensures that the original list's ordering is preserved.
- -
- - The input list may contain a directory, like "dir" or "dir/". Any
- - items in the expanded list that are contained in that directory will
- - appear at the same position as it did in the input list.
+{- Given an original list of paths, and an expanded list derived from it,
+ - generates a list of lists, where each sublist corresponds to one of the
+ - original paths. When the original path is a directory, any items
+ - in the expanded list that are contained in that directory will appear in
+ - its segment.
-}
-preserveOrder :: [FilePath] -> [FilePath] -> [FilePath]
-preserveOrder [] new = new
-preserveOrder [_] new = new -- optimisation
-preserveOrder (l:ls) new = found ++ preserveOrder ls rest
+segmentPaths :: [FilePath] -> [FilePath] -> [[FilePath]]
+segmentPaths [] new = [new]
+segmentPaths [_] new = [new] -- optimisation
+segmentPaths (l:ls) new = [found] ++ segmentPaths ls rest
where
(found, rest)=partition (l `dirContains`) new
-{- Runs an action that takes a list of FilePaths, and ensures that
- - its return list preserves order.
- -
- - This assumes that it's cheaper to call preserveOrder on the result,
- - than it would be to run the action separately with each param. In the case
- - of git file list commands, that assumption tends to hold.
+{- This assumes that it's cheaper to call segmentPaths on the result,
+ - than it would be to run the action separately with each path. In
+ - the case of git file list commands, that assumption tends to hold.
-}
-runPreserveOrder :: ([FilePath] -> IO [FilePath]) -> [FilePath] -> IO [FilePath]
-runPreserveOrder a files = preserveOrder files <$> a files
+runSegmentPaths :: ([FilePath] -> IO [FilePath]) -> [FilePath] -> IO [[FilePath]]
+runSegmentPaths a paths = segmentPaths paths <$> a paths
{- Converts paths in the home directory to use ~/ -}
relHome :: FilePath -> IO String
diff --git a/Utility/Process.hs b/Utility/Process.hs
index 68d5452..14d40f0 100644
--- a/Utility/Process.hs
+++ b/Utility/Process.hs
@@ -65,7 +65,7 @@ readProcessEnv cmd args environ =
, env = environ
}
-{- Writes a string to a process on its stdout,
+{- Writes a string to a process on its stdin,
- returns its output, and also allows specifying the environment.
-}
writeReadProcessEnv
diff --git a/Utility/TempFile.hs b/Utility/TempFile.hs
index 60feb74..688baa4 100644
--- a/Utility/TempFile.hs
+++ b/Utility/TempFile.hs
@@ -32,27 +32,27 @@ type Template = String
{- Runs an action with a temp file, then removes the file. -}
withTempFile :: Template -> (FilePath -> Handle -> IO a) -> IO a
withTempFile template a = bracket create remove use
- where
- create = do
- tmpdir <- catchDefaultIO "." getTemporaryDirectory
- openTempFile tmpdir template
- remove (name, handle) = do
- hClose handle
- catchBoolIO (removeFile name >> return True)
- use (name, handle) = a name handle
+ where
+ create = do
+ tmpdir <- catchDefaultIO "." getTemporaryDirectory
+ openTempFile tmpdir template
+ remove (name, handle) = do
+ hClose handle
+ catchBoolIO (removeFile name >> return True)
+ use (name, handle) = a name handle
{- Runs an action with a temp directory, then removes the directory and
- all its contents. -}
withTempDir :: Template -> (FilePath -> IO a) -> IO a
withTempDir template = bracket create remove
- where
- remove = removeDirectoryRecursive
- create = do
- tmpdir <- catchDefaultIO "." getTemporaryDirectory
- createDirectoryIfMissing True tmpdir
- pid <- getProcessID
- makedir tmpdir (template ++ show pid) (0 :: Int)
- makedir tmpdir t n = do
- let dir = tmpdir </> t ++ "." ++ show n
- r <- tryIO $ createDirectory dir
- either (const $ makedir tmpdir t $ n + 1) (const $ return dir) r
+ where
+ remove = removeDirectoryRecursive
+ create = do
+ tmpdir <- catchDefaultIO "." getTemporaryDirectory
+ createDirectoryIfMissing True tmpdir
+ pid <- getProcessID
+ makedir tmpdir (template ++ show pid) (0 :: Int)
+ makedir tmpdir t n = do
+ let dir = tmpdir </> t ++ "." ++ show n
+ r <- tryIO $ createDirectory dir
+ either (const $ makedir tmpdir t $ n + 1) (const $ return dir) r
diff --git a/Utility/ThreadScheduler.hs b/Utility/ThreadScheduler.hs
index 5e165c9..6557398 100644
--- a/Utility/ThreadScheduler.hs
+++ b/Utility/ThreadScheduler.hs
@@ -11,8 +11,6 @@ module Utility.ThreadScheduler where
import Common
import Control.Concurrent
-import Control.Exception
-import Control.Concurrent.Async
import System.Posix.Terminal
import System.Posix.Signals
@@ -46,19 +44,6 @@ unboundDelay time = do
threadDelay $ fromInteger maxWait
when (maxWait /= time) $ unboundDelay (time - maxWait)
-{- Runs an action until a timeout is reached. If it fails to complete in
- - time, or throws an exception, returns a Left value.
- -
- - Note that if the action runs an unsafe foreign call, the signal to
- - cancel it may not arrive until the call returns. -}
-runTimeout :: Seconds -> IO a -> IO (Either SomeException a)
-runTimeout secs a = do
- runner <- async a
- controller <- async $ do
- threadDelaySeconds secs
- cancel runner
- cancel controller `after` waitCatch runner
-
{- Pauses the main thread, letting children run until program termination. -}
waitForTermination :: IO ()
waitForTermination = do
diff --git a/debian/changelog b/debian/changelog
index c05d529..b24b102 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,33 @@
+git-annex (3.20121126) unstable; urgency=low
+
+ * New webdav and Amazon glacier special remotes.
+ * Display a warning when a non-existing file or directory is specified.
+ * webapp: Added configurator for Box.com.
+ * webapp: Show error messages to user when testing XMPP creds.
+ * Fix build of assistant without yesod.
+ * webapp: The list of repositories refreshes when new repositories are
+ added, including when new repository configurations are pushed in from
+ remotes.
+ * OSX: Fix RunAtLoad value in plist file.
+ * Getting a file from chunked directory special remotes no longer buffers
+ it all in memory.
+ * S3: Added progress display for uploading and downloading.
+ * directory special remote: Made more efficient and robust.
+ * Bugfix: directory special remote could loop forever storing a key
+ when a too small chunksize was configured.
+ * Allow controlling whether login credentials for S3 and webdav are
+ committed to the repository, by setting embedcreds=yes|no when running
+ initremote.
+ * Added smallarchive repository group, that only archives files that are
+ in archive directories. Used by default for glacier when set up in the
+ webapp.
+ * assistant: Fixed handling of toplevel archive directory and
+ client repository group.
+ * assistant: Apply preferred content settings when a new symlink
+ is created, or a symlink gets renamed. Made archive directories work.
+
+ -- Joey Hess <joeyh@debian.org> Mon, 26 Nov 2012 11:37:49 -0400
+
git-annex (3.20121112) unstable; urgency=low
* assistant: Can use XMPP to notify other nodes about pushes made to other
diff --git a/debian/control b/debian/control
index 00ab576..03723de 100644
--- a/debian/control
+++ b/debian/control
@@ -4,6 +4,7 @@ Priority: optional
Build-Depends:
debhelper (>= 9),
ghc (>= 7.4),
+ libghc-mtl-dev (>= 2.1.1),
libghc-missingh-dev,
libghc-hslogger-dev,
libghc-pcre-light-dev,
@@ -12,6 +13,7 @@ Build-Depends:
libghc-http-dev,
libghc-utf8-string-dev,
libghc-hs3-dev (>= 0.5.6),
+ libghc-dav-dev (>= 0.2),
libghc-testpack-dev,
libghc-quickcheck2-dev,
libghc-monad-control-dev (>= 0.3),
@@ -39,10 +41,9 @@ Build-Depends:
libghc-network-multicast-dev,
libghc-network-info-dev,
libghc-safesemaphore-dev,
- libghc-network-protocol-xmpp-dev (>= 0.4.3-2),
+ libghc-network-protocol-xmpp-dev (>= 0.4.3-1+b1),
libghc-gnutls-dev (>= 0.1.4),
libghc-xml-types-dev,
- libghc-async-dev,
ikiwiki,
perlmagick,
git,
diff --git a/doc/assistant.mdwn b/doc/assistant.mdwn
index c451d45..0032698 100644
--- a/doc/assistant.mdwn
+++ b/doc/assistant.mdwn
@@ -32,9 +32,11 @@ interface to add repositories and control the git-annex assistant.
## documentation
* Want to make two nearby computers share the same synchronised folder?
- Follow the [[local_pairing_walkthrough]].
+ Follow the [[pairing_walkthrough]].
* Want to share a synchronised folder with a friend?
Follow the [[share_with_a_friend_walkthrough]].
+* Want to archive data to a drive or the cloud?
+ Follow the [[archival_walkthrough]].
## command line startup
diff --git a/doc/assistant/archival_walkthrough.mdwn b/doc/assistant/archival_walkthrough.mdwn
new file mode 100644
index 0000000..0e3ddcc
--- /dev/null
+++ b/doc/assistant/archival_walkthrough.mdwn
@@ -0,0 +1,28 @@
+Normally, the git-annex assistant makes your files be available
+wherever you use it, and so a copy of each file is stored in each repository.
+That's perfect for files you're using right now, but what about files you're
+not using any more?
+
+You could just delete those files, but it's better to archive them, so
+you can access them later. All you need to get started archiving your old
+files is a USB drive, or an [Amazon Glacier](http://aws.amazon.com/glacier/)
+account.
+
+The webapp makes it easy to make a repository on either a USB drive,
+or on Amazon Glacier. Once the repository is created, be sure to
+put it in either the small archive, or full archive repository group.
+
+[[!img repogroup.png]]
+
+Now when you're done with a file, just move it into a directory named
+"archive". The assistant will notice you put it there, and next time it
+has the opportunity (when you plug in the USB drive, or when it can
+talk to Amazon Glacier over the network), will move the file's
+content to your archive repository.
+
+You'll no longer be able to open the file once it's been archived.
+If you later want to access it, you can just copy or move it out
+of the archive directory, and the assistant will retrieve its
+content from the archive.
+
+Note that retrieving data from Amazon Glacier takes 4 to 5 hours.
diff --git a/doc/assistant/release_notes.mdwn b/doc/assistant/release_notes.mdwn
index 270897f..db2a7c9 100644
--- a/doc/assistant/release_notes.mdwn
+++ b/doc/assistant/release_notes.mdwn
@@ -1,4 +1,4 @@
-## version 3.20121012
+## version 3.20121112
This is a major upgrade of the git-annex assistant, which is still in beta.
diff --git a/doc/assistant/repogroup.png b/doc/assistant/repogroup.png
new file mode 100644
index 0000000..ac5aabd
--- /dev/null
+++ b/doc/assistant/repogroup.png
Binary files differ
diff --git a/doc/bugs/3.20121112:_build_error_in_assistant.mdwn b/doc/bugs/3.20121112:_build_error_in_assistant.mdwn
new file mode 100644
index 0000000..de11dfb
--- /dev/null
+++ b/doc/bugs/3.20121112:_build_error_in_assistant.mdwn
@@ -0,0 +1,432 @@
+Git-annex stopped compiling with GHC 7.4.2 after updating Yesod and friends to the respective latest version. The complete build log is attached below. I hope this helps. Further build logs are available at <http://hydra.nixos.org/job/nixpkgs/trunk/gitAndTools.gitAnnex>, too.
+
+ building
+ make flags: PREFIX=/nix/store/9az61h33v1j6fkdmwdfy7gi0rhspsb9k-git-annex-3.20121112
+ building Build/SysConfig.hs
+ ghc -O2 -Wall -outputdir tmp -IUtility -DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBAPP -DWITH_PAIRING -DWITH_XMPP -DWITH_DNS -DWITH_INOTIFY -DWITH_DBUS -threaded --make configure
+ [1 of 7] Compiling Utility.Exception ( Utility/Exception.hs, tmp/Utility/Exception.o )
+ [2 of 7] Compiling Utility.Misc ( Utility/Misc.hs, tmp/Utility/Misc.o )
+ [3 of 7] Compiling Utility.Process ( Utility/Process.hs, tmp/Utility/Process.o )
+ [4 of 7] Compiling Utility.SafeCommand ( Utility/SafeCommand.hs, tmp/Utility/SafeCommand.o )
+ [5 of 7] Compiling Build.TestConfig ( Build/TestConfig.hs, tmp/Build/TestConfig.o )
+ [6 of 7] Compiling Build.Configure ( Build/Configure.hs, tmp/Build/Configure.o )
+ [7 of 7] Compiling Main ( configure.hs, tmp/Main.o )
+ Linking configure ...
+ ./configure
+ checking version... 3.20121112
+ checking git... yes
+ checking git version... 1.8.0
+ checking cp -a... yes
+ checking cp -p... yes
+ checking cp --reflink=auto... yes
+ checking uuid generator... uuidgen
+ checking xargs -0... yes
+ checking rsync... yes
+ checking curl... yes
+ checking wget... no
+ checking bup... no
+ checking gpg... no
+ checking lsof... no
+ checking ssh connection caching... yes
+ checking sha1... sha1sum
+ checking sha256... sha256sum
+ checking sha512... sha512sum
+ checking sha224... sha224sum
+ checking sha384... sha384sum
+ building Utility/Touch.hs
+ hsc2hs Utility/Touch.hsc
+ building Utility/Mounts.hs
+ hsc2hs Utility/Mounts.hsc
+ building Utility/libdiskfree.o
+ cc -Wall -c -o Utility/libdiskfree.o Utility/libdiskfree.c
+ building Utility/libmounts.o
+ cc -Wall -c -o Utility/libmounts.o Utility/libmounts.c
+ building git-annex
+ install -d tmp
+ ghc -O2 -Wall -outputdir tmp -IUtility -DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBAPP -DWITH_PAIRING -DWITH_XMPP -DWITH_DNS -DWITH_INOTIFY -DWITH_DBUS -threaded --make git-annex -o tmp/git-annex Utility/libdiskfree.o Utility/libmounts.o
+ [ 1 of 279] Compiling Utility.Dot ( Utility/Dot.hs, tmp/Utility/Dot.o )
+ [ 2 of 279] Compiling Utility.ThreadLock ( Utility/ThreadLock.hs, tmp/Utility/ThreadLock.o )
+ [ 3 of 279] Compiling Utility.Mounts ( Utility/Mounts.hs, tmp/Utility/Mounts.o )
+ [ 4 of 279] Compiling Utility.Yesod ( Utility/Yesod.hs, tmp/Utility/Yesod.o )
+ [ 5 of 279] Compiling Utility.Tense ( Utility/Tense.hs, tmp/Utility/Tense.o )
+ [ 6 of 279] Compiling Utility.Verifiable ( Utility/Verifiable.hs, tmp/Utility/Verifiable.o )
+ [ 7 of 279] Compiling Assistant.Types.TransferSlots ( Assistant/Types/TransferSlots.hs, tmp/Assistant/Types/TransferSlots.o )
+ [ 8 of 279] Compiling Types.StandardGroups ( Types/StandardGroups.hs, tmp/Types/StandardGroups.o )
+ [ 9 of 279] Compiling Utility.Percentage ( Utility/Percentage.hs, tmp/Utility/Percentage.o )
+ [ 10 of 279] Compiling Utility.Base64 ( Utility/Base64.hs, tmp/Utility/Base64.o )
+ [ 11 of 279] Compiling Utility.DataUnits ( Utility/DataUnits.hs, tmp/Utility/DataUnits.o )
+ [ 12 of 279] Compiling Utility.JSONStream ( Utility/JSONStream.hs, tmp/Utility/JSONStream.o )
+ [ 13 of 279] Compiling Messages.JSON ( Messages/JSON.hs, tmp/Messages/JSON.o )
+ [ 14 of 279] Compiling Build.SysConfig ( Build/SysConfig.hs, tmp/Build/SysConfig.o )
+ [ 15 of 279] Compiling Types.KeySource ( Types/KeySource.hs, tmp/Types/KeySource.o )
+ [ 16 of 279] Compiling Utility.State ( Utility/State.hs, tmp/Utility/State.o )
+ [ 17 of 279] Compiling Types.UUID ( Types/UUID.hs, tmp/Types/UUID.o )
+ [ 18 of 279] Compiling Types.Messages ( Types/Messages.hs, tmp/Types/Messages.o )
+ [ 19 of 279] Compiling Types.Group ( Types/Group.hs, tmp/Types/Group.o )
+ [ 20 of 279] Compiling Types.TrustLevel ( Types/TrustLevel.hs, tmp/Types/TrustLevel.o )
+ [ 21 of 279] Compiling Types.BranchState ( Types/BranchState.hs, tmp/Types/BranchState.o )
+ [ 22 of 279] Compiling Utility.PartialPrelude ( Utility/PartialPrelude.hs, tmp/Utility/PartialPrelude.o )
+ [ 23 of 279] Compiling Utility.HumanTime ( Utility/HumanTime.hs, tmp/Utility/HumanTime.o )
+ [ 24 of 279] Compiling Utility.Format ( Utility/Format.hs, tmp/Utility/Format.o )
+ [ 25 of 279] Compiling Utility.FileSystemEncoding ( Utility/FileSystemEncoding.hs, tmp/Utility/FileSystemEncoding.o )
+ [ 26 of 279] Compiling Utility.Touch ( Utility/Touch.hs, tmp/Utility/Touch.o )
+ [ 27 of 279] Compiling Utility.Applicative ( Utility/Applicative.hs, tmp/Utility/Applicative.o )
+ [ 28 of 279] Compiling Utility.Monad ( Utility/Monad.hs, tmp/Utility/Monad.o )
+ [ 29 of 279] Compiling Utility.Exception ( Utility/Exception.hs, tmp/Utility/Exception.o )
+ [ 30 of 279] Compiling Utility.DBus ( Utility/DBus.hs, tmp/Utility/DBus.o )
+ [ 31 of 279] Compiling Utility.Misc ( Utility/Misc.hs, tmp/Utility/Misc.o )
+ [ 32 of 279] Compiling Utility.Process ( Utility/Process.hs, tmp/Utility/Process.o )
+ [ 33 of 279] Compiling Utility.SafeCommand ( Utility/SafeCommand.hs, tmp/Utility/SafeCommand.o )
+ [ 34 of 279] Compiling Utility.Network ( Utility/Network.hs, tmp/Utility/Network.o )
+ [ 35 of 279] Compiling Utility.SRV ( Utility/SRV.hs, tmp/Utility/SRV.o )
+
+ Utility/SRV.hs:88:1: Warning: Defined but not used: `lookupSRVHost'
+
+ Utility/SRV.hs:94:1: Warning: Defined but not used: `parseSrvHost'
+ [ 36 of 279] Compiling Git.Types ( Git/Types.hs, tmp/Git/Types.o )
+ [ 37 of 279] Compiling Utility.UserInfo ( Utility/UserInfo.hs, tmp/Utility/UserInfo.o )
+ [ 38 of 279] Compiling Utility.Path ( Utility/Path.hs, tmp/Utility/Path.o )
+ [ 39 of 279] Compiling Utility.TempFile ( Utility/TempFile.hs, tmp/Utility/TempFile.o )
+ [ 40 of 279] Compiling Utility.Directory ( Utility/Directory.hs, tmp/Utility/Directory.o )
+ [ 41 of 279] Compiling Utility.FreeDesktop ( Utility/FreeDesktop.hs, tmp/Utility/FreeDesktop.o )
+ [ 42 of 279] Compiling Assistant.Install.AutoStart ( Assistant/Install/AutoStart.hs, tmp/Assistant/Install/AutoStart.o )
+ [ 43 of 279] Compiling Common ( Common.hs, tmp/Common.o )
+ [ 44 of 279] Compiling Utility.FileMode ( Utility/FileMode.hs, tmp/Utility/FileMode.o )
+ [ 45 of 279] Compiling Git ( Git.hs, tmp/Git.o )
+ [ 46 of 279] Compiling Git.FilePath ( Git/FilePath.hs, tmp/Git/FilePath.o )
+ [ 47 of 279] Compiling Utility.Matcher ( Utility/Matcher.hs, tmp/Utility/Matcher.o )
+ [ 48 of 279] Compiling Utility.Gpg ( Utility/Gpg.hs, tmp/Utility/Gpg.o )
+ [ 49 of 279] Compiling Types.Crypto ( Types/Crypto.hs, tmp/Types/Crypto.o )
+ [ 50 of 279] Compiling Types.Key ( Types/Key.hs, tmp/Types/Key.o )
+ [ 51 of 279] Compiling Types.Backend ( Types/Backend.hs, tmp/Types/Backend.o )
+ [ 52 of 279] Compiling Types.Remote ( Types/Remote.hs, tmp/Types/Remote.o )
+ [ 53 of 279] Compiling Git.Sha ( Git/Sha.hs, tmp/Git/Sha.o )
+ [ 54 of 279] Compiling Utility.CoProcess ( Utility/CoProcess.hs, tmp/Utility/CoProcess.o )
+ [ 55 of 279] Compiling Git.Command ( Git/Command.hs, tmp/Git/Command.o )
+ [ 56 of 279] Compiling Git.Ref ( Git/Ref.hs, tmp/Git/Ref.o )
+ [ 57 of 279] Compiling Git.Branch ( Git/Branch.hs, tmp/Git/Branch.o )
+ [ 58 of 279] Compiling Git.UpdateIndex ( Git/UpdateIndex.hs, tmp/Git/UpdateIndex.o )
+ [ 59 of 279] Compiling Git.Queue ( Git/Queue.hs, tmp/Git/Queue.o )
+ [ 60 of 279] Compiling Git.HashObject ( Git/HashObject.hs, tmp/Git/HashObject.o )
+ [ 61 of 279] Compiling Git.CatFile ( Git/CatFile.hs, tmp/Git/CatFile.o )
+ [ 62 of 279] Compiling Git.UnionMerge ( Git/UnionMerge.hs, tmp/Git/UnionMerge.o )
+ [ 63 of 279] Compiling Git.Url ( Git/Url.hs, tmp/Git/Url.o )
+ [ 64 of 279] Compiling Git.Construct ( Git/Construct.hs, tmp/Git/Construct.o )
+ [ 65 of 279] Compiling Git.Config ( Git/Config.hs, tmp/Git/Config.o )
+ [ 66 of 279] Compiling Git.SharedRepository ( Git/SharedRepository.hs, tmp/Git/SharedRepository.o )
+ [ 67 of 279] Compiling Git.Version ( Git/Version.hs, tmp/Git/Version.o )
+ [ 68 of 279] Compiling Git.CheckAttr ( Git/CheckAttr.hs, tmp/Git/CheckAttr.o )
+ [ 69 of 279] Compiling Annex ( Annex.hs, tmp/Annex.o )
+ [ 70 of 279] Compiling Types.Option ( Types/Option.hs, tmp/Types/Option.o )
+ [ 71 of 279] Compiling Types ( Types.hs, tmp/Types.o )
+ [ 72 of 279] Compiling Messages ( Messages.hs, tmp/Messages.o )
+ [ 73 of 279] Compiling Types.Command ( Types/Command.hs, tmp/Types/Command.o )
+ [ 74 of 279] Compiling Locations ( Locations.hs, tmp/Locations.o )
+ [ 75 of 279] Compiling Common.Annex ( Common/Annex.hs, tmp/Common/Annex.o )
+ [ 76 of 279] Compiling Fields ( Fields.hs, tmp/Fields.o )
+ [ 77 of 279] Compiling Annex.BranchState ( Annex/BranchState.hs, tmp/Annex/BranchState.o )
+ [ 78 of 279] Compiling Annex.CatFile ( Annex/CatFile.hs, tmp/Annex/CatFile.o )
+ [ 79 of 279] Compiling Annex.Perms ( Annex/Perms.hs, tmp/Annex/Perms.o )
+ [ 80 of 279] Compiling Crypto ( Crypto.hs, tmp/Crypto.o )
+ [ 81 of 279] Compiling Annex.Exception ( Annex/Exception.hs, tmp/Annex/Exception.o )
+ [ 82 of 279] Compiling Annex.Journal ( Annex/Journal.hs, tmp/Annex/Journal.o )
+ [ 83 of 279] Compiling Annex.Branch ( Annex/Branch.hs, tmp/Annex/Branch.o )
+ [ 84 of 279] Compiling Usage ( Usage.hs, tmp/Usage.o )
+ [ 85 of 279] Compiling Annex.CheckAttr ( Annex/CheckAttr.hs, tmp/Annex/CheckAttr.o )
+ [ 86 of 279] Compiling Remote.Helper.Special ( Remote/Helper/Special.hs, tmp/Remote/Helper/Special.o )
+ [ 87 of 279] Compiling Logs.Presence ( Logs/Presence.hs, tmp/Logs/Presence.o )
+ [ 88 of 279] Compiling Logs.Location ( Logs/Location.hs, tmp/Logs/Location.o )
+ [ 89 of 279] Compiling Logs.Web ( Logs/Web.hs, tmp/Logs/Web.o )
+ [ 90 of 279] Compiling Annex.LockPool ( Annex/LockPool.hs, tmp/Annex/LockPool.o )
+ [ 91 of 279] Compiling Logs.Transfer ( Logs/Transfer.hs, tmp/Logs/Transfer.o )
+ [ 92 of 279] Compiling Backend.SHA ( Backend/SHA.hs, tmp/Backend/SHA.o )
+ [ 93 of 279] Compiling Backend.WORM ( Backend/WORM.hs, tmp/Backend/WORM.o )
+ [ 94 of 279] Compiling Backend.URL ( Backend/URL.hs, tmp/Backend/URL.o )
+ [ 95 of 279] Compiling Assistant.Types.ScanRemotes ( Assistant/Types/ScanRemotes.hs, tmp/Assistant/Types/ScanRemotes.o )
+ [ 96 of 279] Compiling Assistant.Types.ThreadedMonad ( Assistant/Types/ThreadedMonad.hs, tmp/Assistant/Types/ThreadedMonad.o )
+ [ 97 of 279] Compiling Assistant.Types.TransferQueue ( Assistant/Types/TransferQueue.hs, tmp/Assistant/Types/TransferQueue.o )
+ [ 98 of 279] Compiling Assistant.Types.Pushes ( Assistant/Types/Pushes.hs, tmp/Assistant/Types/Pushes.o )
+ [ 99 of 279] Compiling Assistant.Types.BranchChange ( Assistant/Types/BranchChange.hs, tmp/Assistant/Types/BranchChange.o )
+ [100 of 279] Compiling Logs.UUIDBased ( Logs/UUIDBased.hs, tmp/Logs/UUIDBased.o )
+ [101 of 279] Compiling Logs.Remote ( Logs/Remote.hs, tmp/Logs/Remote.o )
+ [102 of 279] Compiling Logs.Group ( Logs/Group.hs, tmp/Logs/Group.o )
+ [103 of 279] Compiling Utility.DiskFree ( Utility/DiskFree.hs, tmp/Utility/DiskFree.o )
+ [104 of 279] Compiling Utility.Url ( Utility/Url.hs, tmp/Utility/Url.o )
+ [105 of 279] Compiling Utility.CopyFile ( Utility/CopyFile.hs, tmp/Utility/CopyFile.o )
+ [106 of 279] Compiling Utility.Rsync ( Utility/Rsync.hs, tmp/Utility/Rsync.o )
+ [107 of 279] Compiling Git.LsFiles ( Git/LsFiles.hs, tmp/Git/LsFiles.o )
+ [108 of 279] Compiling Git.AutoCorrect ( Git/AutoCorrect.hs, tmp/Git/AutoCorrect.o )
+ [109 of 279] Compiling Git.CurrentRepo ( Git/CurrentRepo.hs, tmp/Git/CurrentRepo.o )
+ [110 of 279] Compiling Locations.UserConfig ( Locations/UserConfig.hs, tmp/Locations/UserConfig.o )
+ [111 of 279] Compiling Utility.ThreadScheduler ( Utility/ThreadScheduler.hs, tmp/Utility/ThreadScheduler.o )
+ [112 of 279] Compiling Git.Merge ( Git/Merge.hs, tmp/Git/Merge.o )
+ [113 of 279] Compiling Utility.Parallel ( Utility/Parallel.hs, tmp/Utility/Parallel.o )
+ [114 of 279] Compiling Git.Remote ( Git/Remote.hs, tmp/Git/Remote.o )
+ [115 of 279] Compiling Assistant.Ssh ( Assistant/Ssh.hs, tmp/Assistant/Ssh.o )
+ [116 of 279] Compiling Assistant.Pairing ( Assistant/Pairing.hs, tmp/Assistant/Pairing.o )
+ [117 of 279] Compiling Assistant.Types.NetMessager ( Assistant/Types/NetMessager.hs, tmp/Assistant/Types/NetMessager.o )
+ [118 of 279] Compiling Utility.NotificationBroadcaster ( Utility/NotificationBroadcaster.hs, tmp/Utility/NotificationBroadcaster.o )
+ [119 of 279] Compiling Assistant.Types.Buddies ( Assistant/Types/Buddies.hs, tmp/Assistant/Types/Buddies.o )
+ [120 of 279] Compiling Utility.TSet ( Utility/TSet.hs, tmp/Utility/TSet.o )
+ [121 of 279] Compiling Assistant.Types.Commits ( Assistant/Types/Commits.hs, tmp/Assistant/Types/Commits.o )
+ [122 of 279] Compiling Assistant.Types.Changes ( Assistant/Types/Changes.hs, tmp/Assistant/Types/Changes.o )
+ [123 of 279] Compiling Utility.WebApp ( Utility/WebApp.hs, tmp/Utility/WebApp.o )
+ [124 of 279] Compiling Utility.Daemon ( Utility/Daemon.hs, tmp/Utility/Daemon.o )
+ [125 of 279] Compiling Utility.LogFile ( Utility/LogFile.hs, tmp/Utility/LogFile.o )
+ [126 of 279] Compiling Git.Filename ( Git/Filename.hs, tmp/Git/Filename.o )
+ [127 of 279] Compiling Git.LsTree ( Git/LsTree.hs, tmp/Git/LsTree.o )
+ [128 of 279] Compiling Utility.Types.DirWatcher ( Utility/Types/DirWatcher.hs, tmp/Utility/Types/DirWatcher.o )
+ [129 of 279] Compiling Utility.INotify ( Utility/INotify.hs, tmp/Utility/INotify.o )
+ [130 of 279] Compiling Utility.DirWatcher ( Utility/DirWatcher.hs, tmp/Utility/DirWatcher.o )
+ [131 of 279] Compiling Utility.Lsof ( Utility/Lsof.hs, tmp/Utility/Lsof.o )
+ [132 of 279] Compiling Config ( Config.hs, tmp/Config.o )
+ [133 of 279] Compiling Annex.UUID ( Annex/UUID.hs, tmp/Annex/UUID.o )
+ [134 of 279] Compiling Logs.UUID ( Logs/UUID.hs, tmp/Logs/UUID.o )
+ [135 of 279] Compiling Backend ( Backend.hs, tmp/Backend.o )
+ [136 of 279] Compiling Remote.Helper.Hooks ( Remote/Helper/Hooks.hs, tmp/Remote/Helper/Hooks.o )
+ [137 of 279] Compiling Remote.Helper.Encryptable ( Remote/Helper/Encryptable.hs, tmp/Remote/Helper/Encryptable.o )
+ [138 of 279] Compiling Annex.Queue ( Annex/Queue.hs, tmp/Annex/Queue.o )
+ [139 of 279] Compiling Annex.Content ( Annex/Content.hs, tmp/Annex/Content.o )
+ [140 of 279] Compiling Remote.S3 ( Remote/S3.hs, tmp/Remote/S3.o )
+ [141 of 279] Compiling Remote.Directory ( Remote/Directory.hs, tmp/Remote/Directory.o )
+ [142 of 279] Compiling Remote.Rsync ( Remote/Rsync.hs, tmp/Remote/Rsync.o )
+ [143 of 279] Compiling Remote.Web ( Remote/Web.hs, tmp/Remote/Web.o )
+ [144 of 279] Compiling Remote.Hook ( Remote/Hook.hs, tmp/Remote/Hook.o )
+ [145 of 279] Compiling Upgrade.V2 ( Upgrade/V2.hs, tmp/Upgrade/V2.o )
+ [146 of 279] Compiling Annex.Ssh ( Annex/Ssh.hs, tmp/Annex/Ssh.o )
+ [147 of 279] Compiling Remote.Helper.Ssh ( Remote/Helper/Ssh.hs, tmp/Remote/Helper/Ssh.o )
+ [148 of 279] Compiling Remote.Bup ( Remote/Bup.hs, tmp/Remote/Bup.o )
+ [149 of 279] Compiling Annex.Version ( Annex/Version.hs, tmp/Annex/Version.o )
+ [150 of 279] Compiling Init ( Init.hs, tmp/Init.o )
+ [151 of 279] Compiling Checks ( Checks.hs, tmp/Checks.o )
+ [152 of 279] Compiling Remote.Git ( Remote/Git.hs, tmp/Remote/Git.o )
+ [153 of 279] Compiling Remote.List ( Remote/List.hs, tmp/Remote/List.o )
+ [154 of 279] Compiling Logs.Trust ( Logs/Trust.hs, tmp/Logs/Trust.o )
+ [155 of 279] Compiling Remote ( Remote.hs, tmp/Remote.o )
+ [156 of 279] Compiling Assistant.Alert ( Assistant/Alert.hs, tmp/Assistant/Alert.o )
+ Loading package ghc-prim ... linking ... done.
+ Loading package integer-gmp ... linking ... done.
+ Loading package base ... linking ... done.
+ Loading object (static) Utility/libdiskfree.o ... done
+ Loading object (static) Utility/libmounts.o ... done
+ final link ... done
+ Loading package pretty-1.1.1.0 ... linking ... done.
+ Loading package filepath-1.3.0.0 ... linking ... done.
+ Loading package old-locale-1.0.0.4 ... linking ... done.
+ Loading package old-time-1.1.0.0 ... linking ... done.
+ Loading package bytestring-0.9.2.1 ... linking ... done.
+ Loading package unix-2.5.1.0 ... linking ... done.
+ Loading package directory-1.1.0.2 ... linking ... done.
+ Loading package process-1.1.0.1 ... linking ... done.
+ Loading package array-0.4.0.0 ... linking ... done.
+ Loading package deepseq-1.3.0.0 ... linking ... done.
+ Loading package time-1.4 ... linking ... done.
+ Loading package containers-0.4.2.1 ... linking ... done.
+ Loading package text-0.11.2.0 ... linking ... done.
+ Loading package blaze-builder-0.3.1.0 ... linking ... done.
+ Loading package blaze-markup-0.5.1.1 ... linking ... done.
+ Loading package blaze-html-0.5.1.0 ... linking ... done.
+ Loading package hashable-1.1.2.5 ... linking ... done.
+ Loading package case-insensitive-0.4.0.3 ... linking ... done.
+ Loading package primitive-0.5.0.1 ... linking ... done.
+ Loading package vector-0.10.0.1 ... linking ... done.
+ Loading package random-1.0.1.1 ... linking ... done.
+ Loading package dlist-0.5 ... linking ... done.
+ Loading package data-default-0.5.0 ... linking ... done.
+ Loading package transformers-0.3.0.0 ... linking ... done.
+ Loading package mtl-2.1.1 ... linking ... done.
+ Loading package parsec-3.1.2 ... linking ... done.
+ Loading package network-2.3.0.13 ... linking ... done.
+ Loading package failure-0.2.0.1 ... linking ... done.
+ Loading package template-haskell ... linking ... done.
+ Loading package shakespeare-1.0.2 ... linking ... done.
+ Loading package hamlet-1.1.1.1 ... linking ... done.
+ Loading package http-types-0.7.3.0.1 ... linking ... done.
+ Loading package base-unicode-symbols-0.2.2.4 ... linking ... done.
+ Loading package transformers-base-0.4.1 ... linking ... done.
+ Loading package monad-control-0.3.1.4 ... linking ... done.
+ Loading package lifted-base-0.2 ... linking ... done.
+ Loading package resourcet-0.4.3 ... linking ... done.
+ Loading package semigroups-0.8.4.1 ... linking ... done.
+ Loading package void-0.5.8 ... linking ... done.
+ Loading package conduit-0.5.4.1 ... linking ... done.
+ Loading package unordered-containers-0.2.2.1 ... linking ... done.
+ Loading package vault-0.2.0.1 ... linking ... done.
+ Loading package wai-1.3.0.1 ... linking ... done.
+ Loading package date-cache-0.3.0 ... linking ... done.
+ Loading package unix-time-0.1.2 ... linking ... done.
+ Loading package fast-logger-0.3.1 ... linking ... done.
+ Loading package attoparsec-0.10.2.0 ... linking ... done.
+ Loading package cookie-0.4.0.1 ... linking ... done.
+ Loading package shakespeare-css-1.0.2 ... linking ... done.
+ Loading package syb-0.3.6.1 ... linking ... done.
+ Loading package aeson-0.6.0.2 ... linking ... done.
+ Loading package shakespeare-js-1.1.0 ... linking ... done.
+ Loading package ansi-terminal-0.5.5 ... linking ... done.
+ Loading package blaze-builder-conduit-0.5.0.2 ... linking ... done.
+ Loading package stringsearch-0.3.6.4 ... linking ... done.
+ Loading package byteorder-1.0.3 ... linking ... done.
+ Loading package wai-logger-0.3.0 ... linking ... done.
+ Loading package zlib-0.5.3.3 ... linking ... done.
+ Loading package zlib-bindings-0.1.1.1 ... linking ... done.
+ Loading package zlib-conduit-0.5.0.2 ... linking ... done.
+ Loading package wai-extra-1.3.0.4 ... linking ... done.
+ Loading package monad-logger-0.2.1 ... linking ... done.
+ Loading package cereal-0.3.5.2 ... linking ... done.
+ Loading package base64-bytestring-1.0.0.0 ... linking ... done.
+ Loading package cipher-aes-0.1.2 ... linking ... done.
+ Loading package entropy-0.2.1 ... linking ... done.
+ Loading package largeword-1.0.3 ... linking ... done.
+ Loading package tagged-0.4.4 ... linking ... done.
+ Loading package crypto-api-0.10.2 ... linking ... done.
+ Loading package cpu-0.1.1 ... linking ... done.
+ Loading package crypto-pubkey-types-0.1.1 ... linking ... done.
+ Loading package cryptocipher-0.3.5 ... linking ... done.
+ Loading package cprng-aes-0.2.4 ... linking ... done.
+ Loading package skein-0.1.0.9 ... linking ... done.
+ Loading package clientsession-0.8.0.1 ... linking ... done.
+ Loading package path-pieces-0.1.2 ... linking ... done.
+ Loading package shakespeare-i18n-1.0.0.2 ... linking ... done.
+ Loading package yesod-routes-1.1.1.1 ... linking ... done.
+ Loading package yesod-core-1.1.5 ... linking ... done.
+ [157 of 279] Compiling Assistant.Types.DaemonStatus ( Assistant/Types/DaemonStatus.hs, tmp/Assistant/Types/DaemonStatus.o )
+ [158 of 279] Compiling Assistant.Monad ( Assistant/Monad.hs, tmp/Assistant/Monad.o )
+ [159 of 279] Compiling Assistant.Types.NamedThread ( Assistant/Types/NamedThread.hs, tmp/Assistant/Types/NamedThread.o )
+ [160 of 279] Compiling Assistant.Common ( Assistant/Common.hs, tmp/Assistant/Common.o )
+ [161 of 279] Compiling Assistant.XMPP ( Assistant/XMPP.hs, tmp/Assistant/XMPP.o )
+ [162 of 279] Compiling Assistant.XMPP.Buddies ( Assistant/XMPP/Buddies.hs, tmp/Assistant/XMPP/Buddies.o )
+ [163 of 279] Compiling Assistant.NetMessager ( Assistant/NetMessager.hs, tmp/Assistant/NetMessager.o )
+
+ Assistant/NetMessager.hs:12:1:
+ Warning: The import of `Types.Remote' is redundant
+ except perhaps to import instances from `Types.Remote'
+ To import instances alone, use: import Types.Remote()
+
+ Assistant/NetMessager.hs:13:1:
+ Warning: The import of `Git' is redundant
+ except perhaps to import instances from `Git'
+ To import instances alone, use: import Git()
+
+ Assistant/NetMessager.hs:20:1:
+ Warning: The import of `Data.Text' is redundant
+ except perhaps to import instances from `Data.Text'
+ To import instances alone, use: import Data.Text()
+ [164 of 279] Compiling Assistant.Pushes ( Assistant/Pushes.hs, tmp/Assistant/Pushes.o )
+ [165 of 279] Compiling Assistant.ScanRemotes ( Assistant/ScanRemotes.hs, tmp/Assistant/ScanRemotes.o )
+ [166 of 279] Compiling Assistant.Install ( Assistant/Install.hs, tmp/Assistant/Install.o )
+ [167 of 279] Compiling Assistant.XMPP.Client ( Assistant/XMPP/Client.hs, tmp/Assistant/XMPP/Client.o )
+ [168 of 279] Compiling Assistant.Commits ( Assistant/Commits.hs, tmp/Assistant/Commits.o )
+ [169 of 279] Compiling Assistant.BranchChange ( Assistant/BranchChange.hs, tmp/Assistant/BranchChange.o )
+ [170 of 279] Compiling Assistant.Changes ( Assistant/Changes.hs, tmp/Assistant/Changes.o )
+ [171 of 279] Compiling Assistant.WebApp.Types ( Assistant/WebApp/Types.hs, tmp/Assistant/WebApp/Types.o )
+ Loading package unix-compat-0.4.0.0 ... linking ... done.
+ Loading package file-embed-0.0.4.6 ... linking ... done.
+ Loading package system-filepath-0.4.7 ... linking ... done.
+ Loading package system-fileio-0.3.10 ... linking ... done.
+ Loading package cryptohash-0.7.8 ... linking ... done.
+ Loading package crypto-conduit-0.4.0.1 ... linking ... done.
+ Loading package http-date-0.0.2 ... linking ... done.
+ Loading package mime-types-0.1.0.0 ... linking ... done.
+ Loading package wai-app-static-1.3.0.4 ... linking ... done.
+ Loading package yesod-static-1.1.1.1 ... linking ... done.
+ [172 of 279] Compiling Assistant.WebApp ( Assistant/WebApp.hs, tmp/Assistant/WebApp.o )
+ Loading package network-conduit-0.6.1.1 ... linking ... done.
+ Loading package safe-0.3.3 ... linking ... done.
+ Loading package simple-sendfile-0.2.8 ... linking ... done.
+ Loading package warp-1.3.4.4 ... linking ... done.
+ Loading package yaml-0.8.1 ... linking ... done.
+ Loading package yesod-default-1.1.2 ... linking ... done.
+ [173 of 279] Compiling Assistant.WebApp.OtherRepos ( Assistant/WebApp/OtherRepos.hs, tmp/Assistant/WebApp/OtherRepos.o )
+ [174 of 279] Compiling Limit ( Limit.hs, tmp/Limit.o )
+ [175 of 279] Compiling Option ( Option.hs, tmp/Option.o )
+ [176 of 279] Compiling Seek ( Seek.hs, tmp/Seek.o )
+ [177 of 279] Compiling Command ( Command.hs, tmp/Command.o )
+ [178 of 279] Compiling CmdLine ( CmdLine.hs, tmp/CmdLine.o )
+ [179 of 279] Compiling Command.ConfigList ( Command/ConfigList.hs, tmp/Command/ConfigList.o )
+ [180 of 279] Compiling Command.InAnnex ( Command/InAnnex.hs, tmp/Command/InAnnex.o )
+ [181 of 279] Compiling Command.DropKey ( Command/DropKey.hs, tmp/Command/DropKey.o )
+ [182 of 279] Compiling Command.SendKey ( Command/SendKey.hs, tmp/Command/SendKey.o )
+ [183 of 279] Compiling Command.RecvKey ( Command/RecvKey.hs, tmp/Command/RecvKey.o )
+ [184 of 279] Compiling Command.TransferInfo ( Command/TransferInfo.hs, tmp/Command/TransferInfo.o )
+ [185 of 279] Compiling Command.Commit ( Command/Commit.hs, tmp/Command/Commit.o )
+ [186 of 279] Compiling Command.Add ( Command/Add.hs, tmp/Command/Add.o )
+ [187 of 279] Compiling Command.Unannex ( Command/Unannex.hs, tmp/Command/Unannex.o )
+ [188 of 279] Compiling Command.FromKey ( Command/FromKey.hs, tmp/Command/FromKey.o )
+ [189 of 279] Compiling Command.ReKey ( Command/ReKey.hs, tmp/Command/ReKey.o )
+ [190 of 279] Compiling Command.Fix ( Command/Fix.hs, tmp/Command/Fix.o )
+ [191 of 279] Compiling Command.Describe ( Command/Describe.hs, tmp/Command/Describe.o )
+ [192 of 279] Compiling Command.InitRemote ( Command/InitRemote.hs, tmp/Command/InitRemote.o )
+ [193 of 279] Compiling Command.Unlock ( Command/Unlock.hs, tmp/Command/Unlock.o )
+ [194 of 279] Compiling Command.Lock ( Command/Lock.hs, tmp/Command/Lock.o )
+ [195 of 279] Compiling Command.PreCommit ( Command/PreCommit.hs, tmp/Command/PreCommit.o )
+ [196 of 279] Compiling Command.Log ( Command/Log.hs, tmp/Command/Log.o )
+ [197 of 279] Compiling Command.Merge ( Command/Merge.hs, tmp/Command/Merge.o )
+ [198 of 279] Compiling Command.Group ( Command/Group.hs, tmp/Command/Group.o )
+ [199 of 279] Compiling Command.Ungroup ( Command/Ungroup.hs, tmp/Command/Ungroup.o )
+ [200 of 279] Compiling Command.Import ( Command/Import.hs, tmp/Command/Import.o )
+ [201 of 279] Compiling Logs.Unused ( Logs/Unused.hs, tmp/Logs/Unused.o )
+ [202 of 279] Compiling Command.AddUnused ( Command/AddUnused.hs, tmp/Command/AddUnused.o )
+ [203 of 279] Compiling Command.Find ( Command/Find.hs, tmp/Command/Find.o )
+ [204 of 279] Compiling Logs.PreferredContent ( Logs/PreferredContent.hs, tmp/Logs/PreferredContent.o )
+ [205 of 279] Compiling Annex.Wanted ( Annex/Wanted.hs, tmp/Annex/Wanted.o )
+ [206 of 279] Compiling Command.Whereis ( Command/Whereis.hs, tmp/Command/Whereis.o )
+ [207 of 279] Compiling Command.Trust ( Command/Trust.hs, tmp/Command/Trust.o )
+ [208 of 279] Compiling Command.Untrust ( Command/Untrust.hs, tmp/Command/Untrust.o )
+ [209 of 279] Compiling Command.Semitrust ( Command/Semitrust.hs, tmp/Command/Semitrust.o )
+ [210 of 279] Compiling Command.Dead ( Command/Dead.hs, tmp/Command/Dead.o )
+ [211 of 279] Compiling Command.Vicfg ( Command/Vicfg.hs, tmp/Command/Vicfg.o )
+ [212 of 279] Compiling Command.Map ( Command/Map.hs, tmp/Command/Map.o )
+ [213 of 279] Compiling Command.Init ( Command/Init.hs, tmp/Command/Init.o )
+ [214 of 279] Compiling Command.Uninit ( Command/Uninit.hs, tmp/Command/Uninit.o )
+ [215 of 279] Compiling Command.Version ( Command/Version.hs, tmp/Command/Version.o )
+ [216 of 279] Compiling Upgrade.V1 ( Upgrade/V1.hs, tmp/Upgrade/V1.o )
+ [217 of 279] Compiling Upgrade.V0 ( Upgrade/V0.hs, tmp/Upgrade/V0.o )
+ [218 of 279] Compiling Upgrade ( Upgrade.hs, tmp/Upgrade.o )
+ [219 of 279] Compiling Command.Upgrade ( Command/Upgrade.hs, tmp/Command/Upgrade.o )
+ [220 of 279] Compiling Command.Drop ( Command/Drop.hs, tmp/Command/Drop.o )
+ [221 of 279] Compiling Command.Move ( Command/Move.hs, tmp/Command/Move.o )
+ [222 of 279] Compiling Command.Copy ( Command/Copy.hs, tmp/Command/Copy.o )
+ [223 of 279] Compiling Command.Get ( Command/Get.hs, tmp/Command/Get.o )
+ [224 of 279] Compiling Command.TransferKey ( Command/TransferKey.hs, tmp/Command/TransferKey.o )
+ [225 of 279] Compiling Command.DropUnused ( Command/DropUnused.hs, tmp/Command/DropUnused.o )
+ [226 of 279] Compiling Command.Fsck ( Command/Fsck.hs, tmp/Command/Fsck.o )
+ [227 of 279] Compiling Command.Reinject ( Command/Reinject.hs, tmp/Command/Reinject.o )
+ [228 of 279] Compiling Command.Migrate ( Command/Migrate.hs, tmp/Command/Migrate.o )
+ [229 of 279] Compiling Command.Unused ( Command/Unused.hs, tmp/Command/Unused.o )
+ [230 of 279] Compiling Command.Status ( Command/Status.hs, tmp/Command/Status.o )
+ [231 of 279] Compiling Command.Sync ( Command/Sync.hs, tmp/Command/Sync.o )
+ [232 of 279] Compiling Command.Help ( Command/Help.hs, tmp/Command/Help.o )
+ [233 of 279] Compiling Command.AddUrl ( Command/AddUrl.hs, tmp/Command/AddUrl.o )
+ [234 of 279] Compiling Assistant.DaemonStatus ( Assistant/DaemonStatus.hs, tmp/Assistant/DaemonStatus.o )
+ [235 of 279] Compiling Assistant.Sync ( Assistant/Sync.hs, tmp/Assistant/Sync.o )
+ [236 of 279] Compiling Assistant.MakeRemote ( Assistant/MakeRemote.hs, tmp/Assistant/MakeRemote.o )
+ [237 of 279] Compiling Assistant.XMPP.Git ( Assistant/XMPP/Git.hs, tmp/Assistant/XMPP/Git.o )
+ [238 of 279] Compiling Command.XMPPGit ( Command/XMPPGit.hs, tmp/Command/XMPPGit.o )
+ [239 of 279] Compiling Assistant.Threads.NetWatcher ( Assistant/Threads/NetWatcher.hs, tmp/Assistant/Threads/NetWatcher.o )
+ [240 of 279] Compiling Assistant.NamedThread ( Assistant/NamedThread.hs, tmp/Assistant/NamedThread.o )
+ [241 of 279] Compiling Assistant.WebApp.Notifications ( Assistant/WebApp/Notifications.hs, tmp/Assistant/WebApp/Notifications.o )
+
+ Assistant/WebApp/Notifications.hs:39:11:
+ No instances for (Text.Julius.ToJavascript String,
+ Text.Julius.ToJavascript Text)
+ arising from a use of `Text.Julius.toJavascript'
+ Possible fix:
+ add instance declarations for
+ (Text.Julius.ToJavascript String, Text.Julius.ToJavascript Text)
+ In the first argument of `Text.Julius.Javascript', namely
+ `Text.Julius.toJavascript delay'
+ In the expression:
+ Text.Julius.Javascript (Text.Julius.toJavascript delay)
+ In the first argument of `Data.Monoid.mconcat', namely
+ `[Text.Julius.Javascript
+ ((Data.Text.Lazy.Builder.fromText . Text.Shakespeare.pack')
+ "function longpoll_"),
+ Text.Julius.Javascript (Text.Julius.toJavascript ident),
+ Text.Julius.Javascript
+ ((Data.Text.Lazy.Builder.fromText . Text.Shakespeare.pack')
+ "() {\
+ \\tlongpoll(longpoll_"),
+ Text.Julius.Javascript (Text.Julius.toJavascript ident), ....]'
+ make: *** [git-annex] Error 1
+
+> Reproduced this and confirmed it's fixed in git. --[[Joey]] [[done]]
diff --git a/doc/bugs/3.20121112_build_fails_on_Ubuntu_12.04.mdwn b/doc/bugs/3.20121112_build_fails_on_Ubuntu_12.04.mdwn
new file mode 100644
index 0000000..cd08976
--- /dev/null
+++ b/doc/bugs/3.20121112_build_fails_on_Ubuntu_12.04.mdwn
@@ -0,0 +1,97 @@
+What steps will reproduce the problem?
+
+* Start with Ubuntu 12.04
+* sudo apt-get install haskell-platform libgsasl7-dev gsasl g2hs
+* cabal install git-annex --bindir=$HOME/bin
+
+What is the expected output? What do you see instead?
+
+Expected something like "installation successful"
+
+Actual output, after build notices:
+
+
+Loading package IfElse-0.85 ... linking ... done.
+Loading object (static) dist/build/git-annex/git-annex-tmp/Utility/libdiskfree.o ... done
+Loading object (static) dist/build/git-annex/git-annex-tmp/Utility/libmounts.o ... done
+final link ... done
+[157 of 279] Compiling Assistant.Types.DaemonStatus ( Assistant/Types/DaemonStatus.hs, dist/build/git-annex/git-annex-tmp/Assistant/Types/DaemonStatus.o )
+[158 of 279] Compiling Assistant.Monad ( Assistant/Monad.hs, dist/build/git-annex/git-annex-tmp/Assistant/Monad.o )
+
+Assistant/Monad.hs:86:16:
+ Couldn't match expected type `Assistant a'
+ with actual type `Reader AssistantData a'
+ Expected type: (AssistantData -> a) -> Assistant a
+ Actual type: (AssistantData -> a) -> Reader AssistantData a
+ In the expression: reader
+ In an equation for `getAssistant': getAssistant = reader
+
+Assistant/Monad.hs:93:15:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: st <- reader threadState
+ In the expression:
+ do { st <- reader threadState;
+ liftIO $ runThreadState st a }
+
+Assistant/Monad.hs:99:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ liftIO $ io $ runAssistant d a }
+
+Assistant/Monad.hs:105:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ runAssistant d a }
+
+Assistant/Monad.hs:110:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ \ v -> runAssistant d $ a v }
+
+Assistant/Monad.hs:115:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ \ v1 v2 -> runAssistant d (a v1 v2) }
+
+Assistant/Monad.hs:120:12:
+ Couldn't match expected type `Assistant a0'
+ with actual type `Reader r0 a1'
+ In the return type of a call of `reader'
+ In the first argument of `(>>=)', namely `reader v'
+ In the expression: reader v >>= liftIO . io
+cabal: Error: some packages failed to install:
+git-annex-3.20121112 failed during the building phase. The exception was:
+ExitFailure 1
+
+
+What version of git-annex are you using? On what operating system?
+
+git annex 3.20121112
+Ubuntu 12.04 (current "long term support", all packages up to date)
+
+Please provide any additional information below.
+
+No idea how important this is for git-annex in general but reporting in case it is. Thank you for working on git annex!
+
+> I was able to reproduce this build error when I force installed
+> an old version of the haskell mtl library. So git-annex needs version
+> 2.1.1 to build, and I have adjusted the build dependencies appropriately.
+> [[done]] --[[Joey]]
diff --git a/doc/bugs/3.20121113_build_error___39__not_in_scope_getAddBoxComR__39__.mdwn b/doc/bugs/3.20121113_build_error___39__not_in_scope_getAddBoxComR__39__.mdwn
new file mode 100644
index 0000000..59ca6b5
--- /dev/null
+++ b/doc/bugs/3.20121113_build_error___39__not_in_scope_getAddBoxComR__39__.mdwn
@@ -0,0 +1,33 @@
+What steps will reproduce the problem?
+
+Building from latest source, Cabal update, cabal install --only dependencies, cabal configure, Cabal build
+
+What is the expected output? What do you see instead?
+
+Error message from build
+
+...
+
+Loading package DAV-0.2 ... linking ... done.
+
+Loading object (static) dist/build/git-annex/git-annex-tmp/Utility/libdiskfree.o ... done
+
+Loading object (static) dist/build/git-annex/git-annex-tmp/Utility/libmounts.o ... done
+
+final link ... done
+
+
+Assistant/Threads/WebApp.hs:47:1: Not in scope: `getAddBoxComR'
+
+Assistant/Threads/WebApp.hs:47:1: Not in scope: `getEnableWebDAVR'
+
+
+What version of git-annex are you using? On what operating system?
+
+Latest version via git from git-annex.branchable.com
+
+Debian Squeeze (6.0.6)
+
+Please provide any additional information below.
+
+> I noticed this earlier and fixed it. [[done]] --[[Joey]]
diff --git a/doc/bugs/Detection_assumes_that_shell_is_bash.mdwn b/doc/bugs/Detection_assumes_that_shell_is_bash.mdwn
index 23298bf..46b159e 100644
--- a/doc/bugs/Detection_assumes_that_shell_is_bash.mdwn
+++ b/doc/bugs/Detection_assumes_that_shell_is_bash.mdwn
@@ -16,3 +16,5 @@ git-annex version: 3.20121017
###Please provide any additional information below.###
Not everyone has bash as their login-shell.
+
+[[!tag /design/assistant]]
diff --git a/doc/bugs/Install_of_git-annex-3.20121112_fails.mdwn b/doc/bugs/Install_of_git-annex-3.20121112_fails.mdwn
new file mode 100644
index 0000000..f4f1c6c
--- /dev/null
+++ b/doc/bugs/Install_of_git-annex-3.20121112_fails.mdwn
@@ -0,0 +1,20 @@
+What steps will reproduce the problem?
+
+- rm -rf ~/.ghc/ && cabal update && cabal install git-annex --bindir=$HOME/bin
+
+What is the expected output? What do you see instead?
+
+- I would like to have the latest release installed
+
+What version of git-annex are you using? On what operating system?
+
+- git-annex-3.20121112
+- Ubuntu 12.04 LTS
+- The Glorious Glasgow Haskell Compilation System, version 7.4.1
+
+Please provide any additional information below.
+
+I use it heavily on 4 machines since a month and I really like it.
+
+> closing since this is a cabal library problem, and not something that
+> can be fixed by any change to git-annex. [[done]] --[[Joey]]
diff --git a/doc/bugs/Is_there_any_way_to_rate_limit_uploads_to_an_S3_backend__63__.mdwn b/doc/bugs/Is_there_any_way_to_rate_limit_uploads_to_an_S3_backend__63__.mdwn
new file mode 100644
index 0000000..64826c0
--- /dev/null
+++ b/doc/bugs/Is_there_any_way_to_rate_limit_uploads_to_an_S3_backend__63__.mdwn
@@ -0,0 +1,19 @@
+What steps will reproduce the problem?
+
+Adding files to a local annex set up to sync to a remote S3 one
+
+
+What is the expected output? What do you see instead?
+
+It syncs, but maxes out the uplink
+
+
+What version of git-annex are you using? On what operating system?
+
+3.20121112 on Debian testing
+
+
+Please provide any additional information below.
+
+The man page lists how to configure rate limiting for rsync, not sure how to do it for this
+
diff --git a/doc/bugs/another_build_error_in_assistant.mdwn b/doc/bugs/another_build_error_in_assistant.mdwn
new file mode 100644
index 0000000..c21f1ac
--- /dev/null
+++ b/doc/bugs/another_build_error_in_assistant.mdwn
@@ -0,0 +1,79 @@
+What steps will reproduce the problem?
+Just trying to install git-annex last release (20121120) from cabal or from bundled sources
+
+What is the expected output? What do you see instead?
+Build stop like this :
+(doing cabal install or build from the bundle)
+...
+[161 of 284] Compiling Assistant.Alert ( Assistant/Alert.hs, dist/build/git-annex/git-annex-tmp/Assistant/Alert.o )
+[162 of 284] Compiling Assistant.Types.DaemonStatus ( Assistant/Types/DaemonStatus.hs, dist/build/git-annex/git-annex-tmp/Assistant/Types/DaemonStatus.o )
+[163 of 284] Compiling Assistant.Monad ( Assistant/Monad.hs, dist/build/git-annex/git-annex-tmp/Assistant/Monad.o )
+
+Assistant/Monad.hs:86:16:
+ Couldn't match expected type `Assistant a'
+ with actual type `Reader AssistantData a'
+ Expected type: (AssistantData -> a) -> Assistant a
+ Actual type: (AssistantData -> a) -> Reader AssistantData a
+ In the expression: reader
+ In an equation for `getAssistant': getAssistant = reader
+
+Assistant/Monad.hs:93:15:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: st <- reader threadState
+ In the expression:
+ do { st <- reader threadState;
+ liftIO $ runThreadState st a }
+
+Assistant/Monad.hs:99:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ liftIO $ io $ runAssistant d a }
+
+Assistant/Monad.hs:105:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ runAssistant d a }
+
+Assistant/Monad.hs:110:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ \ v -> runAssistant d $ a v }
+
+Assistant/Monad.hs:115:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ \ v1 v2 -> runAssistant d (a v1 v2) }
+
+Assistant/Monad.hs:120:12:
+ Couldn't match expected type `Assistant a0'
+ with actual type `Reader r0 a1'
+ In the return type of a call of `reader'
+ In the first argument of `(>>=)', namely `reader v'
+ In the expression: reader v >>= liftIO . io
+
+
+
+
+What version of git-annex are you using? On what operating system?
+- version 3.20121112
+- Ubuntu 12.04 LTS, 64 bits
+
+> Dup of [[3.20121112_build_fails_on_Ubuntu_12.04]]. --[[Joey]] [[done]]
diff --git a/doc/bugs/com.branchable.git-annex.assistant.plist_is_invalid.mdwn b/doc/bugs/com.branchable.git-annex.assistant.plist_is_invalid.mdwn
new file mode 100644
index 0000000..d67cdcd
--- /dev/null
+++ b/doc/bugs/com.branchable.git-annex.assistant.plist_is_invalid.mdwn
@@ -0,0 +1,15 @@
+What steps will reproduce the problem?
+`cat com.branchable.git-annex.assistant`
+
+What version of git-annex are you using? On what operating system?
+ git-annex version: 3.20121112 on OS X Mountain Lion
+
+Please provide any additional information below.
+The '`RunAtLoad`' key is missing a value.
+
+It should say:
+
+`<key>RunAtLoad</key>`<br>
+`<true/>`
+
+> Fixed in git. [[done]] --[[Joey]]
diff --git a/doc/bugs/dropping_and_re-adding_from_web_remotes_doesn__39__t_work.mdwn b/doc/bugs/dropping_and_re-adding_from_web_remotes_doesn__39__t_work.mdwn
new file mode 100644
index 0000000..f0eb905
--- /dev/null
+++ b/doc/bugs/dropping_and_re-adding_from_web_remotes_doesn__39__t_work.mdwn
@@ -0,0 +1,131 @@
+In experimenting with the web remote, I found that dropping a URL gave an error, but still succeeded, while re-adding the same URL fails to work correctly.
+
+#What steps will reproduce the problem?
+
+<pre>
+/tmp $ dd if=/dev/zero of=/tmp/file.bin count=1024
+1024+0 records in
+1024+0 records out
+524288 bytes (524 kB) copied, 0.0135652 s, 38.6 MB/s
+/tmp $ mkdir /tmp/repo
+/tmp $ cd /tmp/repo
+/tmp/repo $ git init
+Initialized empty Git repository in /tmp/repo/.git/
+/tmp/repo $ git annex init "test"
+init test ok
+(Recording state in git...)
+/tmp/repo $ git annex addurl file:///tmp/file.bin --file annexed.bin
+######################################################################## 100.0%
+(checksum...) ok
+(Recording state in git...)
+/tmp/repo $ git annex drop annexed.bin
+drop annexed.bin (checking file:///tmp/file.bin...) ok
+(Recording state in git...)
+/tmp/repo $ mv /tmp/file.bin /tmp/file2.bin
+/tmp/repo $ git annex get annexed.bin
+get annexed.bin (from web...)
+curl: (37) Couldn't open file /tmp/file.bin
+
+ Unable to access these remotes: web
+
+ Try making some of these repositories available:
+ 00000000-0000-0000-0000-000000000001 -- web
+failed
+git-annex: get: 1 failed
+/tmp/repo $ git annex drop --from web annexed.bin --force
+drop web annexed.bin
+ removal from web not supported
+failed
+(Recording state in git...)
+git-annex: drop: 1 failed
+/tmp/repo $ git annex get annexed.bin
+get annexed.bin (not available)
+ No other repository is known to contain the file.
+failed
+git-annex: get: 1 failed
+/tmp/repo $ mv /tmp/file2.bin /tmp/file.bin
+/tmp/repo $ git annex addurl file:///tmp/file.bin --file annexed.bin
+addurl annexed.bin ok
+/tmp/repo $ git annex whereis annexed.bin
+whereis annexed.bin (0 copies) failed
+git-annex: whereis: 1 failed
+/tmp/repo $ git annex addurl file:///tmp/file.bin --file annexed2.bin
+######################################################################## 100.0%
+(checksum...) ok
+(Recording state in git...)
+/tmp/repo $ git annex whereis annexed.bin
+whereis annexed.bin (1 copy)
+ e2418e81-ec04-4091-aabe-ed75d65f93fa -- here (test)
+ok
+/tmp/repo $ git annex whereis annexed2.bin
+whereis annexed2.bin (1 copy)
+ e2418e81-ec04-4091-aabe-ed75d65f93fa -- here (test)
+ok
+/tmp/repo $ git annex drop annexed.bin
+drop annexed.bin (unsafe)
+ Could only verify the existence of 0 out of 1 necessary copies
+
+ No other repository is known to contain the file.
+
+ (Use --force to override this check, or adjust annex.numcopies.)
+failed
+git-annex: drop: 1 failed
+/tmp/repo $ git annex drop annexed2.bin
+drop annexed2.bin (unsafe)
+ Could only verify the existence of 0 out of 1 necessary copies
+
+ No other repository is known to contain the file.
+
+ (Use --force to override this check, or adjust annex.numcopies.)
+failed
+git-annex: drop: 1 failed
+/tmp/repo $ mv /tmp/file.bin /tmp/file2.bin
+/tmp/repo $ git annex addurl file:///tmp/file2.bin --file annexed.bin
+addurl annexed.bin ok
+(Recording state in git...)
+/tmp/repo $ git annex whereis annexed2.bin
+whereis annexed2.bin (2 copies)
+ 00000000-0000-0000-0000-000000000001 -- web
+ e2418e81-ec04-4091-aabe-ed75d65f93fa -- here (test)
+
+ web: file:///tmp/file.bin
+ web: file:///tmp/file2.bin
+ok
+/tmp/repo $ mv /tmp/file2.bin /tmp/file.bin
+/tmp/repo $ git annex drop annexed.bin
+drop annexed.bin (checking file:///tmp/file.bin...) ok
+(Recording state in git...)
+/tmp/repo $ git annex get annexed.bin
+get annexed.bin (from web...)
+######################################################################## 100.0%
+ok
+(Recording state in git...)
+/tmp/repo $ git annex drop annexed.bin
+drop annexed.bin (checking file:///tmp/file.bin...) ok
+(Recording state in git...)
+/tmp/repo $ mv /tmp/file.bin /tmp/file2.bin
+/tmp/repo $ git annex get annexed.bin
+get annexed.bin (from web...)
+curl: (37) Couldn't open file /tmp/file.bin
+######################################################################## 100.0%
+ok
+(Recording state in git...)
+</pre>
+
+#What is the expected output? What do you see instead?
+
+
+When dropping one file and I see "git-annex: drop: 1 failed" I would expect the file to still be in the remote as far as git-annex is concerned.
+
+When re-adding the URL, I expect the file to be re-added to the web remote. (note that it re-appears after adding the same file via a different URL)
+
+
+#What version of git-annex are you using? On what operating system?
+
+
+3.20121112 on Gentoo Linux
+
+
+#Please provide any additional information below.
+
+This seems to be a corner case, and would probably have minimal impact on most people.
diff --git a/doc/bugs/fat_support/comment_8_acc947643a635eb10a1bff92083a3506._comment b/doc/bugs/fat_support/comment_8_acc947643a635eb10a1bff92083a3506._comment
new file mode 100644
index 0000000..558e0ca
--- /dev/null
+++ b/doc/bugs/fat_support/comment_8_acc947643a635eb10a1bff92083a3506._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmcYryijvlF8bJvM_eZNSrUPEkMlxMDGTQ"
+ nickname="Thiago"
+ subject="POSIX layer on top of VFAT using FUSE"
+ date="2012-11-24T00:21:23Z"
+ content="""
+I just found out about this project and didn't try it, but it looks like it would allow using git-annex on a USB stick with a normal repository:
+
+<http://sourceforge.net/projects/posixovl/>
+"""]]
diff --git a/doc/bugs/glacier_with_assistant_bugs.mdwn b/doc/bugs/glacier_with_assistant_bugs.mdwn
new file mode 100644
index 0000000..4486a4f
--- /dev/null
+++ b/doc/bugs/glacier_with_assistant_bugs.mdwn
@@ -0,0 +1,13 @@
+* Need to handle retrying downloads of files from glacier after 4 hours.
+
+* When a file is moved into archive/, the assistant that sends it to
+ glacier is able to trust that it's in glacier and remove the local copy.
+ But other assistants that also have a copy cannot trust that, and so
+ don't remove their copies.
+
+* For that matter, glacier-cli currently relies on a local cache of
+ inventory information, and so other git-annexes using the same glacier
+ repository are not able to access stuff in it, unless and until
+ `glacier vault sync` is run.
+
+[[!tag /design/assistant]]
diff --git a/doc/bugs/map_not_respecting_annex_ssh_options__63__.mdwn b/doc/bugs/map_not_respecting_annex_ssh_options__63__.mdwn
new file mode 100644
index 0000000..0d362db
--- /dev/null
+++ b/doc/bugs/map_not_respecting_annex_ssh_options__63__.mdwn
@@ -0,0 +1,37 @@
+### What steps will reproduce the problem?
+
+1. Have a remote that uses annex-ssh-options to specify an sshkey which is needed to invoke git-annex-shell on that remote.
+2. Run git-annex map.
+
+### What is the expected output? What do you see instead?
+
+I expect to see a map without any errors complaining of commands not recognized.
+
+Instead I see:
+
+ greg@x200s:~/Pictures/Photos$ git-annex map
+ map /home/greg/Pictures/Photos ok
+ map 60justin (sshing...)
+ ok
+ map rose (sshing...)
+ fatal: unrecognized command 'cd '/home/greg/Media/Pictures/Photos/' && git config --null --list'
+ git-annex-shell: git-shell failed
+
+relevant part of .git/config:
+
+ [remote "rose"]
+ url = greg@rose.makesad.us:/home/greg/Media/Pictures/Photos/
+ fetch = +refs/heads/*:refs/remotes/rose/*
+ annex-ssh-options = "-i /home/greg/.ssh/annex.x200s_rsa"
+ annex-trustlevel = trusted
+ annex-uuid = c0e4106e-2631-11e2-9749-1bfa37a61069
+
+
+### What version of git-annex are you using? On what operating system?
+
+ git-annex version: 3.20121017
+ local repository version: 3
+ default repository version: 3
+ supported repository versions: 3
+ upgrade supported from repository versions: 0 1 2
+
diff --git a/doc/bugs/migrated_files_not_showing_up_in_unused_list.mdwn b/doc/bugs/migrated_files_not_showing_up_in_unused_list.mdwn
index e76448b..33776d1 100644
--- a/doc/bugs/migrated_files_not_showing_up_in_unused_list.mdwn
+++ b/doc/bugs/migrated_files_not_showing_up_in_unused_list.mdwn
@@ -57,3 +57,6 @@ Command outputs (see, specifically, the output of status showing number of SHA25
100 /var/lib/dpkg/status
3.20120629 0
650 http://ftp.us.debian.org/debian/ wheezy/main i386 Packages
+
+
+As Joey predicted, this took care of itself over time. Marking as [[bugs/done]].
diff --git a/doc/bugs/unlock_fails_silently_with_directory_symlinks.mdwn b/doc/bugs/unlock_fails_silently_with_directory_symlinks.mdwn
new file mode 100644
index 0000000..9b9bb63
--- /dev/null
+++ b/doc/bugs/unlock_fails_silently_with_directory_symlinks.mdwn
@@ -0,0 +1,53 @@
+What steps will reproduce the problem?
+
++ ```~/``` is tracked by git and git annex
++ ```~/text/books/foo``` is annexed
++ ```~/books``` is a symlink to ```text/books```
++ from ```~/``` execute: ```git annex unlock books/foo```
++ which returns immediately with zero exit code and does not unlock foo.
+
+What is the expected output? What do you see instead?
+
++ I expect ```~/text/books/foo``` to be unlocked
+
++ I think ```git annex unlock``` should resolve the symlinks and realize that this is a tracked file.
+
+What version of git-annex are you using? On what operating system?
+
++ 3.20121112 in debian unstable
+
+Please provide any additional information below.
+
++ I can unlock foo if I provide the full path, eg:
+from ```~/``` execute: ```git annex unlock text/books/foo```
+
++ Interestingly, the following _does_ successfully unlock the file: ```cd ~/books && git annex unlock foo```
+
+ So it seems that symlinks in $PWD are being resolved, but not those in file paths passed as arguments.
+
+Thank you, thank you!
+
+ - Jason
+
+jason@jasonwoof.com
+
+> I'm afraid this is not a bug. Here's why: If you run "git mv books/foo
+> books/bar", git will complain:
+>
+>> fatal: not under version control, source=books/foo, destination=books/bar
+>
+> So git-annex is just following git's lead (indeed, it's just running
+> `git ls-files` to find files to act on), and git doesn't
+> recognise this path as a file that's in git. --[[Joey]]
+
++ Also, I think ```git annex unlock``` should emit an error message if a file explicitly addressed on the commandline can not be acted upon.
+
+> I'm beginning to think perhaps it should. Users seem to find the current
+> behavior to be sometimes confusing.
+>
+> However, it's actually a very difficult change to make. Several commands
+> have multiple seek stages that act on different types of files, so
+> any warning printed by an earlier stage may be premature if a later
+> stage comes along and deals with a file. --[[Joey]]
+
+>> Figured out a non-invasive way to add that warning. [[done]] --[[Joey]]
diff --git a/doc/design/assistant.mdwn b/doc/design/assistant.mdwn
index ccc217a..f8ef510 100644
--- a/doc/design/assistant.mdwn
+++ b/doc/design/assistant.mdwn
@@ -9,10 +9,10 @@ and use cases to add. Feel free to chip in with comments! --[[Joey]]
* Month 2 "shiny webapp": [[!traillink webapp]] [[!traillink progressbars]]
* Month 3 "easy setup": [[!traillink configurators]] [[!traillink pairing]]
* Month 4 "cloud": [[!traillink cloud]] [[!traillink transfer_control]]
+* Month 5 "cloud continued": [[!traillink xmpp]] [[!traillink more_cloud_providers]]
We are, approximately, here:
-* Month 5 "cloud continued": [[!traillink xmpp]] [[polls]]
* Months 6-7 "9k bonus round": [[!traillink Android]] [[!traillink partial_content]] [[!traillink leftovers]]
* Months 8-11: more user-driven features and polishing (see remaining TODO items in all pages above)
* Month 12: "Windows purgatory" [[Windows]]
diff --git a/doc/design/assistant/OSX.mdwn b/doc/design/assistant/OSX.mdwn
index 3f2b732..d7fffce 100644
--- a/doc/design/assistant/OSX.mdwn
+++ b/doc/design/assistant/OSX.mdwn
@@ -4,9 +4,7 @@ Misc OSX porting things:
* icon to start webapp **done**
* Use OSX's "network reachability functionality" to detect when on a network
<http://developer.apple.com/library/mac/#documentation/Networking/Conceptual/SystemConfigFrameworks/SC_Intro/SC_Intro.html#//apple_ref/doc/uid/TP40001065>
-* daily build seems to have a bad yesod-static, resulting in the url
- for `/static/jquery-full.js` redirecting to `/jquery-full.js?etag=foo',
- which is a 404. Works ok when I build it on oberon.
+* Fix app build to work on more OSX versions.
Gripes:
diff --git a/doc/design/assistant/blog/day_129__release.mdwn b/doc/design/assistant/blog/day_129__release.mdwn
new file mode 100644
index 0000000..553b0e9
--- /dev/null
+++ b/doc/design/assistant/blog/day_129__release.mdwn
@@ -0,0 +1,4 @@
+Cut a new release today. It's been nearly a month since the last one, and a
+large number of improvements. Be sure to read the
+[[/assistant/release_notes]] if upgrading. All the standalone builds are
+updated already.
diff --git a/doc/design/assistant/blog/day_130__what_now.mdwn b/doc/design/assistant/blog/day_130__what_now.mdwn
new file mode 100644
index 0000000..4fc3fe5
--- /dev/null
+++ b/doc/design/assistant/blog/day_130__what_now.mdwn
@@ -0,0 +1,36 @@
+Dealt with post-release feedback deluge. There are a couple weird bugs that
+I don't understand yet. OSX app is still not working everywhere.
+
+----
+
+Got the list of repositories in the webapp to update automatically when
+repositories are added, including when syncing with a remote causes
+repositories to be discovered.
+
+----
+
+I need a plan for the rest of the month. It feels right to focus on more
+cloud storage support. Particularly because all the cloud providers
+supported so far are ones that, while often being best of breed, also cost
+money. To finish up the cloud story, need support for some free ones.
+
+Looking at the results of the [[polls/prioritizing_special_remotes]]
+poll, I suspect that free storage is a large part of why Google Drive got
+so many votes. Soo, since there is not yet a Haskell library for Google
+Drive, rather than spending a large chunk of time writing one, I hope to
+use a [Haskell WebDAV library](http://hackage.haskell.org/package/DAV)
+that my friend Clint recently wrote. A generic
+WebDAV special remote in git-annex will provide much better support for
+box.com (which has 5 to 50 gb free storage), as well as all the
+[OwnCloud providers](http://owncloud.org/providers/), at least one of which
+provides 5 gb free storage.
+
+If I have time left this month after doing that, I'd be inclined to do
+Amazon Glacier. People have already gotten that working with git-annex, but
+a proper special remote should be better/easier, and will allow integrating
+support for it into the assistant, which should help deal with its long
+retrieval delays. And since, if you have a lot of data
+archived in Glacier, you will only want to pull out a few files at a time,
+this is another place besides mobile phones where a [[partial_content]]
+retrieval UI is needed. Which is on the roadmap to be worked on next
+month-ish. Synergy, here I come. I hope.
diff --git a/doc/design/assistant/blog/day_131__webdav_groundwork.mdwn b/doc/design/assistant/blog/day_131__webdav_groundwork.mdwn
new file mode 100644
index 0000000..795acfe
--- /dev/null
+++ b/doc/design/assistant/blog/day_131__webdav_groundwork.mdwn
@@ -0,0 +1,28 @@
+Read up on WebDAV, and got the haskell library working. Several hours
+were wasted by stumbling over a bug in the library, that requires a
+carefully crafted XML document to prevent. Such a pity about things
+like DAV (and XMPP) being designed back when people were gung-ho about
+XML.. but we're stuck with them now.
+
+Now I'm able to send and receive files to box.com using the library. Trying to
+use an OwnCloud server, though, I get a most strange error message, which
+looks to be coming from deep in the HTTPS library stack: "invalid IV length"
+
+The haskell DAV library didn't have a way to delete files. I've added one
+and sent off a patch.
+
+Roughed in a skeleton of a webdav special remote. Doesn't do anything yet.
+Will soon.
+
+Factored out a Creds module from parts of the S3 special remote and XMPP
+support, that all has to do with credentials storage. Using this for webdav
+creds storage too.
+
+Will also need to factor out the code that's currently in the directory
+special remote, for chunking of files.
+
+----
+
+PS: WebDAV, for all its monstrously complicated feature set, lacks one obvious
+feature: The ability to check how much free space is available to store
+files. Eyeroll.
diff --git a/doc/design/assistant/blog/day_132__webdav_continued.mdwn b/doc/design/assistant/blog/day_132__webdav_continued.mdwn
new file mode 100644
index 0000000..075a5d8
--- /dev/null
+++ b/doc/design/assistant/blog/day_132__webdav_continued.mdwn
@@ -0,0 +1,22 @@
+Two releases of the Haskell DAV library today. First release had my changes
+from yesterday. Then I realized I'd also need support for making WebDAV
+"collections" (subdirectories), and sent Clint a patch for that too, as
+well as a patch for querying DAV properties, and that was 0.2.
+Got it into Debian unstable as well. Should have everything I'll need now.
+
+The webdav special remote is now working! Still todo:
+Encryption support, progress bars, large file chunking, and webapp
+configurators. But already, it's a lot nicer than the old approach of using
+davfs2, which was really flakey and slow with large data volumes.
+
+I did notice, though, that uploading a 100 mb file made the process use 100
+mb of memory. This is a problem I've struggled with earlier with S3, the
+Haskell http libraries are prevented from streaming data by several parts
+of the protocol that cause the data to be accessed more than once. I guess
+this won't be a problem for DAV though, since it'll probably be chunking
+files anyway.
+
+---
+
+Mailed out all my Kickstarter USB key rewards, and ordered
+the T-shirts too.
diff --git a/doc/design/assistant/blog/day_133__webdav_working.mdwn b/doc/design/assistant/blog/day_133__webdav_working.mdwn
new file mode 100644
index 0000000..d71f258
--- /dev/null
+++ b/doc/design/assistant/blog/day_133__webdav_working.mdwn
@@ -0,0 +1,31 @@
+Worked on webdav special remotes all day.
+
+* Got encryption working,
+ after fixing an amusing typo that made `initremote` for webdav throw away the
+ encryption configuration and store files unencrypted.
+* Factored out parts of the directory special remote that had to do with file
+ chunking, and am using that for webdav. This refactoring was painful.
+
+At this point, I feel the webdav special remote works better than the old
+davfs2 + directory special remote hack. While webdav doesn't yet have
+progress info for uploads, that info was pretty busted anyway with
+davfs2 due to how it buffers files. So ... I've merged webdav into master!
+
+-----
+
+Tomorrow, webapp configurators for Box.com and any other webdav supporting
+sites I can turn up and get to work..
+
+-----
+
+A while ago I made git-annex not store login credentials in git for special
+remotes, when it's only encrypting them with a shared cipher. The
+rationale was that you don't want to give everyone who gets ahold of your
+git repo (which includes the encryption key) access to your passwords,
+Amazon S3 account, or whatever. I'm now considering adding a checkbox (or
+command-line flag) that allows storing the login credentials in git,
+if the user wants to. While using public key crypto is the real solution
+(and is fully supported by git-annex (but not yet configurable in the
+webapp)), this seems like a reasonable thing to do in some circumstances,
+like when you have a Box.com account you really do want to share with
+the people who use the git repo.
diff --git a/doc/design/assistant/blog/day_134__box.com_configurator.mdwn b/doc/design/assistant/blog/day_134__box.com_configurator.mdwn
new file mode 100644
index 0000000..1156336
--- /dev/null
+++ b/doc/design/assistant/blog/day_134__box.com_configurator.mdwn
@@ -0,0 +1,8 @@
+I needed an easy day, and I got one. Configurator in the webapp for Box.com
+came together really quickly and easily, and worked on the first try.
+
+Also filed a [bug](https://github.com/vincenthz/hs-cryptocipher/issues/21)
+on the Haskell library that is failing on portknox.com's SSL certificate.
+That site is the only OwnCloud provider currently offering free WebDAV
+storage. Will hold off on adding OwnCloud to the webapp's cloud provider lists
+until that's fixed.
diff --git a/doc/design/assistant/blog/day_135__progress_revisited.mdwn b/doc/design/assistant/blog/day_135__progress_revisited.mdwn
new file mode 100644
index 0000000..63e3c4b
--- /dev/null
+++ b/doc/design/assistant/blog/day_135__progress_revisited.mdwn
@@ -0,0 +1,37 @@
+Unexpectedly today, I got progress displays working for uploads via WebDAV.
+
+The roadblock had been that the interface for uploading to S3 and WebDAV
+is something like `ByteString -> IO a`. Which doesn't provide any hook to
+update a progress display as the ByteString is consumed.
+
+My solution to this was to create a `hGetContentsObserved`, that's similar
+to `hGetContents`, but after reading each 64kb chunk of data from the
+Handle to populate the ByteString, it runs some observing action. So when
+the ByteString is streamed, as each chunk is consumed, the observer
+runs. I was able to get this to run in constant space, despite not having
+access to some of the ByteString internals that `hGetContents` is built
+with.
+
+So, a little scary, but nice. I am curious if there's not a better way
+to solve this problem hidden in a library somewhere. Perhaps it's another
+thing that conduit solves somehow? Because if there's not, my solution
+probably deserves to be put into a library. Any Haskell folk know?
+
+----
+
+Used above to do progress displays for uploads to S3. Also did progress
+display to console on download from S3. Now very close to being done
+with [[progressbars]]. Finally. Only bup and hook remotes need progress
+work.
+
+----
+
+Reworked the core crypto interface, to better support streaming data through
+gpg. This allowed fixing both the directory and webdav special remotes to
+not buffer whole files in memory when retrieving them as chunks from the
+remote.
+
+-----
+
+Spent some time dealing with API changes in Yesod and Conduit. Some of them
+annoyingly gratuitous.
diff --git a/doc/design/assistant/blog/day_136__misc.mdwn b/doc/design/assistant/blog/day_136__misc.mdwn
new file mode 100644
index 0000000..5a14156
--- /dev/null
+++ b/doc/design/assistant/blog/day_136__misc.mdwn
@@ -0,0 +1,14 @@
+Changed how the directory and webdav special remotes store content.
+The new method uses less system calls, is more robust, and leaves any
+partially transferred key content in a single tmp directory, which
+will make it easier to clean that out later.
+
+Also found & fixed a cute bug in the directory special remote when the
+chunksize is set to a smaller value than the ByteString chunk size, that
+caused it to loop forever creating empty files.
+
+----
+
+Added an embedcreds=yes option when setting up special remotes.
+Will put UI for it into the webapp later, but rather than work on that
+tomorrow, I plan to work on glacier.
diff --git a/doc/design/assistant/blog/day_137__Glacier.mdwn b/doc/design/assistant/blog/day_137__Glacier.mdwn
new file mode 100644
index 0000000..4e6787e
--- /dev/null
+++ b/doc/design/assistant/blog/day_137__Glacier.mdwn
@@ -0,0 +1,30 @@
+Got Amazon Glacier working as a full-fledged special remote.
+
+(Well, I think it works... Since it takes 4 hours to get data out,
+which is longer than the time it took me to sign up for Glacier and
+write the special remote ... I've yet to fully test it!)
+
+Thanks to Robie Basak for writing glacier-cli, and developing the initial
+hook remote support. Also thanks to Peter Todd for pointing out that
+Glacier cannot store empty files, which had to be worked around in the
+special remote.
+
+Of course the 4 hour delay on retrieval makes Glacier interesting. For now,
+you have to run "git annex get" twice, once to queue the retrieval, and a
+second time in 4 hours to get the file(s). There is a helpful example in
+[[tips/using_Amazon_Glacier]].
+
+The real complication though, is that Glacier's inventories take a long
+time to get, and can be out of date. So glacier-cli caches inventory info.
+I didn't feel comfortable making git-annex trust that information,
+so it'll refuse to trust that Glacier has a copy of a file when dropping
+it. There's a `--trust-glacier` switch to override this default paranoid
+behavior when dropping files.
+
+----
+
+Tomorrow ... er, tomorrow is Thanksgiving trip start.
+
+Next weekend: Webapp configurator for glacier, and maybe something
+to get the assistant to detect when jobs are complete and finish
+retrievals from Glacier, automatically.
diff --git a/doc/design/assistant/blog/day_138__back.mdwn b/doc/design/assistant/blog/day_138__back.mdwn
new file mode 100644
index 0000000..7c2b4ec
--- /dev/null
+++ b/doc/design/assistant/blog/day_138__back.mdwn
@@ -0,0 +1,25 @@
+Added a configurator for Glacier repositories to the webapp. That was the last
+cloud repository configurator that was listed in the webapp and wasn't
+done. Indeed, just two more repository configurators remain to be filled in:
+phone and NAS.
+
+By default, Glacier repositories are put in a new "small archive" group.
+This makes only files placed in "archive" directories be sent to Glacier
+(as well as being dropped from clients), unlike the full archive group
+which archives all files. Of course you can change this setting, but
+avoiding syncing all data to Glacier seemed like a good default, especially
+since some are still worried about Glacier's pricing model.
+
+Fixed several bugs in the handling of archive directories, and
+the webapp makes a toplevel archive directory when an archive remote is
+created, so the user can get on with using it.
+
+Made the assistant able to drop local files immediately after transferring
+them to glacier, despite not being able to trust glacier's inventory.
+This was accomplished by making the transferrer, after a successful upload,
+indicate that it trusts the remote it just uploaded to has the file,
+when it checks if the file should be dropped.
+
+Only thing left to do for glacier is to make the assistant retry failed
+downloads from it after 4 hours, or better, as soon as they become
+available.
diff --git a/doc/design/assistant/blog/day_139__catch_up.mdwn b/doc/design/assistant/blog/day_139__catch_up.mdwn
new file mode 100644
index 0000000..fc1e572
--- /dev/null
+++ b/doc/design/assistant/blog/day_139__catch_up.mdwn
@@ -0,0 +1,11 @@
+Got progress bars working for glacier. This needed some glacier-cli
+changes, which Robie helpfully made earlier.
+
+Spent some hours getting caught up and responding to bug reports, etc.
+
+Spent a while trying to make git-annex commands that fail to find
+any matching files to act on print a useful warning message,
+rather than the current nothing. Concluded this will be surprisingly
+hard to do, due to the multiple seek passes some commands perform. Update:
+Thought of a non-invasive and not too ugly way to do this while on my
+evening walk, and this wart is gone.
diff --git a/doc/design/assistant/blog/day_36__minimal_test_case.mdwn b/doc/design/assistant/blog/day_36__minimal_test_case.mdwn
index b1877f9..b77da14 100644
--- a/doc/design/assistant/blog/day_36__minimal_test_case.mdwn
+++ b/doc/design/assistant/blog/day_36__minimal_test_case.mdwn
@@ -1,7 +1,7 @@
Managed to find a minimal, 20 line test case for at least one of the ways
git-annex was hanging with GHC's threaded runtime. Sent it off to
haskell-cafe for analysis.
-[thread](http://news.gmane.org/gmane.comp.lang.haskell.cafe)
+[thread](http://thread.gmane.org/gmane.comp.lang.haskell.cafe/99334)
Further managed to narrow the bug down to MissingH's use of logging code,
that git-annex doesn't use. [bug report](http://bugs.debian.org/681621).
diff --git a/doc/design/assistant/cloud.mdwn b/doc/design/assistant/cloud.mdwn
index 9bb8696..6b62c9c 100644
--- a/doc/design/assistant/cloud.mdwn
+++ b/doc/design/assistant/cloud.mdwn
@@ -2,23 +2,6 @@ The [[syncing]] design assumes the network is connected. But it's often
not in these pre-IPV6 days, so the cloud needs to be used to bridge between
LANS.
-## more cloud providers
-
-Git-annex already supports storing large files in
-several cloud providers via [[special_remotes]].
-More should be added, such as:
-
-* Google drive (attractive because it's free, only 5 gb tho)
-* OpenStack Swift (teh future)
-* Box.com (it's free, and current method is hard to set up and a sorta
- shakey; a better method would be to use its API)
-* Dropbox? That would be ironic.. Via its API, presumably.
-* [[Amazon Glacier|todo/special_remote_for_amazon_glacier]]
-* [nimbus.io](https://nimbus.io/) Fairly low prices ($0.06/GB);
- REST API; free software
-
-See poll at [[polls/prioritizing_special_remotes]].
-
## The cloud notification problem **done**
Alice and Bob have repos, and there is a cloud remote they both share.
diff --git a/doc/design/assistant/more_cloud_providers.mdwn b/doc/design/assistant/more_cloud_providers.mdwn
new file mode 100644
index 0000000..9a4237e
--- /dev/null
+++ b/doc/design/assistant/more_cloud_providers.mdwn
@@ -0,0 +1,16 @@
+Git-annex already supports storing large files in
+several cloud providers via [[special_remotes]].
+More should be added, such as:
+
+* Google drive (attractive because it's free, only 5 gb tho)
+* Owncloud (has several [providers](http://owncloud.org/providers/);
+ at least one provides 5 gb free; open DAV based API)
+* OpenStack Swift (teh future)
+* Box.com (it's free, and current method is hard to set up and a sorta
+ shakey; a better method would be to use its API) **done**
+* Dropbox? That would be ironic.. Via its API, presumably.
+* [[Amazon Glacier|todo/special_remote_for_amazon_glacier]] **done**
+* [nimbus.io](https://nimbus.io/) Fairly low prices ($0.06/GB);
+ REST API; free software
+
+See poll at [[polls/prioritizing_special_remotes]].
diff --git a/doc/design/assistant/polls/prioritizing_special_remotes.mdwn b/doc/design/assistant/polls/prioritizing_special_remotes.mdwn
index 2f7f4bc..93c4b37 100644
--- a/doc/design/assistant/polls/prioritizing_special_remotes.mdwn
+++ b/doc/design/assistant/polls/prioritizing_special_remotes.mdwn
@@ -6,7 +6,7 @@ locally paired systems, and remote servers with rsync.
Help me prioritize my work: What special remote would you most like
to use with the git-annex assistant?
-[[!poll open=yes 15 "Amazon S3 (done)" 11 "Amazon Glacier" 8 "Box.com" 61 "My phone (or MP3 player)" 15 "Tahoe-LAFS" 5 "OpenStack SWIFT" 23 "Google Drive"]]
+[[!poll open=yes 15 "Amazon S3 (done)" 12 "Amazon Glacier (done)" 9 "Box.com (done)" 63 "My phone (or MP3 player)" 16 "Tahoe-LAFS" 6 "OpenStack SWIFT" 23 "Google Drive"]]
This poll is ordered with the options I consider easiest to build
listed first. Mostly because git-annex already supports them and they
diff --git a/doc/design/assistant/progressbars.mdwn b/doc/design/assistant/progressbars.mdwn
index 61e19ba..19b7003 100644
--- a/doc/design/assistant/progressbars.mdwn
+++ b/doc/design/assistant/progressbars.mdwn
@@ -10,7 +10,6 @@ This is one of those potentially hidden but time consuming problems.
## downloads
* Watch temp file as it's coming in and use its size.
- This is the only option for some special remotes (ie, non-rsync).
Can either poll every .5 seconds or so to check file size, or
could use inotify. **done**
@@ -23,7 +22,9 @@ the MeterUpdate callback as the upload progresses.
* rsync: **done**
* directory: **done**
* web: Not applicable; does not upload
-* S3: TODO
+* webdav: **done**
+* S3: **done**
+* glacier: **done**
* bup: TODO
* hook: Would require the hook interface to somehow do this, which seems
too complicated. So skipping.
diff --git a/doc/design/assistant/transfer_control.mdwn b/doc/design/assistant/transfer_control.mdwn
index ad5578c..c21c4a1 100644
--- a/doc/design/assistant/transfer_control.mdwn
+++ b/doc/design/assistant/transfer_control.mdwn
@@ -19,6 +19,9 @@ something smart with such remotes.
log is not updated in time, it will fail to drop unwanted content.
(There's a 10 second sleep there now to avoid the race, but that's hardly
a fix.)
+* When a file is renamed into an archive directory, it's not immediately
+ transferred to archive remotes. (Next expensive scan does successfully
+ cause the transfer to happen).
### dropping no longer preferred content
diff --git a/doc/design/assistant/xmpp.mdwn b/doc/design/assistant/xmpp.mdwn
index e31a149..001b529 100644
--- a/doc/design/assistant/xmpp.mdwn
+++ b/doc/design/assistant/xmpp.mdwn
@@ -11,10 +11,6 @@ who share a repository, that is stored in the [[cloud]].
See <http://git-annex.branchable.com/design/assistant/blog/day_114__xmpp/#comment-aaba579f92cb452caf26ac53071a6788>
* Assistant.Sync.manualPull doesn't handle XMPP remotes yet.
This is needed to handle getting back in sync after reconnection.
-* When pairing, sometimes both sides start to push, and the other side
- sends a PushRequest, and the two deadlock, neither doing anything.
- (Timeout eventually breaks this.)
- Maybe should allow one push and one receive-pack at a time?
## design goals
diff --git a/doc/forum/Building_git-annex-3.20121112-19309.mdwn b/doc/forum/Building_git-annex-3.20121112-19309.mdwn
new file mode 100644
index 0000000..68d5e32
--- /dev/null
+++ b/doc/forum/Building_git-annex-3.20121112-19309.mdwn
@@ -0,0 +1,78 @@
+Hi,
+
+I have problems building git-annex-3.20121112-19309, I receive the following error:
+
+...
+ Loading object (static) dist/build/git-annex/git-annex-tmp/Utility/libmounts.o ... done
+ final link ... done
+
+ Assistant/Alert.hs:66:30:
+ Warning: default newline style has changed, using an explicit $newline is recommended
+
+ Assistant/Alert.hs:69:31:
+ Warning: default newline style has changed, using an explicit $newline is recommended
+ [157 of 279] Compiling Assistant.Types.DaemonStatus ( Assistant/Types/DaemonStatus.hs, dist/build/git-annex/git-annex-tmp/Assistant/Types/DaemonStatus.o )
+ [158 of 279] Compiling Assistant.Monad ( Assistant/Monad.hs, dist/build/git-annex/git-annex-tmp/Assistant/Monad.o )
+
+ Assistant/Monad.hs:86:16:
+ Couldn't match expected type `Assistant a'
+ with actual type `Reader AssistantData a'
+ Expected type: (AssistantData -> a) -> Assistant a
+ Actual type: (AssistantData -> a) -> Reader AssistantData a
+ In the expression: reader
+ In an equation for `getAssistant': getAssistant = reader
+
+ Assistant/Monad.hs:93:15:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: st <- reader threadState
+ In the expression:
+ do { st <- reader threadState;
+ liftIO $ runThreadState st a }
+
+ Assistant/Monad.hs:99:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ liftIO $ io $ runAssistant d a }
+
+ Assistant/Monad.hs:105:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ runAssistant d a }
+
+ Assistant/Monad.hs:110:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ \ v -> runAssistant d $ a v }
+
+ Assistant/Monad.hs:115:14:
+ Couldn't match expected type `Assistant t0'
+ with actual type `Reader r0 a0'
+ In the return type of a call of `reader'
+ In a stmt of a 'do' block: d <- reader id
+ In the expression:
+ do { d <- reader id;
+ return $ \ v1 v2 -> runAssistant d (a v1 v2) }
+
+ Assistant/Monad.hs:120:12:
+ Couldn't match expected type `Assistant a0'
+ with actual type `Reader r0 a1'
+ In the return type of a call of `reader'
+ In the first argument of `(>>=)', namely `reader v'
+ In the expression: reader v >>= liftIO . io
+ cabal: Error: some packages failed to install:
+ git-annex-3.20121112 failed during the building phase. The exception was:
+ ExitFailure 1
diff --git a/doc/forum/Default_text__47__html_handler.mdwn b/doc/forum/Default_text__47__html_handler.mdwn
new file mode 100644
index 0000000..a6bf4aa
--- /dev/null
+++ b/doc/forum/Default_text__47__html_handler.mdwn
@@ -0,0 +1,2 @@
+I've had to change my default `text/html .html` handler from a text editor to a browser to support the opening sequence of `git annex webapp`. any other way around this? perhaps to have it open the page in the default browser rather than the default text/html handler?
+
diff --git a/doc/forum/How_to_restore_symlinks.mdwn b/doc/forum/How_to_restore_symlinks.mdwn
new file mode 100644
index 0000000..30fb070
--- /dev/null
+++ b/doc/forum/How_to_restore_symlinks.mdwn
@@ -0,0 +1 @@
+Somehow the symlinks have vanished from one directory of my repository. How can I restore them?
diff --git a/doc/forum/Push__47__Pull_with_the_Assistant.mdwn b/doc/forum/Push__47__Pull_with_the_Assistant.mdwn
new file mode 100644
index 0000000..315ad38
--- /dev/null
+++ b/doc/forum/Push__47__Pull_with_the_Assistant.mdwn
@@ -0,0 +1 @@
+If I use git-annex with a centralized bare git repository as [described here](http://git-annex.branchable.com/tips/centralized_git_repository_tutorial/), will the Assistant automatically `git push` and `git pull` the master and git-annex branches? Or does it basically just do a `git annex sync`?
diff --git a/doc/forum/Setup_of_rsync_special_remote_with_non-standard_ssh_port.mdwn b/doc/forum/Setup_of_rsync_special_remote_with_non-standard_ssh_port.mdwn
new file mode 100644
index 0000000..85755e9
--- /dev/null
+++ b/doc/forum/Setup_of_rsync_special_remote_with_non-standard_ssh_port.mdwn
@@ -0,0 +1,13 @@
+I want to setup a rsync special remote on my server with a non-standard ssh port.
+
+I tried the following steps:
+
+ git annex initremote rsync-encrypted type=rsync rsyncurl=1.2.3.4:/encrypted-annex encryption=AAAAAAAAA
+ git config remote.rsync-encrypted.annex-rsync-options "-e \'ssh -p 443\'"
+
+But I just get this error:
+
+ [2012-11-22 21:04:30 CET] read: rsync ["-e","'ssh","-p","443'","--progress","--recursive","--partial","-- partial-dir=.rsync-partial","/home/marco/annex/.git/annex/tmp/rsynctmp/15309/","1.2.3.4:/encrypted-annex"]
+ Missing trailing-' in remote-shell command.
+
+I tried some ways to escape the config but I don't have a clue. Anybody?
diff --git a/doc/forum/Special_remote_without_chmod.mdwn b/doc/forum/Special_remote_without_chmod.mdwn
new file mode 100644
index 0000000..46ace44
--- /dev/null
+++ b/doc/forum/Special_remote_without_chmod.mdwn
@@ -0,0 +1,12 @@
+Apparently, the tablet computer I'm using (Galaxy Tab 2, non-rooted) does not export the sd cards via mass storage protocol (UMS) any more. In order to be able to use a special remote for my mp3-files, I installed an ftp/ssh server on the tablet device and used curlftpfs/sshfs (fuse file systems) to mount the sd cards on my local Linux machine. In this way, it is quite easy to setup the special remote (e.g. "git-annex initremote galaxy-tab type=directory directory=$HOME/mnt/galaxy-tab encryption=none").
+
+Problems arise, when files are transferred to the special remote. From the logs of the ftp server, I can see that the actual copy operation is successful (the data is written to the file system), but the subsequent "chmod" that changes the permissions to read-only fails. The output on the console of a "git-annex copy -t galaxy-tab" is
+
+ copy 01 some.mp3 (to galaxy-tab...)
+ failed
+ git-annex: copy: 1 failed
+
+
+Therefore my question: is it possible to perform a file transfer to a special remote (type=directory) without the final "chmod"-operation?
+
+Thank you.
diff --git a/doc/forum/get_and_copy_with_bare_repositories.mdwn b/doc/forum/get_and_copy_with_bare_repositories.mdwn
new file mode 100644
index 0000000..846887f
--- /dev/null
+++ b/doc/forum/get_and_copy_with_bare_repositories.mdwn
@@ -0,0 +1,7 @@
+is `git annex get` and `git annex copy --to somewhere` expected to work with bare repos? the [[bare repositories]] page doesn't indicate otherwise, but a `git annex get` does plain nothing in my setup.
+
+if it's supposed not to work, there should be an error message saying that and an indication on the [[bare repositories]], otherwise, how can i trace it down?
+
+in case it is just unimplemented for lack of use cases: my setup consists of several laptops using parts of a 200gb+ photo collection, a central trusted server that should host everything, and an external encrypted remote backup. clients *should* copy everything they add to both central locations, but i'd prefer the trusted server to sync the two of them too.
+
+`get` and `copy` usually operate on the current directory, which in case of a bare repo does not contain any relevant files, but i tried explicitly specifying files too. `git annex` should either look them up in master, or always operate on all files (as indexed in the `git-annex` branch) unconditionally.
diff --git a/doc/git-annex.mdwn b/doc/git-annex.mdwn
index 842139c..7646e43 100644
--- a/doc/git-annex.mdwn
+++ b/doc/git-annex.mdwn
@@ -568,6 +568,17 @@ subdirectories).
The repository should be specified using the name of a configured remote,
or the UUID or description of a repository.
+* --trust-glacier-inventory
+
+ Amazon Glacier inventories take hours to retrieve, and may not represent
+ the current state of a repository. So git-annex does not trust that
+ files that the inventory claims are in Glacier are really there.
+ This switch can be used to allow it to trust the inventory.
+
+ Be careful using this, especially if you or someone else might have recently
+ removed a file from Glacier. If you try to drop the only other copy of the
+ file, and this switch is enabled, you could lose data!
+
* --backend=name
Specifies which key-value backend to use. This can be used when
@@ -864,26 +875,36 @@ Here are all the supported configuration settings.
* `remote.<name>.rsyncurl`
Used by rsync special remotes, this configures
- the location of the rsync repository to use. Normally this is automaticaly
+ the location of the rsync repository to use. Normally this is automatically
set up by `git annex initremote`, but you can change it if needed.
* `remote.<name>.buprepo`
Used by bup special remotes, this configures
- the location of the bup repository to use. Normally this is automaticaly
+ the location of the bup repository to use. Normally this is automatically
set up by `git annex initremote`, but you can change it if needed.
* `remote.<name>.directory`
Used by directory special remotes, this configures
the location of the directory where annexed files are stored for this
- remote. Normally this is automaticaly set up by `git annex initremote`,
+ remote. Normally this is automatically set up by `git annex initremote`,
but you can change it if needed.
* `remote.<name>.s3`
Used to identify Amazon S3 special remotes.
- Normally this is automaticaly set up by `git annex initremote`.
+ Normally this is automatically set up by `git annex initremote`.
+
+* `remote.<name>.glacier`
+
+ Used to identify Amazon Glacier special remotes.
+ Normally this is automatically set up by `git annex initremote`.
+
+* `remote.<name>.webdav`
+
+ Used to identify webdav special remotes.
+ Normally this is automatically set up by `git annex initremote`.
* `remote.<name>.annex-xmppaddress`
diff --git a/doc/install/Debian/comment_5_38e6399083e10a6a274f35bddc15d4ac._comment b/doc/install/Debian/comment_5_38e6399083e10a6a274f35bddc15d4ac._comment
new file mode 100644
index 0000000..fae6c4d
--- /dev/null
+++ b/doc/install/Debian/comment_5_38e6399083e10a6a274f35bddc15d4ac._comment
@@ -0,0 +1,18 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawk3eiQwrpDGJ3MJb9NWB84m4tzQ6XjVZnY"
+ nickname="Allard"
+ subject="wheezy support"
+ date="2012-11-23T20:47:58Z"
+ content="""
+Hey Joey,
+
+As a backer, I'd like to see a backport of git annex assistant to wheezy.
+
+It is currently impossible to get this assistant in wheezy without compiling it with cabal.
+
+It would be nice to see it in backports or something :)
+
+Best,
+
+Allard
+"""]]
diff --git a/doc/install/Debian/comment_6_2e7bbdbaabbfb9d89de22e913066e822._comment b/doc/install/Debian/comment_6_2e7bbdbaabbfb9d89de22e913066e822._comment
new file mode 100644
index 0000000..c4ce6b3
--- /dev/null
+++ b/doc/install/Debian/comment_6_2e7bbdbaabbfb9d89de22e913066e822._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://svend.ciffer.net/"
+ ip="2001:4978:f:52e::2"
+ subject="comment 6"
+ date="2012-11-23T21:38:29Z"
+ content="""
+The git-annex packages in unstable install on testing (wheezy).
+"""]]
diff --git a/doc/install/OSX/comment_10_4d15bfc4fc26e7249953bebfbb09e0aa._comment b/doc/install/OSX/comment_10_4d15bfc4fc26e7249953bebfbb09e0aa._comment
new file mode 100644
index 0000000..d655da7
--- /dev/null
+++ b/doc/install/OSX/comment_10_4d15bfc4fc26e7249953bebfbb09e0aa._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkp-1EQboBDqZ05MxOHNkwNQDM4luWYioA"
+ nickname="Charles"
+ subject="comment 10"
+ date="2012-11-15T13:26:57Z"
+ content="""
+Installing it with brew, I had to do the following steps before the final `cabal` command:
+
+* `cabal install c2hs`
+* add `$HOME/.cabal/bin` to my `$PATH` (so that c2hs program can be found)
+"""]]
diff --git a/doc/install/OSX/comment_9_c6b1b31d16f2144ad08abd8c767b6ab9._comment b/doc/install/OSX/comment_9_c6b1b31d16f2144ad08abd8c767b6ab9._comment
new file mode 100644
index 0000000..faa7b1b
--- /dev/null
+++ b/doc/install/OSX/comment_9_c6b1b31d16f2144ad08abd8c767b6ab9._comment
@@ -0,0 +1,23 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnBEsNDl_6O4rHb2en3I0-fg-6fUxglaRQ"
+ nickname="chee"
+ subject="Recent install for OS X"
+ date="2012-11-13T04:40:05Z"
+ content="""
+if you are having trouble installing with `cabal install git-annex` at the moment, trouble of the XML kind, you'll need to do a couple things:
+
+`brew update`
+`brew install libxml2`
+`cabal update`
+`cabal install libxml --extra-include-dirs=/usr/local/Cellar/libxml2/2.8.0/include/libxml2 --extra-lib-dirs=/usr/local/Cellar/libxml2/2.8.0/lib`
+
+well, then i hit a brick wall.
+
+well.
+
+I got it to work by manually symlinking from `../Cellar/libxml2/2.8.0/lib/`* into `/usr/local` and from `../../Cellar/libxml2/2.8.0/lib/` to `/usr/local/pkgconfig`, but i can't recommend it or claim to be too proud about it all.
+
+OS X already has an old libxml knocking around so this might ruin everything for me.
+
+let's find out !
+"""]]
diff --git a/doc/install/comment_2_fd560811c57df5cbc3976639642b8b19._comment b/doc/install/comment_2_fd560811c57df5cbc3976639642b8b19._comment
new file mode 100644
index 0000000..2107390
--- /dev/null
+++ b/doc/install/comment_2_fd560811c57df5cbc3976639642b8b19._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkN91jAhoesnVI9TtWANaBPaYjd1V9Pag8"
+ nickname="Benjamin"
+ subject="Package for older OS X"
+ date="2012-11-17T12:36:45Z"
+ content="""
+Is there an option to provide application bundle for older versions of OS X? The last time I tried the bundle wouldn't work under 10.5. If no specific features from newer OS X versions are required, it could be enough to add a simple switch when building.
+"""]]
diff --git a/doc/install/comment_3_08613b2e2318680508483d204a43da76._comment b/doc/install/comment_3_08613b2e2318680508483d204a43da76._comment
new file mode 100644
index 0000000..6992b95
--- /dev/null
+++ b/doc/install/comment_3_08613b2e2318680508483d204a43da76._comment
@@ -0,0 +1,75 @@
+[[!comment format=mdwn
+ username="http://edheil.wordpress.com/"
+ nickname="edheil"
+ subject="No luck running it on OS X Lion."
+ date="2012-11-21T06:07:55Z"
+ content="""
+here's the crash info:
+
+Process: git-annex [84369]
+Path: /Applications/git-annex.app/Contents/MacOS/bin/git-annex
+Identifier: git-annex
+Version: ??? (???)
+Code Type: X86-64 (Native)
+Parent Process: sh [84364]
+
+Date/Time: 2012-11-21 00:27:03.068 -0500
+OS Version: Mac OS X 10.7.5 (11G63)
+Report Version: 9
+
+Crashed Thread: 0
+
+Exception Type: EXC_BREAKPOINT (SIGTRAP)
+Exception Codes: 0x0000000000000002, 0x0000000000000000
+
+Application Specific Information:
+dyld: launch, loading dependent libraries
+
+Dyld Error Message:
+ Library not loaded: /opt/local/lib/libgss.3.dylib
+ Referenced from: /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libgsasl.7.dylib
+ Reason: image not found
+
+Binary Images:
+ 0x105baa000 - 0x107b89fe7 +git-annex (??? - ???) <45311C82-015C-3F87-9F9B-01325EFBD0D9> /Applications/git-annex.app/Contents/MacOS/bin/git-annex
+ 0x10822d000 - 0x10823eff7 +libz.1.dylib (1.2.7 - compatibility 1.0.0) <57016CC1-AD54-337E-A983-457933B24D35> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libz.1.dylib
+ 0x108245000 - 0x10827dff7 +libpcre.1.dylib (2.1.0 - compatibility 2.0.0) <431BD758-FA7B-38B3-AB7E-6511EC06152E> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libpcre.1.dylib
+ 0x108283000 - 0x1083b3ff7 +libxml2.2.dylib (11.0.0 - compatibility 11.0.0) <0663F820-D436-3304-B12F-9158901087EB> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libxml2.2.dylib
+ 0x1083e9000 - 0x108400fef +libgsasl.7.dylib (16.6.0 - compatibility 16.0.0) <41503EE1-D58B-385C-AC2E-BEAA7D0D4E38> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libgsasl.7.dylib
+ 0x10840a000 - 0x1084a1fff +libgnutls.26.dylib (49.3.0 - compatibility 49.0.0) <0320352A-3336-3B6B-A7DE-F3069669AD27> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libgnutls.26.dylib
+ 0x1084c3000 - 0x1084f1ff7 +libidn.11.dylib (18.8.0 - compatibility 18.0.0) <97073970-9370-3F85-B943-1B989EA41148> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libidn.11.dylib
+ 0x1084fc000 - 0x1085f5ff7 +libiconv.2.dylib (8.1.0 - compatibility 8.0.0) <1B8D243B-F617-301E-97B1-EE78A72617AB> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libiconv.2.dylib
+ 0x108606000 - 0x108606fff +libcharset.1.dylib (2.0.0 - compatibility 2.0.0) <E3797413-2AA3-3698-B393-E1203B4799A0> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libcharset.1.dylib
+ 0x10860c000 - 0x108665fef +libgmp.10.dylib (11.5.0 - compatibility 11.0.0) <EE407B22-0F44-38B6-9937-10CA6A529F37> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/libgmp.10.dylib
+ 0x108675000 - 0x1086a2fe7 +libSystem.B.dylib (159.1.0 - compatibility 1.0.0) <7BEBB139-50BB-3112-947A-F4AA168F991C> /Applications/git-annex.app/Contents/MacOS/usr/lib/libSystem.B.dylib
+ 0x1086b4000 - 0x1086c8fef +libgcc_s.1.dylib (??? - ???) <3C5BF0B8-B1E9-3B41-B52F-F7499687217C> /Applications/git-annex.app/Contents/MacOS/opt/local/lib/gcc47/libgcc_s.1.dylib
+ 0x1086d8000 - 0x1086f5ff7 +liblzma.5.dylib (6.4.0 - compatibility 6.0.0) <1D682E06-EB89-34CA-855A-AEF611C4DF86> /usr/local/lib/liblzma.5.dylib
+ 0x7fff657aa000 - 0x7fff657debaf dyld (195.6 - ???) <0CD1B35B-A28F-32DA-B72E-452EAD609613> /usr/lib/dyld
+ 0x7fff8b669000 - 0x7fff8b672ff7 libsystem_notify.dylib (80.1.0 - compatibility 1.0.0) <A4D651E3-D1C6-3934-AD49-7A104FD14596> /usr/lib/system/libsystem_notify.dylib
+ 0x7fff8b6e4000 - 0x7fff8b6e5ff7 libsystem_sandbox.dylib (??? - ???) <2A09E4DA-F47C-35CB-B70C-E0492BA9F20E> /usr/lib/system/libsystem_sandbox.dylib
+ 0x7fff8c000000 - 0x7fff8c006ff7 libunwind.dylib (30.0.0 - compatibility 1.0.0) <1E9C6C8C-CBE8-3F4B-A5B5-E03E3AB53231> /usr/lib/system/libunwind.dylib
+ 0x7fff8c1c4000 - 0x7fff8c1c5ff7 libremovefile.dylib (21.1.0 - compatibility 1.0.0) <739E6C83-AA52-3C6C-A680-B37FE2888A04> /usr/lib/system/libremovefile.dylib
+ 0x7fff8cf13000 - 0x7fff8cf4efff libsystem_info.dylib (??? - ???) <35F90252-2AE1-32C5-8D34-782C614D9639> /usr/lib/system/libsystem_info.dylib
+ 0x7fff8dbc3000 - 0x7fff8dbc8fff libcache.dylib (47.0.0 - compatibility 1.0.0) <1571C3AB-BCB2-38CD-B3B2-C5FC3F927C6A> /usr/lib/system/libcache.dylib
+ 0x7fff8dbc9000 - 0x7fff8dbd0fff libcopyfile.dylib (85.1.0 - compatibility 1.0.0) <0AB51EE2-E914-358C-AC19-47BC024BDAE7> /usr/lib/system/libcopyfile.dylib
+ 0x7fff8dbdf000 - 0x7fff8dbedfff libdispatch.dylib (187.10.0 - compatibility 1.0.0) <8E03C652-922A-3399-93DE-9EA0CBFA0039> /usr/lib/system/libdispatch.dylib
+ 0x7fff8dcf2000 - 0x7fff8dcf7ff7 libsystem_network.dylib (??? - ???) <5DE7024E-1D2D-34A2-80F4-08326331A75B> /usr/lib/system/libsystem_network.dylib
+ 0x7fff8e1bb000 - 0x7fff8e298fef libsystem_c.dylib (763.13.0 - compatibility 1.0.0) <41B43515-2806-3FBC-ACF1-A16F35B7E290> /usr/lib/system/libsystem_c.dylib
+ 0x7fff8e6e2000 - 0x7fff8e6eafff libsystem_dnssd.dylib (??? - ???) <584B321E-5159-37CD-B2E7-82E069C70AFB> /usr/lib/system/libsystem_dnssd.dylib
+ 0x7fff8fab6000 - 0x7fff8fab8fff libquarantine.dylib (36.7.0 - compatibility 1.0.0) <8D9832F9-E4A9-38C3-B880-E5210B2353C7> /usr/lib/system/libquarantine.dylib
+ 0x7fff8fc3e000 - 0x7fff8fc80ff7 libcommonCrypto.dylib (55010.0.0 - compatibility 1.0.0) <BB770C22-8C57-365A-8716-4A3C36AE7BFB> /usr/lib/system/libcommonCrypto.dylib
+ 0x7fff90fa3000 - 0x7fff90fa9fff libmacho.dylib (800.0.0 - compatibility 1.0.0) <165514D7-1BFA-38EF-A151-676DCD21FB64> /usr/lib/system/libmacho.dylib
+ 0x7fff90faa000 - 0x7fff90fabfff libunc.dylib (24.0.0 - compatibility 1.0.0) <337960EE-0A85-3DD0-A760-7134CF4C0AFF> /usr/lib/system/libunc.dylib
+ 0x7fff910b4000 - 0x7fff910b8fff libmathCommon.A.dylib (2026.0.0 - compatibility 1.0.0) <FF83AFF7-42B2-306E-90AF-D539C51A4542> /usr/lib/system/libmathCommon.A.dylib
+ 0x7fff916b9000 - 0x7fff916bdfff libdyld.dylib (195.6.0 - compatibility 1.0.0) <FFC59565-64BD-3B37-90A4-E2C3A422CFC1> /usr/lib/system/libdyld.dylib
+ 0x7fff916be000 - 0x7fff916defff libsystem_kernel.dylib (1699.32.7 - compatibility 1.0.0) <66C9F9BD-C7B3-30D4-B1A0-03C8A6392351> /usr/lib/system/libsystem_kernel.dylib
+ 0x7fff916df000 - 0x7fff916e0fff libdnsinfo.dylib (395.11.0 - compatibility 1.0.0) <853BAAA5-270F-3FDC-B025-D448DB72E1C3> /usr/lib/system/libdnsinfo.dylib
+ 0x7fff929f8000 - 0x7fff929fdfff libcompiler_rt.dylib (6.0.0 - compatibility 1.0.0) <98ECD5F6-E85C-32A5-98CD-8911230CB66A> /usr/lib/system/libcompiler_rt.dylib
+ 0x7fff93a3c000 - 0x7fff93a3cfff libkeymgr.dylib (23.0.0 - compatibility 1.0.0) <61EFED6A-A407-301E-B454-CD18314F0075> /usr/lib/system/libkeymgr.dylib
+ 0x7fff97139000 - 0x7fff9713aff7 libsystem_blocks.dylib (53.0.0 - compatibility 1.0.0) <8BCA214A-8992-34B2-A8B9-B74DEACA1869> /usr/lib/system/libsystem_blocks.dylib
+ 0x7fff9724f000 - 0x7fff9726cfff libxpc.dylib (77.19.0 - compatibility 1.0.0) <9F57891B-D7EF-3050-BEDD-21E7C6668248> /usr/lib/system/libxpc.dylib
+ 0x7fff97cfe000 - 0x7fff97d08ff7 liblaunch.dylib (392.39.0 - compatibility 1.0.0) <8C235D13-2928-30E5-9E12-2CC3D6324AE2> /usr/lib/system/liblaunch.dylib
+
+
+
+"""]]
diff --git a/doc/install/fromscratch.mdwn b/doc/install/fromscratch.mdwn
index 18759d6..1aebc68 100644
--- a/doc/install/fromscratch.mdwn
+++ b/doc/install/fromscratch.mdwn
@@ -3,6 +3,7 @@ quite a lot.
* Haskell stuff
* [The Haskell Platform](http://haskell.org/platform/) (GHC 7.4 or newer)
+ * [mtl](http://hackage.haskell.org/package/mtl) (2.1.1 or newer)
* [MissingH](http://github.com/jgoerzen/missingh/wiki)
* [pcre-light](http://hackage.haskell.org/package/pcre-light)
* [utf8-string](http://hackage.haskell.org/package/utf8-string)
@@ -18,8 +19,8 @@ quite a lot.
* [bloomfilter](http://hackage.haskell.org/package/bloomfilter)
* [edit-distance](http://hackage.haskell.org/package/edit-distance)
* [hS3](http://hackage.haskell.org/package/hS3) (optional)
+ * [DAV](http://hackage.haskell.org/package/DAV) (optional)
* [SafeSemaphore](http://hackage.haskell.org/package/SafeSemaphore)
- * [async](http://hackage.haskell.org/package/async)
* Optional haskell stuff, used by the [[assistant]] and its webapp (edit Makefile to disable)
* [stm](http://hackage.haskell.org/package/stm)
(version 2.3 or newer)
diff --git a/doc/news/version_3.20121009.mdwn b/doc/news/version_3.20121009.mdwn
deleted file mode 100644
index c4f9664..0000000
--- a/doc/news/version_3.20121009.mdwn
+++ /dev/null
@@ -1,25 +0,0 @@
-git-annex 3.20121009 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * watch, assistant: It's now safe to git annex unlock files while
- the watcher is running, as well as modify files checked into git
- as normal files. Additionally, .gitignore settings are now honored.
- Closes: #[689979](http://bugs.debian.org/689979)
- * group, ungroup: New commands to indicate groups of repositories.
- * webapp: Adds newly created repositories to one of these groups:
- clients, drives, servers
- * vicfg: New command, allows editing (or simply viewing) most
- of the repository configuration settings stored in the git-annex branch.
- * Added preferred content expressions, configurable using vicfg.
- * get --auto: If the local repository has preferred content
- configured, only get that content.
- * drop --auto: If the repository the content is dropped from has
- preferred content configured, drop only content that is not preferred.
- * copy --auto: Only transfer content that the destination repository prefers.
- * assistant: Now honors preferred content settings when deciding what to
- transfer.
- * --copies=group:number can now be used to match files that are present
- in a specified number of repositories in a group.
- * Added --smallerthan, --largerthan, and --inall limits.
- * Only build-depend on libghc-clientsession-dev on arches that will have
- the webapp.
- * uninit: Unset annex.version. Closes: #[689852](http://bugs.debian.org/689852)"""]] \ No newline at end of file
diff --git a/doc/news/version_3.20121126.mdwn b/doc/news/version_3.20121126.mdwn
new file mode 100644
index 0000000..3e7bbf9
--- /dev/null
+++ b/doc/news/version_3.20121126.mdwn
@@ -0,0 +1,27 @@
+git-annex 3.20121126 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * New webdav and Amazon glacier special remotes.
+ * Display a warning when a non-existing file or directory is specified.
+ * webapp: Added configurator for Box.com.
+ * webapp: Show error messages to user when testing XMPP creds.
+ * Fix build of assistant without yesod.
+ * webapp: The list of repositories refreshes when new repositories are
+ added, including when new repository configurations are pushed in from
+ remotes.
+ * OSX: Fix RunAtLoad value in plist file.
+ * Getting a file from chunked directory special remotes no longer buffers
+ it all in memory.
+ * S3: Added progress display for uploading and downloading.
+ * directory special remote: Made more efficient and robust.
+ * Bugfix: directory special remote could loop forever storing a key
+ when a too small chunksize was configured.
+ * Allow controlling whether login credentials for S3 and webdav are
+ committed to the repository, by setting embedcreds=yes|no when running
+ initremote.
+ * Added smallarchive repository group, that only archives files that are
+ in archive directories. Used by default for glacier when set up in the
+ webapp.
+ * assistant: Fixed handling of toplevel archive directory and
+ client repository group.
+ * assistant: Apply preferred content settings when a new symlink
+ is created, or a symlink gets renamed. Made archive directories work."""]] \ No newline at end of file
diff --git a/doc/preferred_content.mdwn b/doc/preferred_content.mdwn
index ac2cd1e..499cf62 100644
--- a/doc/preferred_content.mdwn
+++ b/doc/preferred_content.mdwn
@@ -92,7 +92,7 @@ to "standard", and put it in one of these groups:
All content is preferred, unless it's in a "archive" directory.
-`exclude=*/archive/*`
+`exclude=*/archive/* and exclude=archive/*`
### transfer
@@ -104,7 +104,7 @@ USB drive used in a sneakernet.
The preferred content expression for these causes them to get and retain
data until all clients have a copy.
-`not (inallgroup=client and copies=client:2) and exclude=*/archive/*`
+`not (inallgroup=client and copies=client:2) and exclude=*/archive/* and exclude=archive/*`
The "copies=client:2" part of the above handles the case where
there is only one client repository. It makes a transfer repository
@@ -112,17 +112,24 @@ speculatively prefer content in this case, even though it as of yet
has nowhere to transfer it to. Presumably, another client repository
will be added later.
-### archive
+### backup
+
+All content is preferred.
+
+### small archive
+
+Only prefers content that's located in an "archive" directory, and
+only if it's not already been archived somewhere else.
+
+`(include=*/archive/* or include=archive/*) and not (copies=archive:1 or copies=smallarchive:1)`
+
+### full archive
All content is preferred, unless it's already been archived somewhere else.
-`not copies=archive:1`
+`not (copies=archive:1 or copies=smallarchive:1)`
Note that if you want to archive multiple copies (not a bad idea!),
you should instead configure all your archive repositories with a
version of the above preferred content expression with a larger
number of copies.
-
-### backup
-
-All content is preferred.
diff --git a/doc/special_remotes.mdwn b/doc/special_remotes.mdwn
index 65fcb87..2cc0cf4 100644
--- a/doc/special_remotes.mdwn
+++ b/doc/special_remotes.mdwn
@@ -8,9 +8,11 @@ types of remotes. These can be used just like any normal remote by git-annex.
They cannot be used by other git commands though.
* [[S3]] (Amazon S3, and other compatible services)
+* [[Amazon_Glacier|glacier]]
* [[bup]]
* [[directory]]
* [[rsync]]
+* [[webdav]]
* [[web]]
* [[hook]]
@@ -19,6 +21,7 @@ into many cloud services. Here are specific instructions
for various cloud things:
* [[tips/using_Amazon_S3]]
+* [[tips/using_Amazon_Glacier]]
* [[tips/Internet_Archive_via_S3]]
* [[tahoe-lafs|forum/tips:_special__95__remotes__47__hook_with_tahoe-lafs]]
* [[tips/using_box.com_as_a_special_remote]]
diff --git a/doc/special_remotes/S3.mdwn b/doc/special_remotes/S3.mdwn
index 79a8e58..5a7ecc2 100644
--- a/doc/special_remotes/S3.mdwn
+++ b/doc/special_remotes/S3.mdwn
@@ -8,21 +8,29 @@ See [[tips/using_Amazon_S3]] and
The standard environment variables `AWS_ACCESS_KEY_ID` and
`AWS_SECRET_ACCESS_KEY` are used to supply login credentials
-for Amazon. When encryption is enabled, they are stored in encrypted form
-by `git annex initremote`. Without encryption, they are stored in a
-file only you can read inside the local git repository. So you do not
-need to keep the environment variables set after the initial
-initalization of the remote.
+for Amazon. You need to set these only when running
+`git annex initremote`, as they will be cached in a file only you
+can read inside the local git repository.
A number of parameters can be passed to `git annex initremote` to configure
the S3 remote.
-* `encryption` - Required. Either "none" to disable encryption
- (not recommended),
+* `encryption` - Required. Either "none" to disable encryption (not recommended),
or a value that can be looked up (using gpg -k) to find a gpg encryption
- key that will be given access to the remote. Note that additional gpg
- keys can be given access to a remote by rerunning initremote with
- the new key id. See [[encryption]].
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to access the encrypted data (use with caution).
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
+
+* `embedcreds` - Optional. Set to "yes" to embed the login credentials inside
+ the git repository, which allows other clones to also access them. This is
+ the default when gpg encryption is enabled; the credentials are stored
+ encrypted and only those with the repository's keys can access them.
+
+ It is not the default when using shared encryption, or no encryption.
+ Think carefully about who can access your repository before using
+ embedcreds without gpg encryption.
* `datacenter` - Defaults to "US". Other values include "EU",
"us-west-1", and "ap-southeast-1".
diff --git a/doc/special_remotes/bup.mdwn b/doc/special_remotes/bup.mdwn
index e59ff24..519b560 100644
--- a/doc/special_remotes/bup.mdwn
+++ b/doc/special_remotes/bup.mdwn
@@ -22,9 +22,11 @@ These parameters can be passed to `git annex initremote` to configure bup:
* `encryption` - Required. Either "none" to disable encryption of content
stored in bup (ssh will still be used to transport it securely),
or a value that can be looked up (using gpg -k) to find a gpg encryption
- key that will be given access to the remote. Note that additional gpg
- keys can be given access to a remote by rerunning initremote with
- the new key id. See [[encryption]].
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to access the encrypted data (use with caution).
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
* `buprepo` - Required. This is passed to `bup` as the `--remote`
to use to store data. To create the repository,`bup init` will be run.
diff --git a/doc/special_remotes/directory.mdwn b/doc/special_remotes/directory.mdwn
index 7194e0d..7fdfdfc 100644
--- a/doc/special_remotes/directory.mdwn
+++ b/doc/special_remotes/directory.mdwn
@@ -10,12 +10,14 @@ the drive's mountpoint as a directory remote.
These parameters can be passed to `git annex initremote` to configure the
remote:
-* `encryption` - Required. Either "none" to disable encryption of content
- stored in the directory,
+* `encryption` - Required. Either "none" to disable encryption,
or a value that can be looked up (using gpg -k) to find a gpg encryption
- key that will be given access to the remote. Note that additional gpg
- keys can be given access to a remote by rerunning initremote with
- the new key id. See [[encryption]].
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to decrypt the encrypted data.
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
+
* `chunksize` - Avoid storing files larger than the specified size in the
directory. For use on directories on mount points that have file size
limitations. The default is to never chunk files.
diff --git a/doc/special_remotes/glacier.mdwn b/doc/special_remotes/glacier.mdwn
new file mode 100644
index 0000000..79c3c38
--- /dev/null
+++ b/doc/special_remotes/glacier.mdwn
@@ -0,0 +1,50 @@
+This special remote type stores file contents in Amazon Glacier.
+
+To use it, you need to have [glacier-cli](http://github.com/basak/glacier-cli)
+installed.
+
+The unusual thing about Amazon Glacier is the multiple-hour delay it takes
+to retrieve information out of Glacier. To deal with this, commands like
+"git-annex get" request Glacier start the retrieval process, and will fail
+due to the data not yet being available. You can then wait approximately
+four hours, re-run the same command, and this time, it will actually
+download the data.
+
+## configuration
+
+The standard environment variables `AWS_ACCESS_KEY_ID` and
+`AWS_SECRET_ACCESS_KEY` are used to supply login credentials
+for Amazon. You need to set these only when running
+`git annex initremote`, as they will be cached in a file only you
+can read inside the local git repository.
+
+A number of parameters can be passed to `git annex initremote` to configure
+the Glacier remote.
+
+* `encryption` - Required. Either "none" to disable encryption (not recommended),
+ or a value that can be looked up (using gpg -k) to find a gpg encryption
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to access the encrypted data (use with caution).
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
+
+* `embedcreds` - Optional. Set to "yes" to embed the login credentials inside
+ the git repository, which allows other clones to also access them. This is
+ the default when gpg encryption is enabled; the credentials are stored
+ encrypted and only those with the repository's keys can access them.
+
+ It is not the default when using shared encryption, or no encryption.
+ Think carefully about who can access your repository before using
+ embedcreds without gpg encryption.
+
+* `datacenter` - Defaults to "us-east-1".
+
+* `vault` - By default, a vault name is chosen based on the remote name
+ and UUID. This can be specified to pick a vault name.
+
+* `fileprefix` - By default, git-annex places files in a tree rooted at the
+ top of the Glacier vault. When this is set, it's prefixed to the filenames
+ used. For example, you could set it to "foo/" in one special remote,
+ and to "bar/" in another special remote, and both special remotes could
+ then use the same vault.
diff --git a/doc/special_remotes/hook.mdwn b/doc/special_remotes/hook.mdwn
index 9a7dbf7..6867edb 100644
--- a/doc/special_remotes/hook.mdwn
+++ b/doc/special_remotes/hook.mdwn
@@ -25,11 +25,13 @@ Can you spot the potential data loss bugs in the above simple example?
These parameters can be passed to `git annex initremote`:
-* `encryption` - Required. Either "none" to disable encryption of content,
+* `encryption` - Required. Either "none" to disable encryption,
or a value that can be looked up (using gpg -k) to find a gpg encryption
- key that will be given access to the remote. Note that additional gpg
- keys can be given access to a remote by rerunning initremote with
- the new key id. See [[encryption]].
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to access the encrypted data.
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
* `hooktype` - Required. This specifies a collection of hooks to use for
this remote.
diff --git a/doc/special_remotes/rsync.mdwn b/doc/special_remotes/rsync.mdwn
index 2734692..f98c80a 100644
--- a/doc/special_remotes/rsync.mdwn
+++ b/doc/special_remotes/rsync.mdwn
@@ -15,11 +15,13 @@ Or for using rsync over SSH
These parameters can be passed to `git annex initremote` to configure rsync:
* `encryption` - Required. Either "none" to disable encryption of content
- stored in rsync,
- or a value that can be looked up (using `gpg -k`) to find a gpg encryption
- key that will be given access to the remote. Note that additional gpg
- keys can be given access to a remote by rerunning initremote with
- the new key id. See [[encryption]].
+ stored on the remote rsync server,
+ or a value that can be looked up (using gpg -k) to find a gpg encryption
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to decrypt the encrypted data.
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
* `rsyncurl` - Required. This is the url or `hostname:/directory` to
pass to rsync to tell it where to store content.
diff --git a/doc/special_remotes/webdav.mdwn b/doc/special_remotes/webdav.mdwn
new file mode 100644
index 0000000..570b6f9
--- /dev/null
+++ b/doc/special_remotes/webdav.mdwn
@@ -0,0 +1,45 @@
+This special remote type stores file contents in a WebDAV server.
+
+## configuration
+
+The environment variables `WEBDAV_USERNAME` and `WEBDAV_PASSWORD` are used
+to supply login credentials. You need to set these only when running
+`git annex initremote`, as they will be cached in a file only you
+can read inside the local git repository.
+
+A number of parameters can be passed to `git annex initremote` to configure
+the webdav remote.
+
+* `encryption` - Required. Either "none" to disable encryption (not recommended),
+ or a value that can be looked up (using gpg -k) to find a gpg encryption
+ key that will be given access to the remote, or "shared" which allows
+ every clone of the repository to access the encrypted data (use with caution).
+
+ Note that additional gpg keys can be given access to a remote by
+ rerunning initremote with the new key id. See [[encryption]].
+
+* `embedcreds` - Optional. Set to "yes" to embed the login credentials inside
+ the git repository, which allows other clones to also access them. This is
+ the default when gpg encryption is enabled; the credentials are stored
+ encrypted and only those with the repository's keys can access them.
+
+ It is not the default when using shared encryption, or no encryption.
+ Think carefully about who can access your repository before using
+ embedcreds without gpg encryption.
+
+* `url` - Required. The URL to the WebDAV directory where files will be
+ stored. This can be a subdirectory of a larger WebDAV repository, and will
+ be created as needed. Use of a https URL is strongly
+ encouraged, since HTTP basic authentication is used.
+
+* `chunksize` - Avoid storing files larger than the specified size in
+ WebDAV. For use when the WebDAV server has file size
+ limitations. The default is to never chunk files.
+ The value can be specified using any commonly used units.
+ Example: `chunksize=75 megabytes`
+ Note that enabling chunking on an existing remote with non-chunked
+ files is not recommended.
+
+Setup example:
+
+ # WEBDAV_USERNAME=joey@kitenet.net WEBDAV_PASSWORD=xxxxxxx git annex initremote box.com type=webdav url=https://www.box.com/dav/git-annex chunksize=75mb encryption=joey@kitenet.net
diff --git a/doc/tips/using_Amazon_Glacier.mdwn b/doc/tips/using_Amazon_Glacier.mdwn
new file mode 100644
index 0000000..5e7131e
--- /dev/null
+++ b/doc/tips/using_Amazon_Glacier.mdwn
@@ -0,0 +1,75 @@
+Amazon Glacier provides low-cost storage, well suited for archiving and
+backup. But it takes around 4 hours to get content out of Glacier.
+
+Recent versions of git-annex support Glacier. To use it, you need to have
+[glacier-cli](http://github.com/basak/glacier-cli) installed.
+
+First, export your Amazon AWS credentials:
+
+ # export AWS_ACCESS_KEY_ID="08TJMT99S3511WOZEP91"
+ # export AWS_SECRET_ACCESS_KEY="s3kr1t"
+
+Now, create a gpg key, if you don't already have one. This will be used
+to encrypt everything stored in Glacier, for your privacy. Once you have
+a gpg key, run `gpg --list-secret-keys` to look up its key id, something
+like "2512E3C7"
+
+Next, create the Glacier remote.
+
+ # git annex initremote glacier type=glacier encryption=2512E3C7
+ initremote glacier (encryption setup with gpg key C910D9222512E3C7) (gpg) ok
+
+The configuration for the Glacier remote is stored in git. So to make another
+repository use the same Glacier remote is easy:
+
+ # cd /media/usb/annex
+ # git pull laptop
+ # git annex initremote glacier
+ initremote glacier (gpg) ok
+
+Now the remote can be used like any other remote.
+
+ # git annex move my_cool_big_file --to glacier
+ move my_cool_big_file (gpg) (checking glacier...) (to glacier...) ok
+
+But, when you try to get a file out of Glacier, it'll queue a retrieval
+job:
+
+ # git annex get my_cool_big_file
+ get my_cool_big_file (from glacier...) (gpg)
+ glacier: queued retrieval job for archive 'GPGHMACSHA1--862afd4e67e3946587a9ef7fa5beb4e8f1aeb6b8'
+ Recommend you wait up to 4 hours, and then run this command again.
+ failed
+
+Like it says, you'll need to run the command again later. Let's remember to
+do that:
+
+ # at now + 4 hours
+ at> git annex get my_cool_big_file
+
+Another oddity of Glacier is that git-annex is never entirely sure
+if a file is still in Glacier. Glacier inventories take hours to retrieve,
+and even when retrieved do not necessarily represent the current state.
+
+So, git-annex plays it safe, and avoids trusting the inventory:
+
+ # git annex copy important_file --to glacier
+ copy important_file (gpg) (checking glacier...) (to glacier...) ok
+ # git annex drop important_file
+ drop important_file (gpg) (checking glacier...)
+ Glacier's inventory says it has a copy.
+ However, the inventory could be out of date, if it was recently removed.
+ (Use --trust-glacier if you're sure it's still in Glacier.)
+
+ (unsafe)
+ Could only verify the existence of 0 out of 1 necessary copies
+
+Like it says, you can use `--trust-glacier` if you're sure
+Glacier's inventory is correct and up-to-date.
+
+A final potential gotcha with Glacier is that glacier-cli keeps a local
+mapping of file names to Glacier archives. If this cache is lost, or
+you want to retrieve files on a different box than the one that put them in
+glacier, you'll need to use `glacier vault sync` to rebuild this cache.
+
+See [[special_remotes/Glacier]] for details.
diff --git a/doc/tips/using_Amazon_S3.mdwn b/doc/tips/using_Amazon_S3.mdwn
index 128819f..19997d0 100644
--- a/doc/tips/using_Amazon_S3.mdwn
+++ b/doc/tips/using_Amazon_S3.mdwn
@@ -2,7 +2,7 @@ git-annex extends git's usual remotes with some [[special_remotes]], that
are not git repositories. This way you can set up a remote using say,
Amazon S3, and use git-annex to transfer files into the cloud.
-First, export your S3 credentials:
+First, export your Amazon AWS credentials:
# export AWS_ACCESS_KEY_ID="08TJMT99S3511WOZEP91"
# export AWS_SECRET_ACCESS_KEY="s3kr1t"
diff --git a/doc/tips/using_box.com_as_a_special_remote.mdwn b/doc/tips/using_box.com_as_a_special_remote.mdwn
index cafbc03..6616d0a 100644
--- a/doc/tips/using_box.com_as_a_special_remote.mdwn
+++ b/doc/tips/using_box.com_as_a_special_remote.mdwn
@@ -2,8 +2,19 @@
for providing 50 gb of free storage if you sign up with its Android client.
(Or a few gb free otherwise.)
-With a little setup, git-annex can use Box as a
-[[special remote|special_remotes]].
+git-annex can use Box as a [[special remote|special_remotes]].
+Recent versions of git-annex make this very easy to set up:
+
+ WEBDAV_USERNAME=you@example.com WEBDAV_PASSWORD=xxxxxxx git annex initremote box.com type=webdav url=https://www.box.com/dav/git-annex chunksize=75mb encryption=you@example.com
+
+Note the use of chunksize; Box has a 100 mb maximum file size, and this
+breaks up large files into chunks before that limit is reached.
+
+# old davfs2 method
+
+This method is deprecated, but still documented here just in case.
+Note that the files stored using this method cannot reliably be retrieved
+using the webdav special remote.
## davfs2 setup
diff --git a/doc/todo/resuming_encrypted_uploads.mdwn b/doc/todo/resuming_encrypted_uploads.mdwn
new file mode 100644
index 0000000..b3aaa7f
--- /dev/null
+++ b/doc/todo/resuming_encrypted_uploads.mdwn
@@ -0,0 +1,22 @@
+Resuming interrupted uploads to encrypted special remotes is not currently
+possible, because gpg does not produce consistent output. Special remotes
+that could support resuming include rsync and glacier.
+
+Without consistent output, git-annex would need to locally cache the encrypted
+file, and reuse that cache when resuming an upload. This would make
+encrypted uploads more expensive in terms of both file IO and disk space
+used.
+
+[It would be possible to write to the cache at the same time the special
+remote is being fed data, and if the special remote upload fails, continue
+writing the rest of the file. That would avoid half the overhead, since
+the file would not need to be read from, just written to. (Although OS
+caching may accomplish the same thing.)]
+
+Also, `git annex unused` would need to show temp files for uploads,
+the same as it currently shows temp files for downloads, and users would
+sometimes need to manually dropunused old uploads, that never completed.
+
+The question, then, is whether resuming uploads is useful enough to add
+this overhead and user-visible complexity.
+--[[Joey]]
diff --git a/doc/todo/special_remote_for_amazon_glacier.mdwn b/doc/todo/special_remote_for_amazon_glacier.mdwn
index a6e524c..0fa77b5 100644
--- a/doc/todo/special_remote_for_amazon_glacier.mdwn
+++ b/doc/todo/special_remote_for_amazon_glacier.mdwn
@@ -18,8 +18,13 @@ run, or files to transfer, at that point.
--[[Joey]]
+> [[done]]! --[[Joey]]
+
-----
> In the coming months, Amazon S3 will introduce an option that will allow customers to seamlessly move data between Amazon S3 and Amazon Glacier based on data lifecycle policies.
-- <http://aws.amazon.com/glacier/faqs/#How_should_I_choose_between_Amazon_Glacier_and_Amazon_S3>
+
+>> They did, but it's IMHO not very useful for git-annex. It's rather
+>> intended to allow aging S3 storage out to Glacier. --[[Joey]]
diff --git a/git-annex.1 b/git-annex.1
index 448c706..3b73984 100644
--- a/git-annex.1
+++ b/git-annex.1
@@ -505,6 +505,16 @@ Overrides trust settings for a repository. May be specified more than once.
The repository should be specified using the name of a configured remote,
or the UUID or description of a repository.
.IP
+.IP "\-\-trust\-glacier\-inventory"
+Amazon Glacier inventories take hours to retrieve, and may not represent
+the current state of a repository. So git\-annex does not trust that
+files that the inventory claims are in Glacier are really there.
+This switch can be used to allow it to trust the inventory.
+.IP
+Be careful using this, especially if you or someone else might have recently
+removed a file from Glacier. If you try to drop the only other copy of the
+file, and this switch is enabled, you could lose data!
+.IP
.IP "\-\-backend=name"
Specifies which key\-value backend to use. This can be used when
adding a file to the annex, or migrating a file. Once files
@@ -754,23 +764,31 @@ header. This overrides annex.http\-headers.
.IP
.IP "remote.<name>.rsyncurl"
Used by rsync special remotes, this configures
-the location of the rsync repository to use. Normally this is automaticaly
+the location of the rsync repository to use. Normally this is automatically
set up by git annex initremote, but you can change it if needed.
.IP
.IP "remote.<name>.buprepo"
Used by bup special remotes, this configures
-the location of the bup repository to use. Normally this is automaticaly
+the location of the bup repository to use. Normally this is automatically
set up by git annex initremote, but you can change it if needed.
.IP
.IP "remote.<name>.directory"
Used by directory special remotes, this configures
the location of the directory where annexed files are stored for this
-remote. Normally this is automaticaly set up by git annex initremote,
+remote. Normally this is automatically set up by git annex initremote,
but you can change it if needed.
.IP
.IP "remote.<name>.s3"
Used to identify Amazon S3 special remotes.
-Normally this is automaticaly set up by git annex initremote.
+Normally this is automatically set up by git annex initremote.
+.IP
+.IP "remote.<name>.glacier"
+Used to identify Amazon Glacier special remotes.
+Normally this is automatically set up by git annex initremote.
+.IP
+.IP "remote.<name>.webdav"
+Used to identify webdav special remotes.
+Normally this is automatically set up by git annex initremote.
.IP
.IP "remote.<name>.annex\-xmppaddress"
Used to identify the XMPP address of a Jabber buddy.
diff --git a/git-annex.cabal b/git-annex.cabal
index 511d2f2..5e3db34 100644
--- a/git-annex.cabal
+++ b/git-annex.cabal
@@ -1,5 +1,5 @@
Name: git-annex
-Version: 3.20121112
+Version: 3.20121126
Cabal-Version: >= 1.8
License: GPL
Maintainer: Joey Hess <joey@kitenet.net>
@@ -28,6 +28,9 @@ Description:
Flag S3
Description: Enable S3 support
+Flag WebDAV
+ Description: Enable WebDAV support
+
Flag Inotify
Description: Enable inotify support
@@ -52,12 +55,12 @@ Flag DNS
Executable git-annex
Main-Is: git-annex.hs
Build-Depends: MissingH, hslogger, directory, filepath,
- unix, containers, utf8-string, network (>= 2.0), mtl,
+ unix, containers, utf8-string, network (>= 2.0), mtl (>= 2.1.1),
bytestring, old-locale, time,
pcre-light, extensible-exceptions, dataenc, SHA, process, json, HTTP,
base (>= 4.5 && < 4.7), monad-control, transformers-base, lifted-base,
IfElse, text, QuickCheck >= 2.1, bloomfilter, edit-distance, process,
- SafeSemaphore, async
+ SafeSemaphore
-- Need to list these because they're generated from .hsc files.
Other-Modules: Utility.Touch Utility.Mounts
Include-Dirs: Utility
@@ -69,6 +72,10 @@ Executable git-annex
Build-Depends: hS3
CPP-Options: -DWITH_S3
+ if flag(WebDAV)
+ Build-Depends: DAV (>= 0.2), http-conduit, xml-conduit
+ CPP-Options: -DWITH_WEBDAV
+
if flag(Assistant) && ! os(windows) && ! os(solaris)
Build-Depends: stm >= 2.3
CPP-Options: -DWITH_ASSISTANT
@@ -88,7 +95,7 @@ Executable git-annex
if flag(Webapp) && flag(Assistant)
Build-Depends: yesod, yesod-static, case-insensitive,
http-types, transformers, wai, wai-logger, warp, blaze-builder,
- crypto-api, hamlet, clientsession,
+ crypto-api, hamlet, clientsession, aeson,
template-haskell, yesod-default (>= 1.1.0), data-default
CPP-Options: -DWITH_WEBAPP
@@ -108,10 +115,11 @@ Test-Suite test
Type: exitcode-stdio-1.0
Main-Is: test.hs
Build-Depends: testpack, HUnit, MissingH, hslogger, directory, filepath,
- unix, containers, utf8-string, network, mtl, bytestring, old-locale, time,
- pcre-light, extensible-exceptions, dataenc, SHA, process, json, HTTP,
- base (>= 4.5 && < 4.7), monad-control, transformers-base, lifted-base,
- IfElse, text, QuickCheck >= 2.1, bloomfilter, edit-distance, process
+ unix, containers, utf8-string, network, mtl (>= 2.1.1), bytestring,
+ old-locale, time, pcre-light, extensible-exceptions, dataenc, SHA,
+ process, json, HTTP, base (>= 4.5 && < 4.7), monad-control,
+ transformers-base, lifted-base, IfElse, text, QuickCheck >= 2.1,
+ bloomfilter, edit-distance, process
Other-Modules: Utility.Touch
Include-Dirs: Utility
C-Sources: Utility/libdiskfree.c
diff --git a/templates/bootstrap.hamlet b/templates/bootstrap.hamlet
index f743a0d..36c0cab 100644
--- a/templates/bootstrap.hamlet
+++ b/templates/bootstrap.hamlet
@@ -3,12 +3,12 @@ $doctype 5
<head>
<title>
$maybe reldir <- relDir webapp
- #{reldir} #{pageTitle page}
+ #{reldir} #{pageTitle pageinfo}
$nothing
- #{pageTitle page}
+ #{pageTitle pageinfo}
<link rel="icon" href=@{StaticR favicon_ico} type="image/x-icon">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
- ^{pageHead page}
+ ^{pageHead pageinfo}
<body>
- ^{pageBody page}
+ ^{pageBody pageinfo}
<div #modal></div>
diff --git a/templates/configurators/addbox.com.hamlet b/templates/configurators/addbox.com.hamlet
new file mode 100644
index 0000000..1098221
--- /dev/null
+++ b/templates/configurators/addbox.com.hamlet
@@ -0,0 +1,26 @@
+<div .span9 .hero-unit>
+ <h2>
+ Adding a Box.com repository
+ <p>
+ <a href="http://box.com">Box.com</a> offers a small quantity of storage #
+ for free, and larger quantities for a fee.
+ <p>
+ Even a small amount of free storage is useful, as a transfer point #
+ between your repositories.
+ <p>
+ All data will be encrypted before being sent to Box.com.
+ <p>
+ <form .form-horizontal enctype=#{enctype}>
+ <fieldset>
+ ^{form}
+ ^{webAppFormAuthToken}
+ <div .form-actions>
+ <button .btn .btn-primary type=submit onclick="$('#workingmodal').modal('show');">
+ Add repository
+<div .modal .fade #workingmodal>
+ <div .modal-header>
+ <h3>
+ Making repository ...
+ <div .modal-body>
+ <p>
+ Setting up your Box.com repository. This could take a minute.
diff --git a/templates/configurators/adddrive.hamlet b/templates/configurators/adddrive.hamlet
index 2f043d1..489328f 100644
--- a/templates/configurators/adddrive.hamlet
+++ b/templates/configurators/adddrive.hamlet
@@ -25,7 +25,7 @@
<form enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<button .btn .btn-primary type=submit onclick="$('#clonemodal').modal('show');">Use this drive</button> #
<a .btn href="@{AddDriveR}">
Rescan for removable drives
diff --git a/templates/configurators/addglacier.hamlet b/templates/configurators/addglacier.hamlet
new file mode 100644
index 0000000..ad15b2b
--- /dev/null
+++ b/templates/configurators/addglacier.hamlet
@@ -0,0 +1,40 @@
+<div .span9 .hero-unit>
+ <h2>
+ Adding an Amazon Glacier repository
+ <p>
+ <a href="http://aws.amazon.com/glacier/">Amazon Glacier</a> is an #
+ offline cloud storage provider. It takes several hours for requested #
+ files to be retrieved from Glacier, making it mostly suitable for #
+ backups and long-term data archival. #
+ <a href="http://aws.amazon.com/glacier/pricing/">
+ Pricing details
+ <p>
+ <i .icon-warning-sign></i> By default, only files you place in #
+ "archive" directories will be archived in Amazon Glacier. #
+ You will be charged by Amazon for data #
+ uploaded to Glacier, as well as data downloaded from Glacier, and a #
+ monthly fee for data storage.
+ <p>
+ All data will be encrypted before being sent to Amazon Glacier.
+ <p>
+ When you sign up to Amazon Glacier, they provide you with an Access #
+ Key ID, and a Secret Access Key. You will need to enter both below. #
+ These access keys will be stored in a file that only you can #
+ access. #
+ <a href="https://portal.aws.amazon.com/gp/aws/securityCredentials#id_block">
+ Look up your access keys
+ <p>
+ <form .form-horizontal enctype=#{enctype}>
+ <fieldset>
+ ^{form}
+ ^{webAppFormAuthToken}
+ <div .form-actions>
+ <button .btn .btn-primary type=submit onclick="$('#workingmodal').modal('show');">
+ Add Glacier repository
+<div .modal .fade #workingmodal>
+ <div .modal-header>
+ <h3>
+ Making repository ...
+ <div .modal-body>
+ <p>
+ Setting up your Amazon Glacier repository. This could take a minute.
diff --git a/templates/configurators/addrsync.net.hamlet b/templates/configurators/addrsync.net.hamlet
index 6ea55ae..d6e4348 100644
--- a/templates/configurators/addrsync.net.hamlet
+++ b/templates/configurators/addrsync.net.hamlet
@@ -28,7 +28,7 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit onclick="$('#testmodal').modal('show');">
Use this rsync.net repository
diff --git a/templates/configurators/adds3.hamlet b/templates/configurators/adds3.hamlet
index f70ce78..f4e6751 100644
--- a/templates/configurators/adds3.hamlet
+++ b/templates/configurators/adds3.hamlet
@@ -25,7 +25,7 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit onclick="$('#workingmodal').modal('show');">
Add S3 repository
diff --git a/templates/configurators/editrepository.hamlet b/templates/configurators/editrepository.hamlet
index f4610fd..354bdab 100644
--- a/templates/configurators/editrepository.hamlet
+++ b/templates/configurators/editrepository.hamlet
@@ -16,7 +16,7 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit>
Save Changes
diff --git a/templates/configurators/enables3.hamlet b/templates/configurators/enableaws.hamlet
index a0d86d7..d78a4f8 100644
--- a/templates/configurators/enables3.hamlet
+++ b/templates/configurators/enableaws.hamlet
@@ -2,14 +2,14 @@
<h2>
Enabling #{description}
<p>
- To use this Amazon S3 repository, you need an Access Key ID, and a #
+ To use this Amazon repository, you need an Access Key ID, and a #
Secret Access Key. These access keys will be stored in a file that #
only you can access.
<p>
- If this repository uses your Amazon S3 account, you can #
+ If this repository uses your Amazon account, you can #
<a href="https://portal.aws.amazon.com/gp/aws/securityCredentials#id_block">
look up your access keys. #
- If this repository uses someone else's Amazon S3 account, they #
+ If this repository uses someone else's Amazon account, they #
can generate access keys for you, using their #
<a href="https://console.aws.amazon.com/iam/home">
IAM Management Console.
@@ -17,14 +17,14 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit onclick="$('#workingmodal').modal('show');">
- Enable S3 repository
+ Enable Amazon repository
<div .modal .fade #workingmodal>
<div .modal-header>
<h3>
Enabling repository ...
<div .modal-body>
<p>
- Enabling this Amazon S3 repository. This could take a minute.
+ Enabling this Amazon repository. This could take a minute.
diff --git a/templates/configurators/enablewebdav.hamlet b/templates/configurators/enablewebdav.hamlet
new file mode 100644
index 0000000..e20f3d9
--- /dev/null
+++ b/templates/configurators/enablewebdav.hamlet
@@ -0,0 +1,22 @@
+<div .span9 .hero-unit>
+ <h2>
+ Enabling #{description}
+ <p>
+ This is a WebDAV repository located at #
+ <a href="#{url}">
+ #{url}
+ <p>
+ <form .form-horizontal enctype=#{enctype}>
+ <fieldset>
+ ^{form}
+ ^{webAppFormAuthToken}
+ <div .form-actions>
+ <button .btn .btn-primary type=submit onclick="$('#workingmodal').modal('show');">
+ Enable repository
+<div .modal .fade #workingmodal>
+ <div .modal-header>
+ <h3>
+ Enabling repository ...
+ <div .modal-body>
+ <p>
+ Enabling this WebDAV repository. This could take a minute.
diff --git a/templates/configurators/needglaciercli.hamlet b/templates/configurators/needglaciercli.hamlet
new file mode 100644
index 0000000..165e73b
--- /dev/null
+++ b/templates/configurators/needglaciercli.hamlet
@@ -0,0 +1,10 @@
+<div .span9 .hero-unit>
+ <h2>
+ Need glacier-cli
+ <p>
+ To use Amazon Glacier, you need to install #
+ <a href="https://github.com/basak/glacier-cli">
+ glacier-cli
+ <p>
+ <a .btn .btn-primary .btn-large href="">
+ Retry
diff --git a/templates/configurators/pairing/local/prompt.hamlet b/templates/configurators/pairing/local/prompt.hamlet
index 24de62b..c66d7b2 100644
--- a/templates/configurators/pairing/local/prompt.hamlet
+++ b/templates/configurators/pairing/local/prompt.hamlet
@@ -29,7 +29,7 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit>
$if start
diff --git a/templates/configurators/pairing/xmpp/end.hamlet b/templates/configurators/pairing/xmpp/end.hamlet
index ee558e0..077430b 100644
--- a/templates/configurators/pairing/xmpp/end.hamlet
+++ b/templates/configurators/pairing/xmpp/end.hamlet
@@ -9,38 +9,23 @@
$nothing
A pair request has been sent to all other devices using your jabber #
account.
- $else
- Pair request accepted.
- <h2>
- Configure a shared cloud repository
+ $else
+ Pair request accepted.
+ <h2>
+ Configure a shared cloud repository
$maybe name <- friend
<p>
&#9730; To share files with #{name}, you'll need a repository in #
the cloud, that you both can access.
- $if null cloudrepolist
- <hr>
- ^{makeCloudRepositories}
- $else
- <p>
- Make sure that #{name} has access to one of these cloud repositories, #
- and that the repository is enabled.
- ^{repoTable cloudrepolist}
- <hr>
- Or, add a new cloud repository:
- ^{makeCloudRepositories}
$nothing
<p>
&#9730; To share files with your other devices, when they're not #
nearby, you'll need a repository in the cloud.
- $if null cloudrepolist
- <hr>
- ^{makeCloudRepositories}
- $else
- <p>
- Make sure that your other devices are configured to access one of #
- these cloud repositories, and that the repository is enabled here #
- too.
- ^{repoTable cloudrepolist}
- <hr>
- Or, add a new cloud repository:
- ^{makeCloudRepositories}
+ <p>
+ Make sure that your other devices are configured to access a #
+ cloud repository, and that the same repository is enabled here #
+ too.
+ ^{cloudrepolist}
+ <h2>
+ Add a cloud repository
+ ^{makeCloudRepositories}
diff --git a/templates/configurators/repositories.hamlet b/templates/configurators/repositories.hamlet
index c236753..d226799 100644
--- a/templates/configurators/repositories.hamlet
+++ b/templates/configurators/repositories.hamlet
@@ -1,7 +1,5 @@
<div .span9>
- <h2>
- Your repositories
- ^{repoTable repolist}
+ ^{repolist}
<div .row-fluid>
<div .span6>
<h2>
diff --git a/templates/configurators/repositories/cloud.hamlet b/templates/configurators/repositories/cloud.hamlet
index 5e78a89..4173498 100644
--- a/templates/configurators/repositories/cloud.hamlet
+++ b/templates/configurators/repositories/cloud.hamlet
@@ -1,4 +1,10 @@
<h3>
+ <a href="@{AddBoxComR}">
+ <i .icon-plus-sign></i> Box.com
+<p>
+ Provides <b>free</b> cloud storage for small amounts of data.
+
+<h3>
<a href="@{AddRsyncNetR}">
<i .icon-plus-sign></i> Rsync.net
<p>
@@ -8,19 +14,15 @@
<a href="@{AddS3R}">
<i .icon-plus-sign></i> Amazon S3
<p>
- Good choice for professional storage quality and low prices.
+ Good choice for professional quality storage.
<h3>
- <i .icon-plus-sign></i> Amazon Glacier
+ <a href="@{AddGlacierR}">
+ <i .icon-plus-sign></i> Amazon Glacier
<p>
Low cost offline data archival.
<h3>
- <i .icon-plus-sign></i> Box.com
-<p>
- Provides free cloud storage for small amounts of data.
-
-<h3>
<a href="@{AddSshR}">
<i .icon-plus-sign></i> Remote server
<p>
diff --git a/templates/configurators/repositories/list.hamlet b/templates/configurators/repositories/list.hamlet
new file mode 100644
index 0000000..5829e23
--- /dev/null
+++ b/templates/configurators/repositories/list.hamlet
@@ -0,0 +1,36 @@
+<div ##{ident}>
+ $if onlyCloud reposelector
+ $if not (null repolist)
+ <h2>
+ Cloud repositories
+ $else
+ No cloud repositories are configured yet.
+ $else
+ $if not (null repolist)
+ <h2>
+ Repositories
+ <table .table .table-condensed>
+ <tbody>
+ $forall (num, name, actions) <- repolist
+ <tr>
+ <td>
+ #{num}
+ <td>
+ #{name}
+ <td>
+ $if needsEnabled actions
+ <a href="@{setupRepoLink actions}">
+ <i .icon-warning-sign></i> not enabled
+ $else
+ <a href="@{syncToggleLink actions}">
+ $if notSyncing actions
+ <i .icon-pause></i> syncing paused
+ $else
+ <i .icon-refresh></i> syncing enabled
+ <td>
+ $if needsEnabled actions
+ <a href="@{setupRepoLink actions}">
+ enable
+ $else
+ <a href="@{setupRepoLink actions}">
+ configure
diff --git a/templates/configurators/repositories/table.hamlet b/templates/configurators/repositories/table.hamlet
deleted file mode 100644
index 81442e4..0000000
--- a/templates/configurators/repositories/table.hamlet
+++ /dev/null
@@ -1,25 +0,0 @@
-<table .table .table-condensed>
- <tbody>
- $forall (num, name, actions) <- repolist
- <tr>
- <td>
- #{num}
- <td>
- #{name}
- <td>
- $if needsEnabled actions
- <a href="@{setupRepoLink actions}">
- <i .icon-warning-sign></i> not enabled
- $else
- <a href="@{syncToggleLink actions}">
- $if notSyncing actions
- <i .icon-pause></i> syncing paused
- $else
- <i .icon-refresh></i> syncing enabled
- <td>
- $if needsEnabled actions
- <a href="@{setupRepoLink actions}">
- enable
- $else
- <a href="@{setupRepoLink actions}">
- configure
diff --git a/templates/configurators/ssh/add.hamlet b/templates/configurators/ssh/add.hamlet
index bb4e5e0..70eb6b5 100644
--- a/templates/configurators/ssh/add.hamlet
+++ b/templates/configurators/ssh/add.hamlet
@@ -19,7 +19,7 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit onclick="$('#testmodal').modal('show');">
Check this server
diff --git a/templates/configurators/ssh/enable.hamlet b/templates/configurators/ssh/enable.hamlet
index 1e35e48..f894718 100644
--- a/templates/configurators/ssh/enable.hamlet
+++ b/templates/configurators/ssh/enable.hamlet
@@ -18,7 +18,7 @@
<i .icon-warning-sign></i> #{msg}
$of _
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .modal .fade #testmodal>
<div .modal-header>
<h3>
diff --git a/templates/configurators/xmpp.hamlet b/templates/configurators/xmpp.hamlet
index f8388bb..5c0ed75 100644
--- a/templates/configurators/xmpp.hamlet
+++ b/templates/configurators/xmpp.hamlet
@@ -9,10 +9,10 @@
It's fine to reuse an existing jabber account; git-annex won't #
post any messages to it.
<p>
- $if problem
+ $maybe msg <- problem
<i .icon-warning-sign></i> Unable to connect to the Jabber server. #
- Maybe you entered the wrong password?
- $else
+ Maybe you entered the wrong password? (Error message: #{msg})
+ $nothing
<i .icon-user></I> If you have a Gmail account, you can use #
Google Talk. Just enter your full Gmail address #
<small>(<tt>you@gmail.com</tt>)</small> #
@@ -21,7 +21,7 @@
<form .form-horizontal enctype=#{enctype}>
<fieldset>
^{form}
- ^{authtoken}
+ ^{webAppFormAuthToken}
<div .form-actions>
<button .btn .btn-primary type=submit onclick="$('#workingmodal').modal('show');">
Use this account