{-# OPTIONS_GHC -cpp -fffi #-}
{-# LANGUAGE CPP, ForeignFunctionInterface #-}

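-- | Asynchronous downloading of URLs.  Requests are queued on a global
-- channel and served by a single worker thread, optionally using HTTP
-- pipelining when the curl or libwww backend supports it.
--
-- A typical (illustrative) use, with a made-up URL and target file:
--
-- > copyUrlFirst "http://example.org/_darcs/inventory" "inventory" Uncachable
-- > waitUrl "http://example.org/_darcs/inventory"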
module URL ( copyUrl, copyUrlFirst, pipeliningEnabledByDefault,
             setDebugHTTP, setHTTPPipelining, waitUrl,
             Cachable(Cachable, Uncachable, MaxAge)
           ) where

import Data.IORef ( newIORef, readIORef, writeIORef, IORef )
import Data.Map ( Map )
import Data.List ( delete )
import qualified Data.Map as Map
import System.Directory ( copyFile )
import System.IO.Unsafe ( unsafePerformIO )
import Control.Concurrent ( forkIO )
import Control.Concurrent.Chan ( isEmptyChan, newChan, readChan, writeChan, Chan )
import Control.Concurrent.MVar ( isEmptyMVar, modifyMVar_, newEmptyMVar, newMVar, putMVar, readMVar, withMVar, MVar )
import Control.Monad ( unless, when )
import Control.Monad.Trans ( liftIO )
import Control.Monad.State ( evalStateT, get, modify, put, StateT )
import Foreign.C.Types ( CInt )

import Workaround ( renameFile )
import Darcs.Global ( atexit )
import Darcs.Progress ( debugFail, debugMessage )
import Darcs.Lock ( removeFileMayNotExist )

#if defined(HAVE_CURL) || defined(HAVE_LIBWWW)
import Foreign.C.String ( withCString, peekCString, CString )
#elif defined(HAVE_HTTP)
import qualified HTTP ( request_url, wait_next_url )
#endif
#include "impossible.h"

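-- | A single download request: the URL to fetch, the file to write it to,
-- how cachable the response is, its priority in the queue, and an MVar that
-- is filled on completion (with the empty string on success, or an error
-- message on failure).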
data UrlRequest = UrlRequest { url :: String
                             , file :: FilePath
                             , cachable :: Cachable
                             , priority :: Priority
                             , notifyVar :: MVar String }

data Cachable = Cachable | Uncachable | MaxAge !CInt
                deriving (Show, Eq)

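-- | State of the download manager: downloads currently in progress (mapping
-- each URL to its target file, any extra copies requested, its cachability
-- and its notification MVar), the queue of URLs waiting to start, and the
-- number of requests currently in flight.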
data UrlState = UrlState { inProgress :: Map String ( FilePath
                                                    , [FilePath]
                                                    , Cachable
                                                    , (MVar String) )
                         , waitToStart :: Q String
                         , pipeLength :: Int }

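-- | A minimal functional queue: elements are read from the first list and
-- inserted onto the (reversed) second one.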
data Q a = Q [a] [a]

readQ :: Q a -> Maybe (a, Q a)
readQ (Q (x:xs) ys) = Just (x, Q xs ys)
readQ (Q [] ys) = do x:xs <- Just $ reverse ys
                     Just (x, Q xs [])

insertQ :: a -> Q a -> Q a
insertQ y (Q xs ys) = Q xs (y:ys)

pushQ :: a -> Q a -> Q a
pushQ x (Q xs ys) = Q (x:xs) ys

deleteQ :: Eq a => a -> Q a -> Q a
deleteQ x (Q xs ys) = Q (delete x xs) (delete x ys)

elemQ :: Eq a => a -> Q a -> Bool
elemQ x (Q xs ys) = x `elem` xs || x `elem` ys

emptyQ :: Q a
emptyQ = Q [] []

nullQ :: Q a -> Bool
nullQ (Q [] []) = True
nullQ _ = False

data Priority = High | Low deriving Eq

#if defined(CURL_PIPELINING) || defined(HAVE_LIBWWW) || defined(CURL_PIPELINING_DEFAULT)
pipeliningLimit :: Int
pipeliningLimit = 100
#endif

pipeliningEnabledByDefault :: Bool
#if defined(HAVE_LIBWWW) || defined(CURL_PIPELINING_DEFAULT)
pipeliningEnabledByDefault = True
#else
pipeliningEnabledByDefault = False
#endif

{-# NOINLINE maxPipeLength #-}
maxPipeLength :: IORef Int
maxPipeLength = unsafePerformIO $ newIORef $
#if defined(HAVE_LIBWWW) || defined(CURL_PIPELINING_DEFAULT)
                pipeliningLimit
#else
                1
#endif

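-- Maps each requested URL to the MVar its completion status is written to.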
{-# NOINLINE urlNotifications #-}
urlNotifications :: MVar (Map String (MVar String))
urlNotifications = unsafePerformIO $ newMVar Map.empty

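-- Global request channel; the worker thread that serves it is forked the
-- first time the channel is used.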
{-# NOINLINE urlChan #-}
urlChan :: Chan UrlRequest
urlChan = unsafePerformIO $ do
  ch <- newChan
  forkIO (urlThread ch)
  return ch

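-- | Main loop of the download thread: drain new requests from the channel,
-- start as many queued downloads as the pipeline allows, then wait for the
-- next transfer to finish.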
urlThread :: Chan UrlRequest -> IO ()
urlThread ch = evalStateT urlThread' (UrlState Map.empty emptyQ 0)
 where urlThread' = do
         empty <- liftIO $ isEmptyChan ch
         st <- get
         let l = pipeLength st
             w = waitToStart st
         reqs <- if not empty || (nullQ w && l == 0)
                 then liftIO readAllRequests
                 else return []
         mapM_ addReq reqs
         checkWaitToStart
         waitNextUrl
         urlThread'
       readAllRequests = do
         r <- readChan ch
         debugMessage $ "URL.urlThread ("++url r++"\n"++
                        " -> "++file r++")"
         empty <- isEmptyChan ch
         reqs <- if not empty
                 then readAllRequests
                 else return []
         return (r:reqs)
       addReq r = do
         let u = url r
             f = file r
             c = cachable r
         d <- liftIO $ alreadyDownloaded u
         if d
           then dbg "Ignoring UrlRequest of URL that is already downloaded."
           else do
             st <- get
             let p = inProgress st
                 w = waitToStart st
                 e = (f, [], c, notifyVar r)
                 new_w = case priority r of
                           High -> pushQ u w
                           Low -> insertQ u w
                 new_st = st { inProgress = Map.insert u e p
                             , waitToStart = new_w }
             case Map.lookup u p of
               Just (f', fs', c', v) -> do
                 let new_c = minCachable c c'
                 when (c /= c') $ let new_p = Map.insert u (f', fs', new_c, v) p
                                  in do modify (\s -> s { inProgress = new_p })
                                        dbg $ "Changing "++u++" request cachability from "++show c++" to "++show new_c
                 when (u `elemQ` w && priority r == High) $ do
                   modify (\s -> s { waitToStart = pushQ u (deleteQ u w) })
                   dbg $ "Moving "++u++" to head of download queue."
                 if f `notElem` (f':fs')
                   then let new_p = Map.insert u (f', f:fs', new_c, v) p
                        in do modify (\s -> s { inProgress = new_p })
                              dbg "Adding new file to existing UrlRequest."
                   else dbg "Ignoring UrlRequest of file that's already queued."
               _ -> put new_st
       alreadyDownloaded u = do
         n <- liftIO $ withMVar urlNotifications (return . (Map.lookup u))
         case n of
           Just v -> not `fmap` isEmptyMVar v
           Nothing -> return True

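-- | Start queued downloads for as long as the pipeline has room, fetching
-- each URL into a temporary "-new" file first.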
checkWaitToStart :: StateT UrlState IO ()
checkWaitToStart = do
  st <- get
  let l = pipeLength st
  mpl <- liftIO $ readIORef maxPipeLength
  when (l < mpl) $ do
    let w = waitToStart st
    case readQ w of
      Just (u,rest) -> do
        case Map.lookup u (inProgress st) of
          Just (f, _, c, _) -> do
            put $ st { waitToStart = rest
                     , pipeLength = l + 1 }
            dbg ("URL.request_url ("++u++"\n"++
                 " -> "++f++")")
            let f_new = f++"-new"
            liftIO $ do err <- request_url u f_new c
                        if null err
                          then do atexit $ removeFileMayNotExist f_new
                                  debugMessage "URL.request_url succeeded"
                          else do removeFileMayNotExist f_new
                                  debugMessage $ "Failed to start download URL "++u++": "++err
          _ -> bug $ "Possible bug in URL.checkWaitToStart "++u
        checkWaitToStart
      _ -> return ()

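-- | 'copyUrlFirst' puts the request at the head of the download queue,
-- whereas 'copyUrl' appends it at the tail.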
copyUrlFirst :: String -> FilePath -> Cachable -> IO ()
copyUrlFirst = copyUrlWithPriority High

copyUrl :: String -> FilePath -> Cachable -> IO ()
copyUrl = copyUrlWithPriority Low

copyUrlWithPriority :: Priority -> String -> String -> Cachable -> IO ()
copyUrlWithPriority p u f c = do
  debugMessage ("URL.copyUrlWithPriority ("++u++"\n"++
                " -> "++f++")")
  v <- newEmptyMVar
  let fn _ old_val = old_val
  modifyMVar_ urlNotifications (return . (Map.insertWith fn u v))
  let r = UrlRequest u f c p v
  writeChan urlChan r

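-- | Wait for the next transfer to finish.  On success the temporary "-new"
-- file is renamed into place and copied to any additional targets; in either
-- case the waiters registered in 'urlNotifications' are notified.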
waitNextUrl :: StateT UrlState IO ()
waitNextUrl = do
  st <- get
  let l = pipeLength st
  when (l > 0) $ do
    dbg "URL.waitNextUrl start"
    (u, e) <- liftIO $ wait_next_url
    let p = inProgress st
        new_st = st { inProgress = Map.delete u p
                    , pipeLength = l - 1 }
    liftIO $ if null e
             then case Map.lookup u p of
                    Just (f, fs, _, v) -> do
                      renameFile (f++"-new") f
                      mapM_ (safeCopyFile f) fs
                      putMVar v e
                      debugMessage $ "URL.waitNextUrl succeeded: "++u++" "++f
                    Nothing -> bug $ "Possible bug in URL.waitNextUrl: "++u
             else case Map.lookup u p of
                    Just (f, _, _, v) -> do
                      removeFileMayNotExist (f++"-new")
                      putMVar v e
                      debugMessage $ "URL.waitNextUrl failed: "++
                                     u++" "++f++" "++e
                    Nothing -> bug $ "Another possible bug in URL.waitNextUrl: "++u++" "++e
    unless (null u) $ put new_st
 where safeCopyFile f t = let new_t = t++"-new"
                          in do copyFile f new_t
                                renameFile new_t t

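-- | Block until the given URL has finished downloading, failing if the
-- download itself failed.  Returns immediately if the URL is unknown or has
-- already been waited for.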
waitUrl :: String -> IO ()
waitUrl u = do debugMessage $ "URL.waitUrl "++u
               r <- withMVar urlNotifications (return . (Map.lookup u))
               case r of
                 Just var -> do
                   e <- readMVar var
                   modifyMVar_ urlNotifications (return . (Map.delete u))
                   unless (null e) (debugFail $ "Failed to download URL "++u++": "++e)
                 Nothing -> return () -- file was already downloaded

dbg :: String -> StateT a IO ()
dbg = liftIO . debugMessage

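-- | Combine two cachability requests, keeping the more restrictive one.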
minCachable :: Cachable -> Cachable -> Cachable
minCachable Uncachable _ = Uncachable
minCachable _ Uncachable = Uncachable
minCachable (MaxAge a) (MaxAge b) = MaxAge $ min a b
minCachable (MaxAge a) _ = MaxAge a
minCachable _ (MaxAge b) = MaxAge b
minCachable _ _ = Cachable

#if defined(HAVE_CURL) || defined(HAVE_LIBWWW)
cachableToInt :: Cachable -> CInt
cachableToInt Cachable = -1
cachableToInt Uncachable = 0
cachableToInt (MaxAge n) = n
#endif

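-- | Enable or disable HTTP pipelining by adjusting the maximum pipeline
-- length; enabling it only has an effect when darcs was built with a backend
-- that supports pipelining.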
setHTTPPipelining :: Bool -> IO ()
setHTTPPipelining False = writeIORef maxPipeLength 1
setHTTPPipelining True = writeIORef maxPipeLength
#if defined(HAVE_LIBWWW) || defined(CURL_PIPELINING)
                          pipeliningLimit
#else
                          1 >> (putStrLn $ "Warning: darcs is compiled without HTTP pipelining "++
                                           "support, '--http-pipelining' argument is ignored.")
#endif

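-- The low-level transport: these three functions are implemented below by
-- whichever backend darcs was built with (libwww, curl, the HTTP package,
-- or a stub that always fails).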
setDebugHTTP :: IO ()
request_url :: String -> FilePath -> Cachable -> IO String
wait_next_url :: IO (String, String)

#if defined(HAVE_LIBWWW)

setDebugHTTP = libwww_enable_debug

request_url u f cache =
    withCString u $ \ustr ->
    withCString f $ \fstr -> do
      err <- libwww_request_url ustr fstr (cachableToInt cache) >>= peekCString
      return err

wait_next_url = do
  e <- libwww_wait_next_url >>= peekCString
  u <- libwww_last_url >>= peekCString
  return (u, e)

foreign import ccall "hslibwww.h libwww_request_url"
  libwww_request_url :: CString -> CString -> CInt -> IO CString

foreign import ccall "hslibwww.h libwww_wait_next_url"
  libwww_wait_next_url :: IO CString

foreign import ccall "hslibwww.h libwww_last_url"
  libwww_last_url :: IO CString

foreign import ccall "hslibwww.h libwww_enable_debug"
  libwww_enable_debug :: IO ()

#elif defined(HAVE_CURL)

setDebugHTTP = curl_enable_debug

request_url u f cache =
    withCString u $ \ustr ->
    withCString f $ \fstr -> do
      err <- curl_request_url ustr fstr (cachableToInt cache) >>= peekCString
      return err

wait_next_url = do
  e <- curl_wait_next_url >>= peekCString
  u <- curl_last_url >>= peekCString
  return (u, e)

foreign import ccall "hscurl.h curl_request_url"
  curl_request_url :: CString -> CString -> CInt -> IO CString

foreign import ccall "hscurl.h curl_wait_next_url"
  curl_wait_next_url :: IO CString

foreign import ccall "hscurl.h curl_last_url"
  curl_last_url :: IO CString

foreign import ccall "hscurl.h curl_enable_debug"
  curl_enable_debug :: IO ()

#elif defined(HAVE_HTTP)

setDebugHTTP = return ()
request_url = HTTP.request_url
wait_next_url = HTTP.wait_next_url

#else

setDebugHTTP = debugMessage "URL.setDebugHttp works only with curl and libwww"
request_url _ _ _ = debugFail "URL.request_url: there is no curl or libwww!"
wait_next_url = debugFail "URL.wait_next_url: there is no curl or libwww!"

#endif