{-# LANGUAGE ScopedTypeVariables #-}
module Darcs.Repository.Packs
    ( fetchAndUnpackBasic
    , fetchAndUnpackPatches
    , packsDir
    ) where

import qualified Codec.Archive.Tar as Tar
import Codec.Compression.GZip as GZ ( compress, decompress )

import Control.Concurrent.Async ( withAsync )
import Control.Exception ( Exception, IOException, throwIO, catch )
import Control.Monad ( void )
import System.IO.Error ( isAlreadyExistsError )

import qualified Data.ByteString.Lazy.Char8 as BL
import Data.List ( isPrefixOf )
import Data.Maybe ( catMaybes, listToMaybe )

import System.Directory ( createDirectoryIfMissing
                        , renameFile
                        , doesFileExist
                        )
import System.FilePath ( (</>)
                       , takeFileName
                       , splitPath
                       , joinPath
                       , takeDirectory
                       )
import System.Posix.Files ( createLink )

import Darcs.Util.Lock ( withTemp )
import Darcs.Util.External ( Cachable(..), fetchFileLazyPS )
import Darcs.Repository.Cache ( fetchFileUsingCache
                              , HashedDir(..)
                              , Cache(..)
                              , CacheLoc(..)
                              , WritableOrNot(..)
                              , hashedDir
                              , bucketFolder
                              , CacheType(Directory)
                              )

import Darcs.Util.Global ( darcsdir )
import Darcs.Util.Progress ( debugMessage )

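-- | Name of the directory under @_darcs@ in which pack files
-- (@basic.tar.gz@ and @patches.tar.gz@) are stored.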
packsDir :: String
packsDir = "packs"

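-- | Download the named pack file from the remote repository's
-- @_darcs/packs@ directory, gunzip it and unpack the contained tarball
-- with 'unpackTar'.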
fetchAndUnpack :: FilePath
               -> HashedDir
               -> Cache
               -> FilePath
               -> IO ()
fetchAndUnpack filename dir cache remote = do
  unpackTar cache dir . Tar.read . GZ.decompress =<<
    fetchFileLazyPS (remote </> darcsdir </> packsDir </> filename) Uncachable

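-- | Fetch the patches pack asynchronously while also fetching each of the
-- given patch files individually through the cache; see the comment in the
-- body for why both passes are needed.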
fetchAndUnpackPatches :: [String] -> Cache -> FilePath -> IO ()
fetchAndUnpackPatches paths cache remote =
  -- The patches pack may lack some of the repository's newest patches,
  -- so we download the pack asynchronously and always do a complete pass
  -- over the individual patch files.
  withAsync (fetchAndUnpack "patches.tar.gz" HashedInventoriesDir cache remote) $ \_ -> do
  fetchFilesUsingCache cache HashedPatchesDir paths

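-- | Fetch and unpack the @basic.tar.gz@ (pristine) pack from the remote
-- repository.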
fetchAndUnpackBasic :: Cache -> FilePath -> IO ()
fetchAndUnpackBasic = fetchAndUnpack "basic.tar.gz" HashedPristineDir
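
-- A minimal usage sketch (not part of this module): a hypothetical caller,
-- e.g. an optimized clone step, could combine the two entry points roughly
-- like this, assuming @cache@, @remote@ and @patchPaths@ are supplied by
-- the surrounding code:
--
-- > -- hypothetical helper, for illustration only
-- > fetchPacks :: Cache -> FilePath -> [String] -> IO ()
-- > fetchPacks cache remote patchPaths = do
-- >   fetchAndUnpackBasic cache remote
-- >   fetchAndUnpackPatches patchPaths cache remote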

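-- | Unpack tar entries one at a time. Entries whose file name starts with
-- @meta-@ are ignored. @_darcs/hashed_inventory@ is written out as is;
-- every other file is written gzip-compressed, preferably by hard-linking
-- an existing copy from a writable directory cache and otherwise by writing
-- the compressed content directly. Unpacking stops as soon as an entry's
-- target file already exists on disk.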
unpackTar :: Exception e => Cache -> HashedDir -> Tar.Entries e -> IO ()
unpackTar _ _   Tar.Done = return ()
unpackTar _ _   (Tar.Fail e) = throwIO e
unpackTar c dir (Tar.Next e es) = case Tar.entryContent e of
  Tar.NormalFile bs _ -> do
    let p = Tar.entryPath e
    if "meta-" `isPrefixOf` takeFileName p
      then unpackTar c dir es -- just ignore them
      else do
        ex <- doesFileExist p
        if ex
          then debugMessage $ "TAR thread: exists " ++ p ++ "\nStopping TAR thread."
          else do
            if p == darcsdir </> "hashed_inventory"
              then writeFile' Nothing p bs
              else writeFile' (cacheDir c) p $ GZ.compress bs
            debugMessage $ "TAR thread: GET " ++ p
            unpackTar c dir es
  _ -> fail "Unexpected non-file tar entry"
 where
  writeFile' Nothing path content = withTemp $ \tmp -> do
    BL.writeFile tmp content
    renameFile tmp path
  writeFile' (Just ca) path content = do
    let fileFullPath = case splitPath path of
          _:hDir:hFile:_  -> joinPath [ca, hDir, bucketFolder hFile, hFile]
          _               -> error "Unexpected file path"
    createDirectoryIfMissing True $ takeDirectory path
    createLink fileFullPath path `catch` (\(ex :: IOException) -> do
      if isAlreadyExistsError ex then
        return () -- so much the better
      else
        -- ignore cache if we cannot link
        writeFile' Nothing path content)

-- | Similar to @'mapM_' ('void' . 'fetchFileUsingCache' cache dir)@, except
-- that files which already exist locally are skipped rather than fetched.
fetchFilesUsingCache :: Cache -> HashedDir -> [FilePath] -> IO ()
fetchFilesUsingCache cache dir = mapM_ go where
  go path = do
    ex <- doesFileExist $ darcsdir </> hashedDir dir </> path
    if ex
     then debugMessage $ "FILE thread: exists " ++ path
     else void $ fetchFileUsingCache cache dir path

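-- | The first writable directory cache location, if any.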
cacheDir :: Cache -> Maybe String
cacheDir (Ca cs) = listToMaybe . catMaybes . flip map cs $ \x -> case x of
  Cache Directory Writable x' -> Just x'
  _ -> Nothing