adddir ./MiniDarcs
adddir ./MiniDarcs/Commands
adddir ./MiniDarcs/Patch
addfile ./MiniDarcs/Main.hs
hunk ./MiniDarcs/Main.hs 1
+
+module Main (main) where
+
+main :: IO ()
+main = return ()
addfile ./Setup.hs
hunk ./Setup.hs 1
+
+module Main (main) where
+
+import Distribution.Simple
+
+main :: IO ()
+main = defaultMain
+
addfile ./minidarcs.cabal
hunk ./minidarcs.cabal 1
+Name:           minidarcs
+Version:        0.1
+License:        BSD3
+License-File:   LICENSE
+Copyright:      2008 Ian Lynagh
+Author:         Ian Lynagh
+Maintainer:     Ian Lynagh
+Synopsis:       Mini Darcs
+Description:
+    Mini Darcs
+Category:       Development
+Build-Type:     Simple
+Cabal-Version:  >=1.2
+
+Executable minidarcs
+    Main-Is: MiniDarcs/Main.hs
+
+    Build-Depends: base
+
hunk ./MiniDarcs/Main.hs 3
+
+import MiniDarcs.Patch.Patch
+import MiniDarcs.Patch.Sequence
+import MiniDarcs.Types
addfile ./MiniDarcs/Patch/Patch.hs
hunk ./MiniDarcs/Patch/Patch.hs 1
+
+module MiniDarcs.Patch.Patch (Patch(..)) where
+
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Types
+
+data Patch from to where
+    AddFile :: FilePath -> Patch from to
+    RmFile :: FilePath -> Patch from to
+    Hunk :: FilePath
+         -> Line     -- skip this many lines
+         -> [String] -- remove these lines
+         -> [String] -- add these lines
+         -> Patch from to
+
+instance Commute Patch where
+    commute (AddFile p_f `Then` AddFile q_f)
+        = Just (AddFile q_f `Then` AddFile p_f)
+
addfile ./MiniDarcs/Types.hs
hunk ./MiniDarcs/Types.hs 1
+
+module MiniDarcs.Types where
+
+type Line = Integer
hunk ./minidarcs.cabal 19
+
+    Extensions: GADTs
addfile ./.boring
hunk ./.boring 1
+^dist(/|$)
+^Setup$
+^Setup\.hi$
+^Setup\.o$
+
changepref boringfile .boring
hunk ./MiniDarcs/Main.hs 4
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.ContextedPatch
addfile ./MiniDarcs/Patch/Commute.hs
hunk ./MiniDarcs/Patch/Commute.hs 1
+
+module MiniDarcs.Patch.Commute (Then(..), Commute(..)) where
+
+data Then p q from to where
+    Then :: p from mid -> q mid to -> Then p q from to
+
+class Commute p q where
+    commute :: Then p q from to -> Maybe (Then q p from to)
addfile ./MiniDarcs/Patch/CommutePast.hs
hunk ./MiniDarcs/Patch/CommutePast.hs 1
+
+module MiniDarcs.Patch.CommutePast (ThenOpen(..), CommutePast(..)) where
+
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.Patch
+import MiniDarcs.Patch.Sequence
+import MiniDarcs.Types
+
+data ThenOpen p q from where
+    ThenOpen :: p from mid -> q mid -> ThenOpen p q from
+
+class CommutePast p q where
+    commutePast :: ThenOpen p q from -> Maybe (q from)
+
addfile ./MiniDarcs/Patch/ContextedPatch.hs
hunk ./MiniDarcs/Patch/ContextedPatch.hs 1
+
+module MiniDarcs.Patch.ContextedPatch (ContextedPatch(..)) where
+
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.CommutePast
+import MiniDarcs.Patch.Patch
+import MiniDarcs.Patch.Sequence
+import MiniDarcs.Types
+
+data ContextedPatch from where
+    ContextedPatch :: Seq Patch from mid -> Patch mid to
+                   -> ContextedPatch from
+
+instance Commute p Patch => CommutePast p ContextedPatch where
+    commutePast (p `ThenOpen` ContextedPatch qs q)
+        = do (qs' `Then` p') <- commute (p `Then` qs)
+             (q' `Then` _) <- commute (p' `Then` q)
+             return (ContextedPatch qs' q')
+
hunk ./MiniDarcs/Patch/Patch.hs 16
-instance Commute Patch where
+-- XXX Wrong and incomplete:
+instance Commute Patch Patch where
addfile ./MiniDarcs/Patch/Sequence.hs
hunk ./MiniDarcs/Patch/Sequence.hs 1
+
+module MiniDarcs.Patch.Sequence (Seq(..)) where
+
+import MiniDarcs.Patch.Commute
+
+data Seq p from to where
+    Cons :: p from mid -> Seq p mid to -> Seq p from to
+    Nil :: Seq p here here
+
+commutePastSequence :: Commute p q
+                    => Then p (Seq q) from to
+                    -> Maybe (Then (Seq q) p from to)
+commutePastSequence (p `Then` Nil) = Just (Nil `Then` p)
+commutePastSequence (p `Then` Cons q qs)
+    = do (q' `Then` p') <- commute (p `Then` q)
+         (qs' `Then` p'') <- commutePastSequence (p' `Then` qs)
+         return (Cons q' qs' `Then` p'')
+
+commuteSequencePast :: Commute p q
+                    => Then (Seq p) q from to
+                    -> Maybe (Then q (Seq p) from to)
+commuteSequencePast (Nil `Then` q) = Just (q `Then` Nil)
+commuteSequencePast (Cons p ps `Then` q)
+    = do (q' `Then` ps') <- commuteSequencePast (ps `Then` q)
+         (q'' `Then` p') <- commute (p `Then` q')
+         return (q'' `Then` Cons p' ps')
+
+instance Commute p q => Commute p (Seq q) where
+    commute = commutePastSequence
+
+instance Commute p q => Commute (Seq p) q where
+    commute = commuteSequencePast
+
+-- We need to provide this redundant instance or GHC won't know which
+-- of the previous two instances to use for Commute (Seq Patch) (Seq Patch)
+instance Commute p q => Commute (Seq p) (Seq q) where
+    -- This can use either commutePastSequence or commuteSequencePast
+    commute = commuteSequencePast
+
hunk ./minidarcs.cabal 20
-    Extensions: GADTs
+    Extensions: GADTs, MultiParamTypeClasses,
+                FlexibleContexts, FlexibleInstances,
+                OverlappingInstances, UndecidableInstances,
+                IncoherentInstances
move ./MiniDarcs/Patch/Patch.hs ./MiniDarcs/Patch/Primitive.hs
hunk ./MiniDarcs/Main.hs 4
+import MiniDarcs.Patch.Catch
hunk ./MiniDarcs/Main.hs 8
+import MiniDarcs.Patch.Primitive
addfile ./MiniDarcs/Patch/Catch.hs
hunk ./MiniDarcs/Patch/Catch.hs 1
+
+module MiniDarcs.Patch.Catch (Catch(..)) where
+
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.ContextedPatch
+import MiniDarcs.Patch.Patch
+import MiniDarcs.Patch.Sequence
+import MiniDarcs.Types
+
+data Catch from to where
+    Patch :: Patch from to -> Catch from to
+    Conflictor :: Seq Patch from to
+               -> [ContextedPatch to]
+               -> ContextedPatch to
+               -> Catch from to
+
+-- XXX Wrong:
+instance Commute Catch Catch where
+    commute (Patch p `Then` Patch q)
+        = do (q' `Then` p') <- commute (p `Then` q)
+             return (Patch q' `Then` Patch p')
+    commute _ = Nothing
+
addfile ./MiniDarcs/Patch/Patch.hs
hunk ./MiniDarcs/Patch/Patch.hs 1
+
+module MiniDarcs.Patch.Patch (Patch(..)) where
+
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.Primitive
+import MiniDarcs.Types
+
+data Patch from to where
+    Primitive :: Primitive from to -> Patch from to
+
+instance Commute Patch Patch where
+    commute (Primitive p `Then` Primitive q)
+        = do (q' `Then` p') <- commute (p `Then` q)
+             return (Primitive q' `Then` Primitive p')
+
hunk ./MiniDarcs/Patch/Primitive.hs 2
-module MiniDarcs.Patch.Patch (Patch(..)) where
+module MiniDarcs.Patch.Primitive (Primitive(..)) where
hunk ./MiniDarcs/Patch/Primitive.hs 7
-data Patch from to where
-    AddFile :: FilePath -> Patch from to
-    RmFile :: FilePath -> Patch from to
+data Primitive from to where
+    AddFile :: FilePath -> Primitive from to
+    RmFile :: FilePath -> Primitive from to
hunk ./MiniDarcs/Patch/Primitive.hs 14
-         -> Patch from to
+         -> Primitive from to
hunk ./MiniDarcs/Patch/Primitive.hs 17
-instance Commute Patch Patch where
+instance Commute Primitive Primitive where
hunk ./MiniDarcs/Main.hs 1
+
+{-# OPTIONS_GHC -fno-warn-unused-imports #-}
+-- XXX -fno-warn-unused-imports is a hack while we don't actually have
+-- a proper implementation to actually use the code properly
hunk ./MiniDarcs/Patch/Catch.hs 8
-import MiniDarcs.Types
hunk ./MiniDarcs/Patch/CommutePast.hs 3
-
-import MiniDarcs.Patch.Commute
-import MiniDarcs.Patch.Patch
-import MiniDarcs.Patch.Sequence
-import MiniDarcs.Types
hunk ./MiniDarcs/Patch/ContextedPatch.hs 8
-import MiniDarcs.Types
hunk ./MiniDarcs/Patch/Patch.hs 6
-import MiniDarcs.Types
hunk ./MiniDarcs/Patch/Primitive.hs 16
--- XXX Wrong and incomplete:
+-- XXX Wrong:
hunk ./MiniDarcs/Patch/Primitive.hs 20
+    commute _ = Nothing
hunk ./minidarcs.cabal 17
+
+    Ghc-Options: -Wall -Werror
addfile ./MiniDarcs/Patch/Name.hs
hunk ./MiniDarcs/Patch/Name.hs 1
+
+module MiniDarcs.Patch.Name (Name(..), SubName(..)) where
+
+-- Designed for using
+-- TOD :: Integer -> Integer -> ClockTime
+data Name = Name Integer Integer
+    deriving Eq
+
+data SubName = SubName Name Integer
+    deriving Eq
hunk ./MiniDarcs/Patch/Patch.hs 5
+import MiniDarcs.Patch.Name
hunk ./MiniDarcs/Patch/Patch.hs 9
-    Primitive :: Primitive from to -> Patch from to
+    Primitive :: SubName -> Primitive from to -> Patch from to
hunk ./MiniDarcs/Patch/Patch.hs 12
-    commute (Primitive p `Then` Primitive q)
+    commute (Primitive np p `Then` Primitive nq q)
hunk ./MiniDarcs/Patch/Patch.hs 14
-             return (Primitive q' `Then` Primitive p')
+             return (Primitive nq q' `Then` Primitive np p')
hunk ./MiniDarcs/Main.hs 11
+import MiniDarcs.Patch.Invert
+import MiniDarcs.Patch.Name
hunk ./MiniDarcs/Patch/Catch.hs 6
+import MiniDarcs.Patch.Invert
hunk ./MiniDarcs/Patch/Catch.hs 24
+instance Invert Catch where
+    invert (Patch p) = Patch (invert p)
+    -- XXX Need to prove this right or introduce an InverseConflictor
+    -- XXX Actually, this is just wrong: We need the inverse patches in
+    -- the conflicts
+    invert (Conflictor effect conflicts identity)
+        = Conflictor (invert effect)
+                     (map (addToContext effect) conflicts)
+                     (addToContext effect identity)
+
hunk ./MiniDarcs/Patch/ContextedPatch.hs 2
-module MiniDarcs.Patch.ContextedPatch (ContextedPatch(..)) where
+module MiniDarcs.Patch.ContextedPatch (ContextedPatch(..), addToContext) where
hunk ./MiniDarcs/Patch/ContextedPatch.hs 13
+-- XXX Needs to handle the p p^ case
hunk ./MiniDarcs/Patch/ContextedPatch.hs 20
+addToContext :: Seq Patch from mid -> ContextedPatch mid -> ContextedPatch from
+addToContext Nil cq = cq
+addToContext (Cons p ps) cq
+    = case addToContext ps cq of
+      cq'@(ContextedPatch qs q) ->
+          case commutePast (p `ThenOpen` cq') of
+          Nothing -> ContextedPatch (Cons p qs) q
+          Just cq'' -> cq''
+
addfile ./MiniDarcs/Patch/Invert.hs
hunk ./MiniDarcs/Patch/Invert.hs 1
+
+module MiniDarcs.Patch.Invert (Invert(..)) where
+
+class Invert p where
+    invert :: p from to -> p to from
hunk ./MiniDarcs/Patch/Name.hs 2
-module MiniDarcs.Patch.Name (Name(..), SubName(..)) where
+module MiniDarcs.Patch.Name (
+    Name(..), SubName(..), inverseName, inverseSubName
+    ) where
hunk ./MiniDarcs/Patch/Name.hs 8
-data Name = Name Integer Integer
+data Name = Name Sign Integer Integer
hunk ./MiniDarcs/Patch/Name.hs 14
+data Sign = Positive | Negative
+    deriving Eq
+
+inverseName :: Name -> Name
+inverseName (Name Positive i1 i2) = Name Negative i1 i2
+inverseName (Name Negative i1 i2) = Name Positive i1 i2
+
+inverseSubName :: SubName -> SubName
+inverseSubName (SubName n i) = SubName (inverseName n) i
+
+
hunk ./MiniDarcs/Patch/Patch.hs 5
+import MiniDarcs.Patch.Invert
hunk ./MiniDarcs/Patch/Patch.hs 17
+instance Invert Patch where
+    invert (Primitive n p) = Primitive (inverseSubName n) (invert p)
+
hunk ./MiniDarcs/Patch/Primitive.hs 5
+import MiniDarcs.Patch.Invert
hunk ./MiniDarcs/Patch/Primitive.hs 23
+instance Invert Primitive where
+    invert (AddFile f) = RmFile f
+    invert (RmFile f) = AddFile f
+    invert (Hunk f skip remove add) = Hunk f skip add remove
hunk ./MiniDarcs/Patch/Sequence.hs 5
+import MiniDarcs.Patch.Invert
hunk ./MiniDarcs/Patch/Sequence.hs 41
+instance Invert p => Invert (Seq p) where
+    invert = f Nil
+        where f :: Invert p => Seq p mid from -> Seq p mid to -> Seq p to from
+              f is Nil = is
+              f is (Cons p ps) = f (Cons (invert p) is) ps
hunk ./.boring 5
+(^|/)\..*\.swp
hunk ./MiniDarcs/Main.hs 12
+import MiniDarcs.Patch.Merge
addfile ./MiniDarcs/Patch/Merge.hs
hunk ./MiniDarcs/Patch/Merge.hs 1
+
+module MiniDarcs.Patch.Merge where
+
+import MiniDarcs.Patch.Catch
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.Invert
+
+data Fork to1 to2 where
+    Fork :: Catch from to1 -> Catch from to2 -> Fork to1 to2
+
+data Join from1 from2 where
+    Join :: Catch from1 to -> Catch from2 to -> Join from1 from2
+
+merge :: Fork mid1 mid2 -> Join mid1 mid2
+merge (Fork c1 c2) = case commute (invert c1 `Then` c2) of
+                     Just (c2' `Then` ic1') ->
+                         Join c2' (invert ic1')
+                     -- XXX Incomplete
+                     Nothing -> undefined
hunk ./MiniDarcs/Main.hs 8
+import MiniDarcs.Patch.Apply
addfile ./MiniDarcs/Patch/Apply.hs
hunk ./MiniDarcs/Patch/Apply.hs 1
+
+module MiniDarcs.Patch.Apply (Apply(..)) where
+
+class Apply p where
+    apply :: p from to -> IO ()
hunk ./MiniDarcs/Patch/Catch.hs 4
+import MiniDarcs.Patch.Apply
hunk ./MiniDarcs/Patch/Catch.hs 35
+instance Apply Catch where
+    apply (Patch p) = apply p
+    apply (Conflictor effect _ _) = apply effect
+
hunk ./MiniDarcs/Patch/Patch.hs 4
+import MiniDarcs.Patch.Apply
hunk ./MiniDarcs/Patch/Patch.hs 21
+instance Apply Patch where
+    apply (Primitive _ p) = apply p
+
hunk ./MiniDarcs/Patch/Primitive.hs 4
+import MiniDarcs.Patch.Apply
hunk ./MiniDarcs/Patch/Primitive.hs 8
+import MiniDarcs.Utils
+
+import Control.Exception
+import Data.List
+import System.Directory
+import System.IO
hunk ./MiniDarcs/Patch/Primitive.hs 35
+-- XXX This ignores broken symlinks, has a race condition, etc.
+-- But it's portable, and everything is fine as long as you
+-- promise to be well-behaved!
+instance Apply Primitive where
+    apply (AddFile fp) = do
+        fileExists <- doesFileExist fp
+        directoryExists <- doesDirectoryExist fp
+        if fileExists || directoryExists
+            then error ("Already exists: " ++ show fp)
+            else withBinaryFile fp WriteMode $ \_ ->
+                 return ()
+    apply (RmFile fp) = do size <- withBinaryFile fp ReadMode hFileSize
+                           if size /= 0
+                               then error ("Not empty: " ++ show fp)
+                               else removeFile fp
+    apply (Hunk fp skip old new)
+        = do content <- withBinaryFile fp ReadMode $ \h -> do
+                 do xs <- hGetContents h
+                    -- Hack to actually read in the contents:
+                    evaluate (length xs)
+                    return xs
+             case mySplitAt skip $ myLines content of
+                 Just (skipped, rest) ->
+                     case stripPrefix old rest of
+                     Just rest' ->
+                         withBinaryFile fp WriteMode $ \h ->
+                         hPutStr h (myUnlines (skipped ++ new ++ rest'))
+                     Nothing -> error "Old patch content is wrong"
+                 Nothing -> error "Not enough lines to skip"
+
+
hunk ./MiniDarcs/Patch/Sequence.hs 4
+import MiniDarcs.Patch.Apply
hunk ./MiniDarcs/Patch/Sequence.hs 48
+instance Apply p => Apply (Seq p) where
+    apply Nil = return ()
+    apply (Cons p ps) = do apply p
+                           apply ps
+
+
addfile ./MiniDarcs/Utils.hs
hunk ./MiniDarcs/Utils.hs 1
+
+module MiniDarcs.Utils (myLines, myUnlines, mySplitAt) where
+
+import Data.List
+
+-- The normal lines function returns the same thing for
+--     lines "foo"
+-- and
+--     lines "foo\n"
+myLines :: String -> [String]
+myLines xs = case break ('\n' ==) xs of
+             (ys, _ : zs) -> ys : myLines zs
+             (_, []) -> [xs]
+
+-- The normal unlines function always puts a trailing '\n' on
+myUnlines :: [String] -> String
+myUnlines xs = intercalate "\n" xs
+
+mySplitAt :: Integer -> [a] -> Maybe ([a], [a])
+mySplitAt i _ | i < 0 = error "mySplitAt: Negative number"
+mySplitAt 0 xs = Just ([], xs)
+mySplitAt _ [] = Nothing
+mySplitAt i (x : xs) = case mySplitAt (i - 1) xs of
+                       Just (ys, zs) -> Just (x : ys, zs)
+                       Nothing -> Nothing
hunk ./minidarcs.cabal 20
-    Build-Depends: base
+    Build-Depends: base, directory
hunk ./MiniDarcs/Main.hs 11
+import MiniDarcs.Patch.CommutePast
hunk ./MiniDarcs/Main.hs 19
+import MiniDarcs.Repository
hunk ./MiniDarcs/Main.hs 21
+import MiniDarcs.Utils
addfile ./MiniDarcs/Repository.hs
hunk ./MiniDarcs/Repository.hs 1
+
+-- XXX Export list
+module MiniDarcs.Repository where
+
+repoRoot :: FilePath
+repoRoot = "_minidarcs"
move ./MiniDarcs/Commands ./MiniDarcs/Command
addfile ./MiniDarcs/Command/Init.hs
hunk ./MiniDarcs/Command/Init.hs 1
+
+module MiniDarcs.Command.Init (initialise) where
+
+import MiniDarcs.Repository
+
hunk ./MiniDarcs/Main.hs 8
+import MiniDarcs.Command.Init
hunk ./MiniDarcs/Main.hs 24
+import System.Environment
+
hunk ./MiniDarcs/Main.hs 27
-main = return ()
+main = do args <- getArgs
+          case args of
+              ["init"] -> initialise
+              _ -> error "Unrecognised args"
+
hunk ./MiniDarcs/Patch/Name.hs 9
-    deriving Eq
+    deriving (Eq, Show, Read)
hunk ./MiniDarcs/Patch/Name.hs 12
-    deriving Eq
+    deriving (Eq, Show, Read)
hunk ./MiniDarcs/Patch/Name.hs 15
-    deriving Eq
+    deriving (Eq, Show, Read)
hunk ./MiniDarcs/Patch/Primitive.hs 10
-import Control.Exception
hunk ./MiniDarcs/Patch/Primitive.hs 50
-        = do content <- withBinaryFile fp ReadMode $ \h -> do
-                 do xs <- hGetContents h
-                    -- Hack to actually read in the contents:
-                    evaluate (length xs)
-                    return xs
+        = do content <- readBinaryFile fp
hunk ./MiniDarcs/Patch/Primitive.hs 54
-                     Just rest' ->
-                         withBinaryFile fp WriteMode $ \h ->
-                         hPutStr h (myUnlines (skipped ++ new ++ rest'))
+                     Just rest' -> do
+                         let content' = myUnlines (skipped ++ new ++ rest')
+                         writeBinaryFile fp content'
hunk ./MiniDarcs/Repository.hs 2
--- XXX Export list
-module MiniDarcs.Repository where
+module MiniDarcs.Repository (
+    repoRoot,
+    initialise,
+    readInventory, writeInventory,
+    ) where
+
+import MiniDarcs.Patch.Name
+import MiniDarcs.Utils
+
+import System.Directory
+import System.FilePath
hunk ./MiniDarcs/Repository.hs 17
+inventoryFile :: FilePath
+inventoryFile = repoRoot </> "inventory"
+
+readInventory :: IO [Name]
+readInventory = do content <- readBinaryFile inventoryFile
+                   case maybeRead content of
+                       Just names -> return names
+                       Nothing -> error "Corrupt inventory?"
+
+writeInventory :: [Name] -> IO ()
+writeInventory ns = writeBinaryFile inventoryFile (show ns)
+
+patchesDir :: FilePath
+patchesDir = repoRoot </> "patches"
+
+initialise :: IO ()
+initialise = do createDirectory repoRoot
+                createDirectory patchesDir
+                writeInventory []
+
+
hunk ./MiniDarcs/Utils.hs 2
-module MiniDarcs.Utils (myLines, myUnlines, mySplitAt) where
+module MiniDarcs.Utils (
+    myLines, myUnlines, mySplitAt, maybeRead,
+    readBinaryFile, writeBinaryFile,
+    ) where
hunk ./MiniDarcs/Utils.hs 7
+import Control.Exception
+import Data.Char
hunk ./MiniDarcs/Utils.hs 10
+import System.IO
hunk ./MiniDarcs/Utils.hs 33
+maybeRead :: Read a => String -> Maybe a
+maybeRead xs = case reads xs of
+               [(v, spaces)]
+                | all isSpace spaces -> Just v
+               _ -> Nothing
+
+readBinaryFile :: FilePath -> IO String
+readBinaryFile fp = withBinaryFile fp ReadMode $ \h -> do
+                    xs <- hGetContents h
+                    -- Hack to actually read in the contents:
+                    evaluate (length xs)
+                    return xs
+
+writeBinaryFile :: FilePath -> String -> IO ()
+writeBinaryFile fp xs = withBinaryFile fp WriteMode $ \h ->
+                        hPutStr h xs
+
+
hunk ./minidarcs.cabal 20
-    Build-Depends: base, directory
+    Build-Depends: base, directory, filepath
hunk ./MiniDarcs/Main.hs 15
+import MiniDarcs.Patch.MegaPatch
hunk ./MiniDarcs/Main.hs 21
+import MiniDarcs.Patch.ShowRead
hunk ./MiniDarcs/Patch/Catch.hs 17
+    deriving (Show, Read)
hunk ./MiniDarcs/Patch/ContextedPatch.hs 8
+import MiniDarcs.Patch.ShowRead
+import MiniDarcs.Utils
hunk ./MiniDarcs/Patch/ContextedPatch.hs 15
+instance Show (ContextedPatch from) where
+    show (ContextedPatch ps p) = show ps ++ " : " ++ show p
+
+instance Read (ContextedPatch from) where
+    readsPrec _ xs = case maybeContextedReads xs of
+                     Just (Anonymous1 ps, ' ':':':' ':xs') ->
+                         case maybeRead xs' of
+                         Just (p, xs'') ->
+                             [(ContextedPatch ps p, xs'')]
+                         Nothing -> []
+                     _ -> []
+
addfile ./MiniDarcs/Patch/MegaPatch.hs
hunk ./MiniDarcs/Patch/MegaPatch.hs 1
+
+module MiniDarcs.Patch.MegaPatch (MegaPatch(..)) where
+
+import MiniDarcs.Patch.Apply
+import MiniDarcs.Patch.Catch
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.Invert
+import MiniDarcs.Patch.Name
+
+data MegaPatch from to where
+    MegaPatch :: Name -> Catch from to -> MegaPatch from to
+
+instance Commute MegaPatch MegaPatch where
+    commute (MegaPatch np p `Then` MegaPatch nq q)
+        = do (q' `Then` p') <- commute (p `Then` q)
+             return (MegaPatch nq q' `Then` MegaPatch np p')
+
+instance Invert MegaPatch where
+    invert (MegaPatch n p) = MegaPatch (inverseName n) (invert p)
+
+instance Apply MegaPatch where
+    apply (MegaPatch _ p) = apply p
+
hunk ./MiniDarcs/Patch/Patch.hs 9
+import MiniDarcs.Patch.ShowRead
+import MiniDarcs.Utils
hunk ./MiniDarcs/Patch/Patch.hs 14
+    deriving (Show, Read)
+
+instance Show2 Patch where
+    show2 = show
+
+instance ContextedRead Patch where
+    maybeContextedReads xs = case maybeReads xs of
+                             Just (p, xs') -> Just (Anonymous1 p, xs')
+                             Nothing -> Nothing
hunk ./MiniDarcs/Patch/Primitive.hs 22
+    deriving (Show, Read)
hunk ./MiniDarcs/Patch/Sequence.hs 7
+import MiniDarcs.Patch.ShowRead
+
+import Data.List
+import Unsafe.Coerce
hunk ./MiniDarcs/Patch/Sequence.hs 16
+instance Show2 p => Show2 (Seq p) where
+    show2 xs = "Seq [\n" ++ f xs ++ "]"
+        where f :: Show2 p => Seq p from to -> String
+              f Nil = ""
+              f (Cons p ps) = "    " ++ show p ++ ";\n" ++ f ps
+
+instance ContextedRead (Seq p) => Read (Seq p from to) where
+    readsPrec _ xs = case maybeContextedReads xs of
+                     Just (Anonymous1 ps, xs') -> [(hackCtxt ps, xs')]
+                     Nothing -> []
+        where -- XXX Do this more nicely?
+              hackCtxt :: Seq p from to1 -> Seq p from to2
+              hackCtxt = unsafeCoerce
+
+instance ContextedRead p => ContextedRead (Seq p) where
+    maybeContextedReads xs = case stripPrefix "Seq [" xs of
+                             Nothing -> Nothing
+                             Just xs' -> f xs'
+        where f :: ContextedRead p
+                => String -> Maybe (Anonymous1 (Seq p from), String)
+              f ys = case maybeContextedReads ys of
+                     Just (Anonymous1 p, ';':ys') ->
+                         case f ys' of
+                         Just (Anonymous1 ps, ys'') ->
+                             Just (Anonymous1 (Cons p ps), ys'')
+                         Nothing -> Nothing
+                     _ ->
+                         case ys of
+                         ']' : ys' -> Just (Anonymous1 Nil, ys')
+                         _ -> Nothing
+
addfile ./MiniDarcs/Patch/ShowRead.hs
hunk ./MiniDarcs/Patch/ShowRead.hs 1
+
+module MiniDarcs.Patch.ShowRead where
+
+class Show2 p where
+    show2 :: p from to -> String
+
+instance Show2 p => Show (p from to) where
+    show = show2
+
+data Anonymous1 p where
+    Anonymous1 :: p ctxt -> Anonymous1 p
+
+class ContextedRead p where
+    maybeContextedReads :: String -> Maybe (Anonymous1 (p from), String)
+
hunk ./MiniDarcs/Repository.hs 5
-    readInventory, writeInventory,
+    readInventory,
+    writeInventory,
+    readMegaPatch,
+    writeMegaPatch,
hunk ./MiniDarcs/Repository.hs 11
+import MiniDarcs.Patch.MegaPatch
hunk ./MiniDarcs/Repository.hs 36
+patchFile :: Name -> FilePath
+patchFile n = patchesDir </> show n
+
+readMegaPatch :: Name -> IO (MegaPatch from to)
+readMegaPatch n = do content <- readBinaryFile $ patchFile n
+                     case maybeRead content of
+                         Just c -> return (MegaPatch n c)
+                         Nothing -> error "Corrupt patch?"
+
+writeMegaPatch :: MegaPatch from to -> IO ()
+writeMegaPatch (MegaPatch n c) = writeBinaryFile (patchFile n) (show c)
+
hunk ./MiniDarcs/Utils.hs 3
-    myLines, myUnlines, mySplitAt, maybeRead,
+    myLines, myUnlines, mySplitAt, maybeRead, maybeReads,
hunk ./MiniDarcs/Utils.hs 34
-maybeRead xs = case reads xs of
-               [(v, spaces)]
-                | all isSpace spaces -> Just v
-               _ -> Nothing
+maybeRead xs = case maybeReads xs of
+               Just (v, _) -> Just v
+               Nothing -> Nothing
+
+maybeReads :: Read a => String -> Maybe (a, String)
+maybeReads xs = case reads xs of
+                [(v, xs')] -> Just (v, xs')
+                _ -> Nothing
hunk ./MiniDarcs/Patch/ShowRead.hs 1
+
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+-- We have an orphan we can't get rid of
hunk ./MiniDarcs/Main.hs 9
-import MiniDarcs.Patch.Apply
-import MiniDarcs.Patch.Catch
-import MiniDarcs.Patch.Commute
-import MiniDarcs.Patch.CommutePast
-import MiniDarcs.Patch.ContextedPatch
-import MiniDarcs.Patch.Invert
-import MiniDarcs.Patch.MegaPatch
hunk ./MiniDarcs/Main.hs 10
-import MiniDarcs.Patch.Name
-import MiniDarcs.Patch.Patch
-import MiniDarcs.Patch.Primitive
-import MiniDarcs.Patch.Sequence
-import MiniDarcs.Patch.ShowRead
-import MiniDarcs.Repository
-import MiniDarcs.Types
-import MiniDarcs.Utils
hunk ./MiniDarcs/Command/Init.hs 4
+import MiniDarcs.Patch.Apply
+import MiniDarcs.Patch.Catch
+import MiniDarcs.Patch.MegaPatch
+import MiniDarcs.Patch.Name
+import MiniDarcs.Patch.Patch
+import MiniDarcs.Patch.Primitive
+import MiniDarcs.Patch.Sequence
hunk ./MiniDarcs/Command/Init.hs 13
+initialise :: [String] -> IO ()
+initialise [] = initialiseRepo
+initialise ["test"]
+    = do let name1 = Name Positive 111 111
+             name2 = Name Positive 222 222
+             name3 = Name Positive 333 333
+             names = [name1, name2, name3]
+             patch1a = Primitive (SubName name1 1)
+                                 (AddFile "foo")
+             patch1b = Primitive (SubName name1 2)
+                                 (AddFile "bar")
+             patch1c = Primitive (SubName name1 3)
+                                 (Hunk "foo" 0 []
+                                       ["foo line 1",
+                                        "foo line 2",
+                                        "foo line 3",
+                                        "foo line 4",
+                                        "foo line 5"])
+             patch2a = Primitive (SubName name2 1)
+                                 (Hunk "foo" 2
+                                       ["foo line 3", "foo line 4"]
+                                       ["in between foo lines 2,5 1",
+                                        "in between foo lines 2,5 2",
+                                        "in between foo lines 2,5 3"])
+             patch3a = Primitive (SubName name3 1)
+                                 (Hunk "foo" 1 []
+                                       ["in between foo lines 1,2 1",
+                                        "in between foo lines 1,2 2",
+                                        "in between foo lines 1,2 3",
+                                        "in between foo lines 1,2 4"])
+             catch1a = Patch patch1a
+             catch1b = Patch patch1b
+             catch1c = Patch patch1c
+             catch2a = Patch patch2a
+             catch3a = Patch patch3a
+             catches1 = catch1a `Cons` catch1b `Cons` catch1c `Cons` Nil
+             catches2 = catch2a `Cons` Nil
+             catches3 = catch3a `Cons` Nil
+             megaPatch1 = MegaPatch name1 catches1
+             megaPatch2 = MegaPatch name2 catches2
+             megaPatch3 = MegaPatch name3 catches3
+         initialiseRepo
+         writeMegaPatch megaPatch1
+         writeMegaPatch megaPatch2
+         writeMegaPatch megaPatch3
+         writeInventory names
+         apply megaPatch1
+         apply megaPatch2
+         apply megaPatch3
+initialise _ = error "Unknown arguments to initialise"
+
hunk ./MiniDarcs/Main.hs 16
-              ["init"] -> initialise
+              "init" : args' -> initialise args'
hunk ./MiniDarcs/Patch/Catch.hs 10
+import MiniDarcs.Patch.ShowRead
+import MiniDarcs.Utils
hunk ./MiniDarcs/Patch/Catch.hs 21
+instance Show2 Catch where
+    show2 = show
+
+instance ContextedRead Catch where
+    maybeContextedReads xs = case maybeReads xs of
+                             Just (c, xs') -> Just (Anonymous1 c, xs')
+                             Nothing -> Nothing
+
hunk ./MiniDarcs/Patch/MegaPatch.hs 9
+import MiniDarcs.Patch.Sequence
hunk ./MiniDarcs/Patch/MegaPatch.hs 12
-    MegaPatch :: Name -> Catch from to -> MegaPatch from to
+    MegaPatch :: Name -> Seq Catch from to -> MegaPatch from to
hunk ./MiniDarcs/Patch/Name.hs 3
-    Name(..), SubName(..), inverseName, inverseSubName
+    Name(..), Sign(..), SubName(..), inverseName, inverseSubName
hunk ./MiniDarcs/Patch/Name.hs 9
-    deriving (Eq, Show, Read)
+    deriving (Eq, Read)
+
+instance Show Name where
+    show (Name sign i j) = show sign ++ "-" ++ show i ++ "-" ++ show j
hunk ./MiniDarcs/Patch/Name.hs 15
-    deriving (Eq, Show, Read)
+    deriving (Eq, Read)
+
+instance Show SubName where
+    show (SubName n s) = show n ++ ":" ++ show s
hunk ./MiniDarcs/Patch/Name.hs 21
-    deriving (Eq, Show, Read)
+    deriving (Eq, Read)
+
+instance Show Sign where
+    show Positive = "P"
+    show Negative = "N"
hunk ./MiniDarcs/Patch/Sequence.hs 11
+
+infixr `Cons`
hunk ./MiniDarcs/Repository.hs 4
-    initialise,
+    initialiseRepo,
hunk ./MiniDarcs/Repository.hs 48
-initialise :: IO ()
-initialise = do createDirectory repoRoot
-                createDirectory patchesDir
-                writeInventory []
+initialiseRepo :: IO ()
+initialiseRepo = do createDirectory repoRoot
+                    createDirectory patchesDir
+                    writeInventory []
hunk ./MiniDarcs/Command/Init.hs 19
-             names = [name1, name2, name3]
+             allNames = [name1, name2, name3]
hunk ./MiniDarcs/Command/Init.hs 58
-         writeInventory names
+         writeInventory allNames
addfile ./MiniDarcs/Command/Pull.hs
hunk ./MiniDarcs/Command/Pull.hs 1
+
+module MiniDarcs.Command.Pull (pull) where
+
+import MiniDarcs.Patch.Anonymous
+import MiniDarcs.Patch.Commute
+import MiniDarcs.Patch.Merge
+import MiniDarcs.Patch.MegaPatch
+import MiniDarcs.Patch.Sequence
+import MiniDarcs.Repository
+
+import qualified Data.Set as Set
+import Unsafe.Coerce
+
+pull :: [String] -> IO ()
+pull [repo]
+ = do localNames <- readInventory
+      remoteNames <- inRepo repo readInventory
+      let localNameSet = Set.fromList localNames
+          remoteNameSet = Set.fromList remoteNames
+          commonNameSet = localNameSet `Set.intersection` remoteNameSet
+          -- We can skip over the prefix that is common to both repos
+          isCommon n = n `Set.member` commonNameSet
+          localReadNames = case span isCommon localNames of
+                           (_, x) -> x
+          remoteReadNames = case span isCommon remoteNames of
+                            (_, x) -> x
+      -- XXX Should special-case remote = {}
+
+      -- We need to commute the remainder so that it is partitioned
+      -- into the common patches, and those only in one of the repos
+      localReadPatches <- readMegaPatches localReadNames
+      remoteReadPatches <- inRepo repo $ readMegaPatches remoteReadNames
+      case commuteToPrefix commonNameSet localReadPatches of
+          _ `Then` localPatches ->
+              case commuteToPrefix commonNameSet remoteReadPatches of
+                  _ `Then` remotePatches ->
+                      -- XXX Should special-case local = {}
+                      do let -- XXX Do this more nicely?
+                             hackCtxt :: Seq MegaPatch from to
+                                      -> Seq MegaPatch from' to
+                             hackCtxt = unsafeCoerce
+                             fork = Fork localPatches
+                                         (hackCtxt remotePatches)
+                         case merge fork of
+                             Anonymous1 newLocalPatches ->
+                                 do let localNames' = localNames ++
+                                                      names newLocalPatches
+                                    writeMegaPatches newLocalPatches
+                                    writeInventory localNames'
+pull _ = error "Unknown arguments to pull"
+
hunk ./MiniDarcs/Main.hs 2
-{-# OPTIONS_GHC -fno-warn-unused-imports #-}
--- XXX -fno-warn-unused-imports is a hack while we don't actually have
--- a proper implementation to actually use the code properly
-
hunk ./MiniDarcs/Main.hs 5
-import MiniDarcs.Patch.Merge
+import MiniDarcs.Command.Pull
hunk ./MiniDarcs/Main.hs 13
+              "pull" : args' -> pull args'
addfile ./MiniDarcs/Patch/Anonymous.hs
hunk ./MiniDarcs/Patch/Anonymous.hs 1
+
+module MiniDarcs.Patch.Anonymous where
+
+data Anonymous1 p where
+    Anonymous1 :: p ctxt -> Anonymous1 p
+
hunk ./MiniDarcs/Patch/Catch.hs 4
+import MiniDarcs.Patch.Anonymous
hunk ./MiniDarcs/Patch/Catch.hs 8
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Patch/Catch.hs 15
+import Unsafe.Coerce
+
hunk ./MiniDarcs/Patch/Catch.hs 28
+instance Equality Catch where
+    -- XXX Should actually check the catches are equal!
+    isEqual _ _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Patch/ContextedPatch.hs 4
+import MiniDarcs.Patch.Anonymous
addfile ./MiniDarcs/Patch/Equality.hs
hunk ./MiniDarcs/Patch/Equality.hs 1
+
+module MiniDarcs.Patch.Equality where
+
+class Equality p where
+    isEqual :: p from to1 -> p from to2 -> IsEqual to1 to2
+
+data IsEqual a b where
+    IsEqual :: IsEqual a a
+    NotEqual :: IsEqual a b
hunk ./MiniDarcs/Patch/MegaPatch.hs 2
-module MiniDarcs.Patch.MegaPatch (MegaPatch(..)) where
+module MiniDarcs.Patch.MegaPatch (
+    MegaPatch(..), commuteToPrefix, name, names,
+    ) where
hunk ./MiniDarcs/Patch/MegaPatch.hs 9
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Patch/MegaPatch.hs 14
+import Data.Set (Set)
+import qualified Data.Set as Set
+import Unsafe.Coerce
+
hunk ./MiniDarcs/Patch/MegaPatch.hs 21
+name :: MegaPatch from to -> Name
+name (MegaPatch n _) = n
+
+names :: Seq MegaPatch from to -> [Name]
+names Nil = []
+names (Cons p ps) = name p : names ps
+
+instance Equality MegaPatch where
+    -- XXX Should actually check the MegaPatches are equal!
+    isEqual _ _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Patch/MegaPatch.hs 43
+-- XXX This is a bit inefficient, as we don't actually want the prefix
+-- We could also stop earlier if we kept track of how many unwanted
+-- patches there were left
+commuteToPrefix :: Set Name -> Seq MegaPatch from to
+                -> Then (Seq MegaPatch) (Seq MegaPatch) from to
+commuteToPrefix _ Nil = Nil `Then` Nil
+commuteToPrefix ns (p@(MegaPatch n _) `Cons` ps)
+    = case commuteToPrefix ns ps of
+      qs `Then` rs
+       | n `Set.member` ns ->
+          Cons p qs `Then` rs
+       | otherwise ->
+          case commute (p `Then` qs) of
+          Just (qs' `Then` p') ->
+              qs' `Then` Cons p' rs
+          Nothing -> error "XXX Can't happen: commuteToPrefix failed"
+
hunk ./MiniDarcs/Patch/Merge.hs 4
+import MiniDarcs.Patch.Anonymous
hunk ./MiniDarcs/Patch/Merge.hs 7
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Patch/Merge.hs 9
+import MiniDarcs.Patch.MegaPatch
+import MiniDarcs.Patch.Sequence
hunk ./MiniDarcs/Patch/Merge.hs 12
-data Fork to1 to2 where
-    Fork :: Catch from to1 -> Catch from to2 -> Fork to1 to2
+data Fork p q to1 to2 where
+    Fork :: p from to1 -> q from to2 -> Fork p q to1 to2
hunk ./MiniDarcs/Patch/Merge.hs 15
-data Join from1 from2 where
-    Join :: Catch from1 to -> Catch from2 to -> Join from1 from2
+class Commute p q => Merge p q where
+    merge :: Fork p q mid1 mid2 -> Anonymous1 (q mid1)
+
+instance Merge Catch Catch where
+    merge (Fork c1 c2) = case commute (invert c1 `Then` c2) of
+                         Just (c2' `Then` _) ->
+                             Anonymous1 c2'
+                         -- XXX Incomplete
+                         Nothing -> undefined
+
+instance (Equality q, Merge p q) => Merge p (Seq q) where
+    merge (Fork _ Nil) = Anonymous1 Nil
+    merge (Fork p (Cons q qs))
+        = case merge (Fork p q) of
+          Anonymous1 q' ->
+              case commute (p `Then` q') of
+              Just (q'' `Then` p') ->
+                  case isEqual q q'' of
+                  IsEqual ->
+                      case merge (Fork p' qs) of
+                      Anonymous1 qs' ->
+                          Anonymous1 (Cons q' qs')
+                  NotEqual -> error "XXX Can't happen merge/equal"
+              Nothing -> error "XXX Can't happen merge/commute"
+
+instance (Equality q, Merge p q) => Merge (Seq p) (Seq q) where
+    merge (Fork Nil qs) = Anonymous1 qs
+    merge (Fork (Cons p ps) qs) = case merge (Fork p qs) of
+                                  Anonymous1 qs' ->
+                                      merge (Fork ps qs')
+
+instance Merge MegaPatch MegaPatch where
+    merge (Fork (MegaPatch _ ps) (MegaPatch nq qs))
+        = case merge (Fork ps qs) of
+          Anonymous1 qs' ->
+              Anonymous1 (MegaPatch nq qs')
hunk ./MiniDarcs/Patch/Merge.hs 52
-merge :: Fork mid1 mid2 -> Join mid1 mid2
-merge (Fork c1 c2) = case commute (invert c1 `Then` c2) of
-                     Just (c2' `Then` ic1') ->
-                         Join c2' (invert ic1')
-                     -- XXX Incomplete
-                     Nothing -> undefined
hunk ./MiniDarcs/Patch/Name.hs 9
-    deriving (Eq, Read)
+    deriving (Eq, Ord, Read)
hunk ./MiniDarcs/Patch/Name.hs 21
-    deriving (Eq, Read)
+    deriving (Eq, Ord, Read)
hunk ./MiniDarcs/Patch/Patch.hs 4
+import MiniDarcs.Patch.Anonymous
hunk ./MiniDarcs/Patch/Sequence.hs 4
+import MiniDarcs.Patch.Anonymous
hunk ./MiniDarcs/Patch/ShowRead.hs 7
+import MiniDarcs.Patch.Anonymous
+
hunk ./MiniDarcs/Patch/ShowRead.hs 15
-data Anonymous1 p where
-    Anonymous1 :: p ctxt -> Anonymous1 p
-
hunk ./MiniDarcs/Repository.hs 3
-    repoRoot,
+    inRepo,
hunk ./MiniDarcs/Repository.hs 8
+    readMegaPatches,
hunk ./MiniDarcs/Repository.hs 10
+    writeMegaPatches,
hunk ./MiniDarcs/Repository.hs 15
+import MiniDarcs.Patch.Sequence
hunk ./MiniDarcs/Repository.hs 18
+import Control.Exception
hunk ./MiniDarcs/Repository.hs 21
+import Unsafe.Coerce
+
+inRepo :: FilePath -> IO a -> IO a
+inRepo dir io = do curDir <- getCurrentDirectory
+                   (setCurrentDirectory dir >> io)
+                       `finally` setCurrentDirectory curDir
hunk ./MiniDarcs/Repository.hs 37
-                       Just names -> return names
+                       Just ns -> return ns
hunk ./MiniDarcs/Repository.hs 49
+readMegaPatches :: [Name] -> IO (Seq MegaPatch from to)
+readMegaPatches [] = return (hackCtxt Nil)
+    where -- XXX Do this more nicely?
+          hackCtxt :: Seq MegaPatch from to1 -> Seq MegaPatch from to2
+          hackCtxt = unsafeCoerce
+readMegaPatches (n : ns) = do p <- readMegaPatch n
+                              ps <- readMegaPatches ns
+                              return (p `Cons` ps)
+
hunk ./MiniDarcs/Repository.hs 64
+writeMegaPatches :: Seq MegaPatch from to -> IO ()
+writeMegaPatches Nil = return ()
+writeMegaPatches (Cons p ps) = do writeMegaPatch p
+                                  writeMegaPatches ps
+
hunk ./minidarcs.cabal 20
-    Build-Depends: base, directory, filepath
+    Build-Depends: base, directory, filepath, containers
hunk ./MiniDarcs/Command/Init.hs 4
-import MiniDarcs.Patch.Apply
-import MiniDarcs.Patch.Catch
-import MiniDarcs.Patch.MegaPatch
-import MiniDarcs.Patch.Name
-import MiniDarcs.Patch.Patch
-import MiniDarcs.Patch.Primitive
-import MiniDarcs.Patch.Sequence
hunk ./MiniDarcs/Command/Init.hs 8
-initialise ["test"]
-    = do let name1 = Name Positive 111 111
-             name2 = Name Positive 222 222
-             name3 = Name Positive 333 333
-             allNames = [name1, name2, name3]
-             patch1a = Primitive (SubName name1 1)
-                                 (AddFile "foo")
-             patch1b = Primitive (SubName name1 2)
-                                 (AddFile "bar")
-             patch1c = Primitive (SubName name1 3)
-                                 (Hunk "foo" 0 []
-                                       ["foo line 1",
-                                        "foo line 2",
-                                        "foo line 3",
-                                        "foo line 4",
-                                        "foo line 5"])
-             patch2a = Primitive (SubName name2 1)
-                                 (Hunk "foo" 2
-                                       ["foo line 3", "foo line 4"]
-                                       ["in between foo lines 2,5 1",
-                                        "in between foo lines 2,5 2",
-                                        "in between foo lines 2,5 3"])
-             patch3a = Primitive (SubName name3 1)
-                                 (Hunk "foo" 1 []
-                                       ["in between foo lines 1,2 1",
-                                        "in between foo lines 1,2 2",
-                                        "in between foo lines 1,2 3",
-                                        "in between foo lines 1,2 4"])
-             catch1a = Patch patch1a
-             catch1b = Patch patch1b
-             catch1c = Patch patch1c
-             catch2a = Patch patch2a
-             catch3a = Patch patch3a
-             catches1 = catch1a `Cons` catch1b `Cons` catch1c `Cons` Nil
-             catches2 = catch2a `Cons` Nil
-             catches3 = catch3a `Cons` Nil
-             megaPatch1 = MegaPatch name1 catches1
-             megaPatch2 = MegaPatch name2 catches2
-             megaPatch3 = MegaPatch name3 catches3
-         initialiseRepo
-         writeMegaPatch megaPatch1
-         writeMegaPatch megaPatch2
-         writeMegaPatch megaPatch3
-         writeInventory allNames
-         apply megaPatch1
-         apply megaPatch2
-         apply megaPatch3
addfile ./MiniDarcs/Command/Test.hs
hunk ./MiniDarcs/Command/Test.hs 1
+
+module MiniDarcs.Command.Test (test) where
+
+import MiniDarcs.Patch.Apply
+import MiniDarcs.Patch.Catch
+import MiniDarcs.Patch.MegaPatch
+import MiniDarcs.Patch.Name
+import MiniDarcs.Patch.Patch
+import MiniDarcs.Patch.Primitive
+import MiniDarcs.Patch.Sequence
+import MiniDarcs.Repository
+
+import Unsafe.Coerce
+
+test :: [String] -> IO ()
+test [] = do initialiseRepo
+             writeMegaPatches megaPatchesPQR
+             writeInventory namesPQR
+             apply megaPatchesPQR
+test ["q"] = do initialiseRepo
+                writeMegaPatches megaPatchesPQ
+                writeInventory namesPQ
+                apply megaPatchesPQR
+test ["r"] = do initialiseRepo
+                writeMegaPatches megaPatchesPR
+                writeInventory namesPR
+                apply megaPatchesPQR
+test _ = error "Unknown arguments to test"
+
+nameP, nameQ, nameR :: Name
+nameP = Name Positive 111 111
+nameQ = Name Positive 222 222
+nameR = Name Positive 333 333
+
+namesPQR, namesPQ, namesPR :: [Name]
+namesPQR = [nameP, nameQ, nameR]
+namesPQ = [nameP, nameQ]
+namesPR = [nameP, nameR]
+
+patchP1 :: Patch O O_P1
+patchP1 = Primitive (SubName nameP 1)
+                    (AddFile "foo")
+patchP2 :: Patch O_P1 O_P12
+patchP2 = Primitive (SubName nameP 2)
+                    (AddFile "bar")
+patchP3 :: Patch O_P12 O_P123
+patchP3 = Primitive (SubName nameP 3)
+                    (Hunk "foo" 0 []
+                          ["foo line 1",
+                           "foo line 2",
+                           "foo line 3",
+                           "foo line 4",
+                           "foo line 5"])
+patchPQ1 :: Patch O_P123 O_P123_Q1
+patchPQ1 = Primitive (SubName nameQ 1)
+                     (Hunk "foo" 2
+                           ["foo line 3", "foo line 4"]
+                           ["in between foo lines 2,5 1",
+                            "in between foo lines 2,5 2",
+                            "in between foo lines 2,5 3"])
+patchPQR1 :: Patch O_P123_Q1 O_P123_Q1_R1
+patchPQR1 = Primitive (SubName nameR 1)
+                      (Hunk "foo" 1 []
+                            ["in between foo lines 1,2 1",
+                             "in between foo lines 1,2 2",
+                             "in between foo lines 1,2 3",
+                             "in between foo lines 1,2 4"])
+
+patchPR1 :: Patch O_P123 O_P123_R1
+-- R actually looks identical regardless of whether or not Q has
+-- already been applied
+patchPR1 = unsafeCoerce patchPQR1
+
+catchP1 :: Catch O O_P1
+catchP1 = Patch patchP1
+
+catchP2 :: Catch O_P1 O_P12
+catchP2 = Patch patchP2
+
+catchP3 :: Catch O_P12 O_P123
+catchP3 = Patch patchP3
+
+catchPQ1 :: Catch O_P123 O_P123_Q1
+catchPQ1 = Patch patchPQ1
+
+catchPR1 :: Catch O_P123 O_P123_R1
+catchPR1 = Patch patchPR1
+
+catchPQR1 :: Catch O_P123_Q1 O_P123_Q1_R1
+catchPQR1 = Patch patchPQR1
+
+catchesP :: Seq Catch O O_P123
+catchesP = catchP1 `Cons` catchP2 `Cons` catchP3 `Cons` Nil
+
+catchesPQ :: Seq Catch O_P123 O_P123_Q1
+catchesPQ = catchPQ1 `Cons` Nil
+
+catchesPR :: Seq Catch O_P123 O_P123_R1
+catchesPR = catchPR1 `Cons` Nil
+
+catchesPQR :: Seq Catch O_P123_Q1 O_P123_Q1_R1
+catchesPQR = catchPQR1 `Cons` Nil
+
+megaPatchP :: MegaPatch O O_P123
+megaPatchP = MegaPatch nameP catchesP
+
+megaPatchPQ :: MegaPatch O_P123 O_P123_Q1
+megaPatchPQ = MegaPatch nameQ catchesPQ
+
+megaPatchPR :: MegaPatch O_P123 O_P123_R1
+megaPatchPR = MegaPatch nameR catchesPR
+
+megaPatchPQR :: MegaPatch O_P123_Q1 O_P123_Q1_R1
+megaPatchPQR = MegaPatch nameR catchesPQR
+
+megaPatchesPQR :: Seq MegaPatch O O_P123_Q1_R1
+megaPatchesPQR = megaPatchP `Cons` megaPatchPQ `Cons` megaPatchPQR `Cons` Nil
+
+megaPatchesPQ :: Seq MegaPatch O O_P123_Q1
+megaPatchesPQ = megaPatchP `Cons` megaPatchPQ `Cons` Nil
+
+megaPatchesPR :: Seq MegaPatch O O_P123_R1
+megaPatchesPR = megaPatchP `Cons` megaPatchPR `Cons` Nil
+
+data O
+data O_P1
+data O_P12
+data O_P123
+data O_P123_Q1
+data O_P123_R1
+data O_P123_Q1_R1
+
hunk ./MiniDarcs/Main.hs 6
+import MiniDarcs.Command.Test
hunk ./MiniDarcs/Main.hs 15
+              "test" : args' -> test args'
hunk ./minidarcs.cabal 25
-                IncoherentInstances
+                IncoherentInstances,
+                EmptyDataDecls
hunk ./MiniDarcs/Patch/Patch.hs 27
-        = do (q' `Then` p') <- commute (p `Then` q)
-             return (Primitive nq q' `Then` Primitive np p')
+        = if np == inverseSubName nq
+          then Nothing
+          else do (q' `Then` p') <- commute (p `Then` q)
+                  return (Primitive nq q' `Then` Primitive np p')
hunk ./MiniDarcs/Command/Test.hs 23
-                apply megaPatchesPQR
+                apply megaPatchesPQ
hunk ./MiniDarcs/Command/Test.hs 27
-                apply megaPatchesPQR
+                apply megaPatchesPR
hunk ./MiniDarcs/Patch/Primitive.hs 13
+import Unsafe.Coerce
hunk ./MiniDarcs/Patch/Primitive.hs 27
-    commute (AddFile p_f `Then` AddFile q_f)
-        = Just (AddFile q_f `Then` AddFile p_f)
-    commute _ = Nothing
+    commute (p `Then` q)
+        = case (p, q) of
+          (AddFile p_f, AddFile q_f) ->
+              sanityCheck "AddFile f f" (p_f /= q_f) qp
+          (RmFile p_f, RmFile q_f) ->
+              sanityCheck "RmFile f f" (p_f /= q_f) qp
+          (AddFile p_f, RmFile q_f)
+           | p_f == q_f -> Nothing
+           | otherwise -> qp
+          (RmFile p_f, AddFile q_f)
+           | p_f == q_f -> Nothing
+           | otherwise -> qp
+          (AddFile p_f, Hunk q_f _ _ _)
+           | p_f == q_f -> Nothing
+           | otherwise -> qp
+          (Hunk p_f _ _ _, AddFile q_f) ->
+              sanityCheck "AddFile Hunk" (p_f /= q_f) qp
+          (RmFile p_f, Hunk q_f _ _ _) ->
+              sanityCheck "RmFile Hunk" (p_f /= q_f) qp
+          (Hunk p_f _ _ _, RmFile q_f)
+           | p_f == q_f -> Nothing
+           | otherwise -> qp
+          (Hunk p_f p_skip p_old p_new, Hunk q_f q_skip q_old q_new)
+           | p_f /= q_f -> qp
+           | p_skip + genericLength p_new < q_skip ->
+              let movement = genericLength p_new - genericLength p_old
+              in Just (Hunk q_f (q_skip - movement) q_old q_new `Then` p')
+           | q_skip + genericLength q_old < p_skip ->
+              let movement = genericLength q_new - genericLength q_old
+              in Just (q' `Then` Hunk p_f (p_skip - movement) p_old p_new)
+           -- XXX Are any more cases OK?
+           | otherwise -> Nothing
+        where p' = unsafeCoerce p
+              q' = unsafeCoerce q
+              qp = Just (q' `Then` p')
hunk ./MiniDarcs/Utils.hs 5
+    sanityCheck,
hunk ./MiniDarcs/Utils.hs 55
+sanityCheck :: String -> Bool -> a -> a
+sanityCheck _ True x = x
+sanityCheck str False _ = error ("Insanity: " ++ str)
+
hunk ./MiniDarcs/Patch/Name.hs 6
+import MiniDarcs.Utils
+
hunk ./MiniDarcs/Patch/Name.hs 11
-    deriving (Eq, Ord, Read)
+    deriving (Eq, Ord)
hunk ./MiniDarcs/Patch/Name.hs 16
+instance Read Name where
+    readsPrec _ xs = case maybeReads xs of
+                     Just (sign, '-' : xs') ->
+                         case maybeReads xs' of
+                         Just (i, '-' : xs'') ->
+                             case maybeReads xs'' of
+                             Just (j, xs''') ->
+                                 [(Name sign i j, xs''')]
+                             Nothing -> []
+                         _ -> []
+                     _ -> []
+
hunk ./MiniDarcs/Patch/Name.hs 29
-    deriving (Eq, Read)
+    deriving Eq
hunk ./MiniDarcs/Patch/Name.hs 34
+instance Read SubName where
+    readsPrec _ xs = case maybeReads xs of
+                     Just (n, ':' : xs') ->
+                         case maybeReads xs' of
+                         Just (s, xs'') ->
+                             [(SubName n s, xs'')]
+                         Nothing -> []
+                     _ -> []
+
hunk ./MiniDarcs/Patch/Name.hs 44
-    deriving (Eq, Ord, Read)
+    deriving (Eq, Ord)
hunk ./MiniDarcs/Patch/Name.hs 50
+instance Read Sign where
+    readsPrec _ ('P' : xs) = [(Positive, xs)]
+    readsPrec _ ('N' : xs) = [(Negative, xs)]
+    readsPrec _ _ = []
+
hunk ./MiniDarcs/Patch/Primitive.hs 25
--- XXX Wrong:
hunk ./MiniDarcs/Patch/Name.hs 8
+import Data.Char
+
hunk ./MiniDarcs/Patch/Name.hs 19
-    readsPrec _ xs = case maybeReads xs of
+    readsPrec _ xs = case maybeReads $ dropWhile isSpace xs of
hunk ./MiniDarcs/Patch/Sequence.hs 10
+import Data.Char
hunk ./MiniDarcs/Patch/Sequence.hs 47
-                         case ys of
+                         case dropWhile isSpace ys of
hunk ./MiniDarcs/Command/Pull.hs 5
+import MiniDarcs.Patch.Apply
hunk ./MiniDarcs/Command/Pull.hs 50
+                                    apply newLocalPatches
hunk ./MiniDarcs/Patch/Commute.hs 10
+
hunk ./MiniDarcs/Patch/Equality.hs 2
-module MiniDarcs.Patch.Equality where
+module MiniDarcs.Patch.Equality (Equality(..), IsEqual(..), sameStart) where
+
+import Unsafe.Coerce
hunk ./MiniDarcs/Patch/Equality.hs 11
-    NotEqual :: IsEqual a b
+
+sameStart :: p from1 to1 -> q from2 to2 -> IsEqual from1 from2
+sameStart _ _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Patch/MegaPatch.hs 2
-module MiniDarcs.Patch.MegaPatch (
-    MegaPatch(..), commuteToPrefix, name, names,
-    ) where
+module MiniDarcs.Patch.MegaPatch (MegaPatch(..), commuteToPrefix) where
hunk ./MiniDarcs/Patch/MegaPatch.hs 12
-import Data.Set (Set)
-import qualified Data.Set as Set
hunk ./MiniDarcs/Patch/MegaPatch.hs 17
-name :: MegaPatch from to -> Name
-name (MegaPatch n _) = n
-
-names :: Seq MegaPatch from to -> [Name]
-names Nil = []
-names (Cons p ps) = name p : names ps
+instance Named MegaPatch Name where
+    name (MegaPatch n _) = n
hunk ./MiniDarcs/Patch/MegaPatch.hs 35
--- XXX This is a bit inefficient, as we don't actually want the prefix
--- We could also stop earlier if we kept track of how many unwanted
--- patches there were left
-commuteToPrefix :: Set Name -> Seq MegaPatch from to
-                -> Then (Seq MegaPatch) (Seq MegaPatch) from to
-commuteToPrefix _ Nil = Nil `Then` Nil
-commuteToPrefix ns (p@(MegaPatch n _) `Cons` ps)
-    = case commuteToPrefix ns ps of
-      qs `Then` rs
-       | n `Set.member` ns ->
-          Cons p qs `Then` rs
-       | otherwise ->
-          case commute (p `Then` qs) of
-          Just (qs' `Then` p') ->
-              qs' `Then` Cons p' rs
-          Nothing -> error "XXX Can't happen: commuteToPrefix failed"
-
hunk ./MiniDarcs/Patch/Merge.hs 7
+import MiniDarcs.Patch.ContextedPatch
hunk ./MiniDarcs/Patch/Merge.hs 13
+import qualified Data.Set as Set
+
hunk ./MiniDarcs/Patch/Merge.hs 22
-    merge (Fork c1 c2) = case commute (invert c1 `Then` c2) of
-                         Just (c2' `Then` _) ->
-                             Anonymous1 c2'
-                         -- XXX Incomplete
-                         Nothing -> undefined
+    merge (Fork c1 c2)
+        = case commute (invert c1 `Then` c2) of
+          Just (c2' `Then` _) ->
+              Anonymous1 c2'
+          Nothing ->
+              case (c1, c2) of
+              (Patch p, Patch q) ->
+                  let pConflict = ContextedPatch Nil p
+                      qIdentity = ContextedPatch Nil q
+                  in Anonymous1 $ Conflictor (invert p `Cons` Nil)
+                                             [pConflict]
+                                             qIdentity
+              (Conflictor pEffect _ pIdentity, Patch q) ->
+                  let qIdentity = addToContext (invert pEffect)
+                                $ ContextedPatch Nil q
+                  in Anonymous1 (Conflictor Nil [pIdentity] qIdentity)
+              (Patch p, Conflictor qEffect qConflicts qIdentity) ->
+                  let pConflict = addToContext (invert qEffect)
+                                $ ContextedPatch Nil p
+                  in Anonymous1 $ Conflictor (invert p `Cons` qEffect)
+                                             (pConflict : qConflicts)
+                                             qIdentity
+              (Conflictor pEffect _ pIdentity,
+               Conflictor qEffect qConflicts qIdentity) ->
+                  let pEffectNames = names pEffect
+                      qEffectNames = names qEffect
+                      pEffectNameSet = Set.fromList pEffectNames
+                      qEffectNameSet = Set.fromList qEffectNames
+                      commonEffectNameSet = pEffectNameSet `Set.intersection`
+                                            qEffectNameSet
+                  in case commuteToPrefix commonEffectNameSet pEffect of
+                     _ `Then` pEffect' ->
+                         case commuteToPrefix commonEffectNameSet qEffect of
+                         _ `Then` qEffect' ->
+                             case sameStart pEffect' qEffect' of
+                             IsEqual ->
+                                 case commute (invert pEffect' `Then` qEffect') of
+                                 Nothing ->
+                                     error "XXX Can't happen? catch merge commute"
+                                 Just (qEffect'' `Then` ipEffect'') ->
+                                     let qConflicts' = map (addToContext ipEffect'') qConflicts
+                                         qConflicts'' = addToContext (invert qEffect'') pIdentity
+                                                      : qConflicts'
+                                         qIdentity' = addToContext ipEffect'' qIdentity
+                                     in Anonymous1 (Conflictor qEffect'' qConflicts'' qIdentity')
hunk ./MiniDarcs/Patch/Merge.hs 80
-                  NotEqual -> error "XXX Can't happen merge/equal"
hunk ./MiniDarcs/Patch/Name.hs 3
-    Name(..), Sign(..), SubName(..), inverseName, inverseSubName
+    Named(..), Name(..), Sign(..), SubName(..), inverseName, inverseSubName
hunk ./MiniDarcs/Patch/Name.hs 10
+class Named p n | p -> n where
+    name :: p from to -> n
+
hunk ./MiniDarcs/Patch/Name.hs 34
-    deriving Eq
+    deriving (Eq, Ord)
hunk ./MiniDarcs/Patch/Patch.hs 16
+
+instance Named Patch SubName where
+    name (Primitive sn _) = sn
hunk ./MiniDarcs/Patch/Sequence.hs 2
-module MiniDarcs.Patch.Sequence (Seq(..)) where
+module MiniDarcs.Patch.Sequence (Seq(..), names, commuteToPrefix) where
hunk ./MiniDarcs/Patch/Sequence.hs 8
+import MiniDarcs.Patch.Name
hunk ./MiniDarcs/Patch/Sequence.hs 13
+import Data.Set (Set)
+import qualified Data.Set as Set
hunk ./MiniDarcs/Patch/Sequence.hs 23
+names :: Named p n => Seq p from to -> [n]
+names Nil = []
+names (Cons p ps) = name p : names ps
+
hunk ./MiniDarcs/Patch/Sequence.hs 88
+-- XXX This is a bit inefficient, as we don't actually want the prefix
+-- We could also stop earlier if we kept track of how many unwanted
+-- patches there were left
+commuteToPrefix :: (Ord n, Commute p p, Named p n)
+                => Set n -> Seq p from to
+                -> Then (Seq p) (Seq p) from to
+commuteToPrefix _ Nil = Nil `Then` Nil
+commuteToPrefix ns (p `Cons` ps)
+    = case commuteToPrefix ns ps of
+      qs `Then` rs
+       | name p `Set.member` ns ->
+          Cons p qs `Then` rs
+       | otherwise ->
+          case commute (p `Then` qs) of
+          Just (qs' `Then` p') ->
+              qs' `Then` Cons p' rs
+          Nothing -> error "XXX Can't happen: commuteToPrefix failed"
+
hunk ./minidarcs.cabal 22
-    Extensions: GADTs, MultiParamTypeClasses,
+    Extensions: GADTs, EmptyDataDecls,
+                MultiParamTypeClasses, FunctionalDependencies,
hunk ./minidarcs.cabal 26
-                IncoherentInstances,
-                EmptyDataDecls
+                IncoherentInstances
hunk ./.boring 6
+^head$
+^stable$
hunk ./MiniDarcs/Command/Pull.hs 7
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Command/Pull.hs 9
-import MiniDarcs.Patch.MegaPatch
hunk ./MiniDarcs/Command/Pull.hs 13
-import Unsafe.Coerce
hunk ./MiniDarcs/Command/Pull.hs 34
+      -- XXX Should special-case local = {}
hunk ./MiniDarcs/Command/Pull.hs 38
-                      -- XXX Should special-case local = {}
-                      do let -- XXX Do this more nicely?
-                             hackCtxt :: Seq MegaPatch from to
-                                      -> Seq MegaPatch from' to
-                             hackCtxt = unsafeCoerce
-                             fork = Fork localPatches
-                                         (hackCtxt remotePatches)
-                         case merge fork of
-                             Anonymous1 newLocalPatches ->
-                                 do let localNames' = localNames ++
-                                                      names newLocalPatches
-                                    writeMegaPatches newLocalPatches
-                                    apply newLocalPatches
-                                    writeInventory localNames'
+                      case sameStart localPatches remotePatches of
+                      IsEqual ->
+                          case merge (Fork localPatches remotePatches) of
+                          Anonymous1 newLocalPatches ->
+                              do let localNames' = localNames ++
+                                                   names newLocalPatches
+                                 writeMegaPatches newLocalPatches
+                                 apply newLocalPatches
+                                 writeInventory localNames'
hunk ./MiniDarcs/Command/Pull.hs 8
+import MiniDarcs.Patch.MegaPatch
hunk ./MiniDarcs/Command/Pull.hs 10
+import MiniDarcs.Patch.Name
hunk ./MiniDarcs/Command/Pull.hs 40
-                      case sameStart localPatches remotePatches of
-                      IsEqual ->
-                          case merge (Fork localPatches remotePatches) of
-                          Anonymous1 newLocalPatches ->
-                              do let localNames' = localNames ++
-                                                   names newLocalPatches
-                                 writeMegaPatches newLocalPatches
-                                 apply newLocalPatches
-                                 writeInventory localNames'
+                      doPull localNames localPatches remotePatches
hunk ./MiniDarcs/Command/Pull.hs 43
+-- GHC 6.8 can't cope if this is inlined, so we make it a separate function
+doPull :: [Name] -> Seq MegaPatch from1 to1 -> Seq MegaPatch from2 to2 -> IO ()
+doPull localNames localPatches remotePatches =
+    case sameStart localPatches remotePatches of
+    IsEqual ->
+        case merge (Fork localPatches remotePatches) of
+        Anonymous1 newLocalPatches ->
+            do let localNames' = localNames ++
+                                 names newLocalPatches
+               writeMegaPatches newLocalPatches
+               apply newLocalPatches
+               writeInventory localNames'
+
hunk ./MiniDarcs/Patch/Merge.hs 12
+import MiniDarcs.Utils
hunk ./MiniDarcs/Patch/Merge.hs 61
-                                     error "XXX Can't happen? catch merge commute"
+                                     panic "catch merge commute"
hunk ./MiniDarcs/Patch/Merge.hs 81
-              Nothing -> error "XXX Can't happen merge/commute"
+              Nothing -> panic "sequence merge commute"
hunk ./MiniDarcs/Patch/Sequence.hs 10
+import MiniDarcs.Utils
hunk ./MiniDarcs/Patch/Sequence.hs 105
-          Nothing -> error "XXX Can't happen: commuteToPrefix failed"
+          Nothing -> panic "commuteToPrefix failed"
hunk ./MiniDarcs/Repository.hs 38
-                       Nothing -> error "Corrupt inventory?"
+                       Nothing -> panic "Corrupt inventory?"
hunk ./MiniDarcs/Repository.hs 62
-                         Nothing -> error "Corrupt patch?"
+                         Nothing -> panic "Corrupt patch?"
hunk ./MiniDarcs/Utils.hs 6
+    panic,
hunk ./MiniDarcs/Utils.hs 60
+panic :: String -> a
+panic str = error ("Can't happen: " ++ str)
+
hunk ./MiniDarcs/Patch/Equality.hs 2
-module MiniDarcs.Patch.Equality (Equality(..), IsEqual(..), sameStart) where
+module MiniDarcs.Patch.Equality (
+    Equality(..), IsEqual(..), sameStart, sameEnd
+    ) where
hunk ./MiniDarcs/Patch/Equality.hs 17
+sameEnd :: p from1 to1 -> q from2 to2 -> IsEqual to1 to2
+sameEnd _ _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Patch/Sequence.hs 7
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Patch/Sequence.hs 17
-import Unsafe.Coerce
hunk ./MiniDarcs/Patch/Sequence.hs 36
-                     Just (Anonymous1 ps, xs') -> [(hackCtxt ps, xs')]
+                     Just (Anonymous1 ps, xs') -> ret ps xs'
hunk ./MiniDarcs/Patch/Sequence.hs 38
-        where -- XXX Do this more nicely?
-              hackCtxt :: Seq p from to1 -> Seq p from to2
-              hackCtxt = unsafeCoerce
+        where -- GHC 6.8 can't cope if this is inlined
+              ret :: Seq p from cxt -> String -> [(Seq p from to, String)]
+              ret ps xs' = let resType :: Seq p from to
+                               resType = undefined
+                           in case sameEnd ps resType of
+                              IsEqual -> [(ps, xs')]
hunk ./minidarcs.cabal 23
+                ScopedTypeVariables,
hunk ./MiniDarcs/Patch/Equality.hs 3
-    Equality(..), IsEqual(..), sameStart, sameEnd
+    Equality(..), IsEqual(..), sameStart, sameEnd, startIsEnd
hunk ./MiniDarcs/Patch/Equality.hs 20
+startIsEnd :: p from to -> IsEqual from to
+startIsEnd _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Repository.hs 13
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Repository.hs 22
-import Unsafe.Coerce
hunk ./MiniDarcs/Repository.hs 49
-readMegaPatches :: [Name] -> IO (Seq MegaPatch from to)
-readMegaPatches [] = return (hackCtxt Nil)
-    where -- XXX Do this more nicely?
-          hackCtxt :: Seq MegaPatch from to1 -> Seq MegaPatch from to2
-          hackCtxt = unsafeCoerce
+readMegaPatches :: forall from to . [Name] -> IO (Seq MegaPatch from to)
+readMegaPatches [] = let resType :: Seq MegaPatch from to
+                         resType = undefined
+                     in case startIsEnd resType of
+                        IsEqual -> return Nil
hunk ./MiniDarcs/Patch/Catch.hs 47
-    -- XXX Actually, this is just wrong: We need the inverse patches in
-    -- the conflicts
hunk ./MiniDarcs/Patch/Catch.hs 49
-                     (map (addToContext effect) conflicts)
-                     (addToContext effect identity)
+                     (map invertContextedPatch conflicts)
+                     (invertContextedPatch identity)
+        where invertContextedPatch (ContextedPatch ps p)
+                  = addToContext effect
+                  $ addToContext ps
+                  $ ContextedPatch (Cons p Nil) (invert p)
hunk ./MiniDarcs/Patch/Catch.hs 10
+import MiniDarcs.Patch.Name
hunk ./MiniDarcs/Patch/Catch.hs 40
+    -- [p] [q]
hunk ./MiniDarcs/Patch/Catch.hs 44
+    -- [p] [p^, {:p}, :q]
+    commute (Patch p `Then` Conflictor qEffect [qConflict] qIdentity)
+     | name p == name1 qConflict
+        = case qEffect of
+          Cons pInverse Nil ->
+              let p' = invert pInverse
+              in case sameStart p p' of
+                 IsEqual ->
+                  case isEqual p (invert pInverse) of
+                  IsEqual ->
+                   case qConflict of
+                   ContextedPatch Nil p'' ->
+                    case isEqual p p'' of
+                    IsEqual ->
+                     case qIdentity of
+                     ContextedPatch Nil q ->
+                         Just (Patch q
+                               `Then`
+                               Conflictor (invert q `Cons` Nil)
+                                          [ContextedPatch Nil q]
+                                          (ContextedPatch Nil p))
+                     _ -> panic "XXX"
+                   _ -> panic "XXX"
+          _ -> panic "XXX"
hunk ./MiniDarcs/Patch/ContextedPatch.hs 7
+import MiniDarcs.Patch.Name
hunk ./MiniDarcs/Patch/ContextedPatch.hs 17
+instance Named1 ContextedPatch SubName where
+    name1 (ContextedPatch _ p) = name p
+
hunk ./MiniDarcs/Patch/Name.hs 3
-    Named(..), Name(..), Sign(..), SubName(..), inverseName, inverseSubName
+    Named(..), Named1(..),
+    Name(..), Sign(..), SubName(..), inverseName, inverseSubName
hunk ./MiniDarcs/Patch/Name.hs 14
+class Named1 p n | p -> n where
+    name1 :: p from -> n
+
hunk ./MiniDarcs/Patch/Patch.hs 7
+import MiniDarcs.Patch.Equality
hunk ./MiniDarcs/Patch/Patch.hs 14
+import Unsafe.Coerce
+
hunk ./MiniDarcs/Patch/Patch.hs 20
+instance Equality Patch where
+    -- XXX Should actually check the catches are equal!
+    isEqual _ _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Patch/Catch.hs 47
+    -- To sanity check, we need to confirm:
+    --     p = qEffect^
+    --     qConflict = : p
+    --     qIdentity = : q
hunk ./MiniDarcs/Patch/Catch.hs 72
+    -- [r, X, y] [, {y}, r^:q]
+    commute (Conflictor pEffect pConflicts pIdentity
+             `Then`
+             Conflictor qEffect [qConflict] qIdentity)
+     | name1 pIdentity == name1 qConflict
+    -- To sanity check, we need to confirm:
+    --     qEffect = \epsilon
+    --     qConflict = pIdentity
+    --     qIdentity = pEffect^ : q
+        = case qEffect of
+          Nil ->
+              -- XXX We need equality of contexted patches to be able to
+              -- do this:
+              -- case isEqual qConflict pIdentity of
+              -- IsEqual ->
+              case qIdentity of
+              ContextedPatch qCxt q ->
+                  case isEqual qCxt (invert pEffect) of
+                  IsEqual ->
+                      Just (Patch q
+                            `Then`
+                            Conflictor (invert q `Cons` pEffect)
+                                       (qIdentity : pConflicts)
+                                       pIdentity)
+          _ -> panic "XXX"
hunk ./MiniDarcs/Patch/Patch.hs 21
-    -- XXX Should actually check the catches are equal!
+    -- XXX Should actually check the patches are equal!
hunk ./MiniDarcs/Patch/Sequence.hs 17
+import Unsafe.Coerce
hunk ./MiniDarcs/Patch/Sequence.hs 25
+instance (Equality p, Commute p p, Named p n) => Equality (Seq p) where
+    -- XXX Should actually check the sequences are equal!
+    isEqual _ _ = unsafeCoerce IsEqual
+
hunk ./MiniDarcs/Patch/Catch.hs 40
-    -- [p] [q]
+    -- [p] [q] <-> [q'] [p']
hunk ./MiniDarcs/Patch/Catch.hs 44
-    -- [p] [p^, {:p}, :q]
+    -- [p] [p^, {:p}, :q] <-> q [q^, {:q}, :p]
hunk ./MiniDarcs/Patch/Catch.hs 72
-    -- [r, X, y] [, {y}, r^:q]
+    -- [r, X, y] [, {y}, r^:q] <-> [q] [q^r, {r^:q} U X, y]
hunk ./MiniDarcs/Patch/Catch.hs 16
+import Data.List
+import qualified Data.Set as Set
hunk ./MiniDarcs/Patch/Catch.hs 74
+    -- [p] [p^ r, {r^:p} U X, y] <-> [r, X, y] [, {y}, r^:p]
+    -- XXX The qConflicts and qConflicts' type signatures are to fix
+    -- building on GHC 6.8
+    commute (Patch p `Then` Conflictor qEffect (qConflicts :: [ContextedPatch from]) qIdentity)
+     | name p `elem` map name1 qConflicts
+    -- To sanity check, we need to confirm:
+    --     qEffect = p^ qEffect'
+    --     (qEffect'^ : p) \in qConflicts
+    --     qIdentity = : q
+        = case commuteToPrefix (Set.singleton (inverseSubName (name p))) qEffect of
+          Cons pInv Nil `Then` qEffect' ->
+              case isEqual (invert p) pInv of
+              IsEqual ->
+                  case partition ((name p ==) . name1) qConflicts of
+                  ([ContextedPatch cxt p'], qConflicts' :: [ContextedPatch from]) ->
+                      -- XXX I think this is wrong: Some of (invert qEffect')
+                      -- might be able to commute through p?
+                      case isEqual (invert qEffect') cxt of
+                      IsEqual ->
+                          case isEqual p p' of
+                          IsEqual ->
+                              Just (Conflictor qEffect'
+                                               qConflicts'
+                                               qIdentity
+                                    `Then`
+                                    Conflictor Nil
+                                               [qIdentity]
+                                               (addToContext (invert qEffect') $
+                                                ContextedPatch Nil p))
+                  _ -> panic "XXX"
+          _ -> panic "XXX"
hunk ./MiniDarcs/Patch/Catch.hs 122
+              -- XXX I think this is wrong: Some of (invert pEffect)
+              -- might be able to commute through q?
hunk ./minidarcs.cabal 28 + if impl(ghc < 6.9) + Extensions: PatternSignatures hunk ./MiniDarcs/Patch/Catch.hs 132 + -- [r s, W, x] [t, {t^x} U Y, z] <-> [r t', s'Y, s'z] [s', z U t^W, t^x] + commute (Conflictor pEffect pConflicts pIdentity + `Then` + Conflictor qEffect qConflicts qIdentity) + | name1 pIdentity `elem` map name1 qConflicts + -- -- XXX This sanity check comment is wrong: + -- To sanity check, we need to confirm: + -- qEffect = p^ qEffect' + -- (qEffect'^ : p) \in qConflicts + = let pEffectNames = Set.fromList (names pEffect) + qConflictsName = Set.fromList (map name1 qConflicts) + commonFirstConflictNames = pEffectNames `Set.intersection` + qConflictsName + in case commuteToPrefix commonFirstConflictNames pEffect of + commonEffects `Then` pOnlyEffect -> + case commute (pOnlyEffect `Then` qEffect) of + Just (qEffect' `Then` pEffect') -> + case partition ((name1 pIdentity ==) . name1) qConflicts of + ([ContextedPatch _cxt _p'], qConflicts') -> + -- XXX Check that + -- ContextedPatch cxt p' + -- is the same as pIdentity + Just (Conflictor (commonEffects `concatSeq` + qEffect') + (map (addToContext pEffect') qConflicts') + (addToContext pEffect' qIdentity) + `Then` + Conflictor pEffect' + (qIdentity : + map (addToContext (invert qEffect)) pConflicts) + (addToContext (invert qEffect) pIdentity)) + _ -> panic "XXX" + _ -> panic "XXX" hunk ./MiniDarcs/Patch/Sequence.hs 2 -module MiniDarcs.Patch.Sequence (Seq(..), names, commuteToPrefix) where +module MiniDarcs.Patch.Sequence ( + Seq(..), names, commuteToPrefix, concatSeq + ) where hunk ./MiniDarcs/Patch/Sequence.hs 128 +concatSeq :: Seq p from mid -> Seq p mid to -> Seq p from to +concatSeq Nil qs = qs +concatSeq (Cons p ps) qs = Cons p (concatSeq ps qs) + hunk ./MiniDarcs/Patch/Catch.hs 7 +import MiniDarcs.Patch.CommutePast hunk ./MiniDarcs/Patch/Catch.hs 43 - -- [p] [q] <-> [q'] [p'] - commute (Patch p `Then` Patch q) - = do (q' `Then` p') <- commute (p `Then` q) - return (Patch q' `Then` Patch p') hunk ./MiniDarcs/Patch/Catch.hs 162 + + -- From now on we know that the catches aren't conflictors that + -- might conflict with each other + + -- patch/patch + commute (Patch p `Then` Patch q) + = do (q' `Then` p') <- commute (p `Then` q) + return (Patch q' `Then` Patch p') + -- patch/conflictor + commute (Patch p `Then` Conflictor qEffect qConflicts qIdentity) + = do qEffect' `Then` p' <- commute (p `Then` qEffect) + qConflicts' <- mapM (\qConf -> commutePast (p' `ThenOpen` qConf)) + qConflicts + qIdentity' <- commutePast (p' `ThenOpen` qIdentity) + return (Conflictor qEffect' qConflicts' qIdentity' + `Then` + Patch p') hunk ./MiniDarcs/Patch/Catch.hs 179 + -- conflictor/patch + commute (Conflictor pEffect pConflicts pIdentity `Then` Patch q) + = do q' `Then` pEffect' <- commute (pEffect `Then` q) + pConflicts' <- mapM (\pConf -> commutePast (invert q `ThenOpen` pConf)) + pConflicts + pIdentity' <- commutePast (invert q `ThenOpen` pIdentity) + return (Patch q' + `Then` + Conflictor pEffect' pConflicts' pIdentity') hunk ./MiniDarcs/Patch/Catch.hs 188 - commute _ = Nothing + -- conflictor/conflictor + commute (Conflictor pEffect pConflicts pIdentity + `Then` + Conflictor qEffect qConflicts qIdentity) + = let pEffectNames = Set.fromList (names pEffect) + qConflictsName = Set.fromList (map name1 qConflicts) + commonFirstConflictNames = pEffectNames `Set.intersection` + qConflictsName + in case commuteToPrefix commonFirstConflictNames pEffect of + commonEffects `Then` pOnlyEffect -> + do qEffect' `Then` pEffect' <- commute (pOnlyEffect 
`Then` qEffect) + pIdentity' <- commutePast (invert qEffect `ThenOpen` pIdentity) + qIdentity' <- commutePast (pEffect' `ThenOpen` qIdentity) + if any (\pConflict -> pConflict `conflictsWith` addToContext qEffect qIdentity) pConflicts || + any (\qConflict -> pIdentity `conflictsWith` addToContext qEffect qConflict) qConflicts || + (pIdentity `conflictsWith` addToContext qEffect qIdentity) + then Nothing + else return (Conflictor (commonEffects `concatSeq` + qEffect') + (map (addToContext pEffect') qConflicts) + qIdentity' + `Then` + Conflictor pEffect' + (map (addToContext (invert qEffect)) pConflicts) + pIdentity') hunk ./MiniDarcs/Patch/ContextedPatch.hs 2 -module MiniDarcs.Patch.ContextedPatch (ContextedPatch(..), addToContext) where +module MiniDarcs.Patch.ContextedPatch ( + ContextedPatch(..), addToContext, conflictsWith + ) where hunk ./MiniDarcs/Patch/ContextedPatch.hs 9 +import MiniDarcs.Patch.Invert hunk ./MiniDarcs/Patch/ContextedPatch.hs 51 +conflictsWith :: ContextedPatch from -> ContextedPatch from -> Bool +conflictsWith (ContextedPatch ps p) q + = case commutePast (invert p `ThenOpen` addToContext (invert ps) q) of + Just _ -> False + Nothing -> True + hunk ./MiniDarcs/Patch/Catch.hs 41 --- XXX Wrong: hunk ./MiniDarcs/Command/Init.hs 6 -initialise :: [String] -> IO () -initialise [] = initialiseRepo -initialise _ = error "Unknown arguments to initialise" +initialise :: Log -> [String] -> IO () +initialise _ [] = initialiseRepo +initialise _ _ = error "Unknown arguments to initialise" hunk ./MiniDarcs/Command/Pull.hs 16 -pull :: [String] -> IO () -pull [repo] - = do localNames <- readInventory +pull :: Log -> [String] -> IO () +pull l [repo] + = do logRepo l "remote" repo + localNames <- readInventory hunk ./MiniDarcs/Command/Pull.hs 42 -pull _ = error "Unknown arguments to pull" +pull _ _ = error "Unknown arguments to pull" hunk ./MiniDarcs/Command/Test.hs 15 -test :: [String] -> IO () -test [] = do initialiseRepo - writeMegaPatches megaPatchesPQR - writeInventory namesPQR - apply megaPatchesPQR -test ["q"] = do initialiseRepo - writeMegaPatches megaPatchesPQ - writeInventory namesPQ - apply megaPatchesPQ -test ["r"] = do initialiseRepo - writeMegaPatches megaPatchesPR - writeInventory namesPR - apply megaPatchesPR -test _ = error "Unknown arguments to test" +test :: Log -> [String] -> IO () +test _ [] = do initialiseRepo + writeMegaPatches megaPatchesPQR + writeInventory namesPQR + apply megaPatchesPQR +test _ ["q"] = do initialiseRepo + writeMegaPatches megaPatchesPQ + writeInventory namesPQ + apply megaPatchesPQ +test _ ["r"] = do initialiseRepo + writeMegaPatches megaPatchesPR + writeInventory namesPR + apply megaPatchesPR +test _ _ = error "Unknown arguments to test" hunk ./MiniDarcs/Main.hs 7 +import MiniDarcs.Repository hunk ./MiniDarcs/Main.hs 9 +import Control.Monad hunk ./MiniDarcs/Main.hs 14 + exists <- isThisARepo + let creatingRepo = case args of + "init" : _ -> True + "test" : _ -> True + _ -> False + when (creatingRepo == exists) $ error "creatingRepo == exists" + l <- startLog args hunk ./MiniDarcs/Main.hs 22 - "init" : args' -> initialise args' - "pull" : args' -> pull args' - "test" : args' -> test args' + "init" : args' -> initialise l args' + "pull" : args' -> pull l args' + "test" : args' -> test l args' hunk ./MiniDarcs/Main.hs 26 + logRepo l "after" "." 
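-- Illustrative sketch (hypothetical, not one of the recorded patches): from
-- this patch on every command takes the debugging Log as its first argument,
-- even when it ignores it, and Main brackets the call between startLog and
-- the logRepo l "after" "." above.  A new command slotted into the case in
-- Main.hs would follow the same shape; "count" and its behaviour below are
-- invented purely for illustration, only readInventory is real.

count :: Log -> [String] -> IO ()
count _ [] = do ns <- readInventory
                putStrLn (show (length ns) ++ " patches in this repository")
count _ _  = error "Unknown arguments to count"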
hunk ./MiniDarcs/Repository.hs 3 + isThisARepo, hunk ./MiniDarcs/Repository.hs 12 + -- For logging (read: debugging) only: + Log, + startLog, + logRepo, hunk ./MiniDarcs/Repository.hs 33 +-- This contains the repo proper, as well as the logs +repoBase :: FilePath +repoBase = "_minidarcs" + hunk ./MiniDarcs/Repository.hs 38 -repoRoot = "_minidarcs" +repoRoot = repoBase </> "repo" hunk ./MiniDarcs/Repository.hs 82 -initialiseRepo = do createDirectory repoRoot +initialiseRepo = do -- We don't create things that startLog has already: + -- createDirectory repoBase + -- createDirectory logsDir + -- createDirectory repoRoot hunk ./MiniDarcs/Repository.hs 89 +isThisARepo :: IO Bool +isThisARepo = doesDirectoryExist repoRoot + +--------------------------------------------------------------------- +-- This is for logging (read: debugging) purposes only + +copyRepo :: FilePath -> FilePath -> IO () +copyRepo from to = copyTree (from </> repoRoot) to + +logsDir :: FilePath +logsDir = repoBase </> "logs" + +nextLogFile :: FilePath +nextLogFile = repoBase </> "nextLog" + +newtype Log = Log FilePath + +mkLog :: Integer -> IO Log +mkLog i = do curDir <- getCurrentDirectory + return $ Log (curDir </> logsDir </> show i) + +createLog :: Log -> IO () +createLog (Log fp) = createDirectory fp + +logFile :: Log -> FilePath -> FilePath +logFile (Log fp) file = fp </> file + +logDirectory :: Log -> FilePath -> FilePath +logDirectory (Log fp) dir = fp </> dir + +startLog :: [String] -> IO Log +startLog args + = do createDirectoryIfMissing False repoBase + createDirectoryIfMissing False logsDir + -- We create the repoRoot as otherwise copyRepo would have to + -- cope with it not existing + createDirectoryIfMissing False repoRoot + exists <- doesFileExist nextLogFile + nextLog <- if exists + then do xs <- readBinaryFile nextLogFile + case maybeRead xs of + Just nextLog -> return nextLog + Nothing -> panic "Couldn't read nextLog" + else return 1 + writeBinaryFile nextLogFile (show (nextLog + 1)) + l <- mkLog nextLog + createLog l + writeBinaryFile (logFile l "args") (show args) + logRepo l "before" "." + return l + +logRepo :: Log -> FilePath -> FilePath -> IO () +logRepo l repoName repoPath = copyRepo repoPath (logDirectory l repoName) + hunk ./MiniDarcs/Utils.hs 4 - readBinaryFile, writeBinaryFile, + readBinaryFile, writeBinaryFile, copyTree, hunk ./MiniDarcs/Utils.hs 12 +import System.Directory +import System.FilePath hunk ./MiniDarcs/Utils.hs 58 +copyTree :: FilePath -> FilePath -> IO () +copyTree from to = do createDirectory to + xs <- getDirectoryContents from + mapM_ copyEntry xs + where copyEntry "." = return () + copyEntry ".." = return () + copyEntry x = do let fromX = from </> x + toX = to </> x + file <- doesFileExist fromX + directory <- doesDirectoryExist fromX + case (file, directory) of + (True, True) -> panic "Both file and directory" + (True, False) -> copyFile fromX toX + (False, True) -> copyTree fromX toX + (False, False) -> panic "What is it?"
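-- Illustrative note (not one of the recorded patches): with the definitions
-- above, a working copy carries roughly this layout on disk (rendered with
-- "/" as the path separator; log directories are numbered from 1):
--
--   _minidarcs/repo/           the repository proper (inventory, patches, ...)
--   _minidarcs/logs/1/args     the arguments given to the first command run
--   _minidarcs/logs/1/before/  copy of _minidarcs/repo/ taken by startLog
--   _minidarcs/logs/1/after/   copy taken by the logRepo l "after" "." in Main
--   _minidarcs/nextLog         counter naming the next log directory
--
-- so, for example, logFile (Log ("_minidarcs" </> "logs" </> "1")) "args"
-- works out to "_minidarcs/logs/1/args".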
+ hunk ./MiniDarcs/Patch/Catch.hs 150 - Just (Conflictor (commonEffects `concatSeq` + Just (Conflictor (commonEffects `appendSeq` hunk ./MiniDarcs/Patch/Catch.hs 204 - else return (Conflictor (commonEffects `concatSeq` + else return (Conflictor (commonEffects `appendSeq` hunk ./MiniDarcs/Patch/Sequence.hs 3 - Seq(..), names, commuteToPrefix, concatSeq + Seq(..), names, commuteToPrefix, appendSeq hunk ./MiniDarcs/Patch/Sequence.hs 128 -concatSeq :: Seq p from mid -> Seq p mid to -> Seq p from to -concatSeq Nil qs = qs -concatSeq (Cons p ps) qs = Cons p (concatSeq ps qs) +appendSeq :: Seq p from mid -> Seq p mid to -> Seq p from to +appendSeq Nil qs = qs +appendSeq (Cons p ps) qs = Cons p (appendSeq ps qs) adddir ./Data adddir ./Data/List adddir ./Data/List/LCS addfile ./Data/List/LCS/HuntSzymanski.hs hunk ./Data/List/LCS/HuntSzymanski.hs 1 + +----------------------------------------------------------------------------- +-- | +-- Module : Data.List.LCS.HuntSzymanski +-- Copyright : (c) Ian Lynagh 2005 +-- License : BSD or GPL v2 +-- +-- Maintainer : igloo@earth.li +-- Stability : provisional +-- Portability : non-portable (uses STUArray) +-- +-- This is an implementation of the Hunt-Szymanski LCS algorithm. +-- Derived from the description in \"String searching algorithms\" by +-- Graham A Stephen, ISBN 981021829X. +----------------------------------------------------------------------------- + +module Data.List.LCS.HuntSzymanski ( + -- * Algorithm + -- $algorithm + + -- * LCS + lcs + ) where + +import Data.Array (listArray, (!)) +import Data.Array.MArray (MArray, newArray, newArray_) +import Data.Array.Base (unsafeRead, unsafeWrite) +import Data.Array.ST (STArray, STUArray) +import Control.Monad (when) +import Control.Monad.ST (ST, runST) +import Data.List (groupBy, sort) + +{- $algorithm +We take two sequences, @xs@ and @ys@, of length @\#xs@ and @\#ys@. + +First we make an array + +> matchlist[i=0..(#xs-1)] + +such that + +> (matchlist[i] = js) => ((j `elem` js) <=> (xs !! i == ys !! j)) +> && sort js == reverse js + +i.e. @matchlist[i]@ is the indices of elements of @ys@ equal to the +ith element of @xs@, in descending order. + +Let @\#xys@ be the minimum of @\#xs@ and @\#ys@. Trivially this is the maximum +possible length of the LCS of @xs@ and @ys@. Then we can imagine an array + +> k[i=0..#xs][l=0..#xys] + +such that @k[i][l] = j@ where @j@ is the smallest value such that the +LCS of @xs[0..i]@ and @ys[0..j]@ has length @l@. We use @\#ys@ to +mean there is no such @j@. + +We will not need to whole array at once, though. Instead we use an array + +> kk[l=0..#xys] + +representing a row of @kk@ for a particular @i@. Initially it is for +@i = -1@, so @kk[0] = -1@ and @kk[l] = \#ys@ otherwise. As the algorithm +progresses we will increase @i@ by one at the outer level and compute +the replacement values for @k@'s elements. + +But we want more than just the length of the LCS, we also want the LCS +itself. Another array + +> revres[l=0..#xys] + +stores the list of @xs@ indices an LCS of length @l@, if one is known, +at @revres[l]@. + +Now, suppose @kk@ contains @k[i-1]@. We consider each @j@ in @matchlist[i]@ +in turn. We find the @l@ such that @k[l-1] < j <= k[l]@. If @j < k[l]@ then +we updated @k[l]@ to be @j@ and set @revres[l]@ to be @i:revres[l-1]@. + +Finding @l@ is basically binary search, but there are some tricks we can +do. First, as the @j@s are decreasing the last @l@ we had for this @i@ is +an upper bound on this @l@. 
Second, we use another array + +> lastl[j=0..#ys-1] + +to store the @l@ we got last time for this @j@, initially all @1@. As the +values in @kk[j]@ monotonically decrease this is a lower bound for @l@. +We also test to see whether this old @l@ is still @l@ before we start the +binary search. +-} + +-- |The 'lcs' function takes two lists and returns a list with a longest +-- common subsequence of the two. +lcs :: Ord a => [a] -> [a] -> [a] +-- Start off by returning the common prefix +lcs [] _ = [] +lcs _ [] = [] +lcs (c1:c1s) (c2:c2s) + | c1 == c2 = c1 : lcs c1s c2s +-- Then reverse everything, get the backwards LCS and reverse it +lcs s1 s2 = lcs_tail [] (reverse s1) (reverse s2) + +-- To get the backwards LCS, we again start off by returning the common +-- prefix (or suffix, however you want to think of it :-) ) +lcs_tail :: Ord a => [a] -> [a] -> [a] -> [a] +lcs_tail acc (c1:c1s) (c2:c2s) + | c1 == c2 = lcs_tail (c1:acc) c1s c2s +lcs_tail acc [] _ = acc +lcs_tail acc _ [] = acc +-- Then we begin the real algorithm +lcs_tail acc s1 s2 = runST (lcs' acc s1 s2) + +lcs' :: Ord a => [a] -> [a] -> [a] -> ST s [a] +lcs' acc xs ys = + do let max_xs = length xs + max_ys = length ys + minmax = max_xs `min` max_ys + -- Initialise all the arrays + matchlist <- newArray_ (0, max_xs - 1) + mk_matchlist matchlist xs ys + kk <- newArray (0, minmax) max_ys + unsafeWrite kk 0 (-1) + lastl <- newArray (0, max_ys - 1) 1 + revres <- newArray_ (0, minmax) + unsafeWrite revres 0 [] + -- Pass the buck to lcs'' to finish the job off + is <- lcs'' matchlist lastl kk revres max_xs max_ys minmax + -- Convert the list of i indices into the result sequence + let axs = listArray (0, max_xs - 1) xs + return $ map (axs !) is ++ acc + +eqFst :: Eq a => (a, b) -> (a, b) -> Bool +eqFst (x, _) (y, _) = x == y + +-- mk_matchlist fills the matchlist array such that if +-- xs !! i == ys !! j then (j+1) `elem` matchlist ! i +-- and matchlist ! i is decreasing for all i +mk_matchlist :: Ord a => STArray s Int [Int] -> [a] -> [a] -> ST s () +mk_matchlist matchlist xs ys = + do let -- xs' is a list of (string, ids with that string in xs) + xs' = map (\sns -> (fst (head sns), map snd sns)) + $ groupBy eqFst $ sort $ zip xs [0..] + -- ys' is similar, only the ids are reversed + ys' = map (\sns -> (fst (head sns), reverse $ map snd sns)) + $ groupBy eqFst $ sort $ zip ys [0..] 
+ -- add_to_matchlist does all the hardwork + add_to_matchlist all_xs@((sx, idsx):xs'') all_ys@((sy, idsy):ys'') + = case compare sx sy of + -- If we have the same string in xs'' and ys'' then all + -- the indices in xs'' must map to the indices in ys'' + EQ -> do sequence_ [ unsafeWrite matchlist i idsy + | i <- idsx ] + add_to_matchlist xs'' ys'' + -- If the string in xs'' is smaller then there are no + -- corresponding indices in ys so we assign all the xs'' + -- indices the empty list + LT -> do sequence_ [ unsafeWrite matchlist i [] + | i <- idsx ] + add_to_matchlist xs'' all_ys + -- Otherwise the string appears in ys only, so we ignore it + GT -> do add_to_matchlist all_xs ys'' + -- If we run out of ys'' altogether then just go through putting + -- in [] for the list of indices of each index remaining in xs'' + add_to_matchlist ((_, idsx):xs'') [] + = do sequence_ [ unsafeWrite matchlist i [] | i <- idsx ] + add_to_matchlist xs'' [] + -- When we run out of xs'' we are done + add_to_matchlist [] _ = return () + -- Finally, actually call add_to_matchlist to populate matchlist + add_to_matchlist xs' ys' + +lcs'' :: STArray s Int [Int] -- matchlist + -> STUArray s Int Int -- lastl + -> STUArray s Int Int -- kk + -> STArray s Int [Int] -- revres + -> Int -> Int -> Int -> ST s [Int] +lcs'' matchlist lastl kk revres max_xs max_ys minmax = + do let -- Out the outermost level we loop over the indices i of xs + loop_i = sequence_ [ loop_j i | i <- [0..max_xs - 1] ] + -- For each i we loop over the matching indices j of elements of ys + loop_j i = do js <- unsafeRead matchlist i + with_js i js minmax + -- Deal with this i and j + with_js i (j:js) max_bound = + do x0 <- unsafeRead lastl j + l <- find_l j x0 max_bound + unsafeWrite lastl j l + vl <- unsafeRead kk l + when (j < vl) $ do + unsafeWrite kk l j + rs <- unsafeRead revres (l - 1) + unsafeWrite revres l (i:rs) + with_js i js l + with_js _ [] _ = return () + -- find_l returns the l such that kk ! (l-1) < j <= kk ! l + find_l j x0 z0 + = let f x z + | x + 1 == z = return z + | otherwise = let y = (x + z) `div` 2 + in do vy <- unsafeRead kk y + if vy < j + then f y z + else f x y + in j `seq` do q1 <- unsafeRead kk x0 + if j <= q1 + then return x0 + else f x0 z0 + -- Do the hard work + loop_i + -- Find where the result starts + succ_l <- find_l max_ys 1 (minmax + 1) + -- Get the result + unsafeRead revres (succ_l - 1) + addfile ./Data/List/LCS.hs hunk ./Data/List/LCS.hs 1 + +----------------------------------------------------------------------------- +-- | +-- Module : Data.List.LCS +-- Copyright : (c) Ian Lynagh 2005, 2008 +-- License : BSD or GPL v2 +-- +-- Maintainer : igloo@earth.li +-- Stability : provisional +-- Portability : non-portable (HuntSzymanski implementation is non-portable) +-- +-- Provides a function lcs that takes two lists and returns a longest common +-- sublist. For example, lcs "abcd" "acbd" is either "abd" or "acd". 
+----------------------------------------------------------------------------- + +module Data.List.LCS (lcs) where + +import Data.List.LCS.HuntSzymanski (lcs) + hunk ./MiniDarcs/Command/Pull.hs 5 -import MiniDarcs.Patch.Apply hunk ./MiniDarcs/Command/Pull.hs 53 - apply newLocalPatches + applyToRepo newLocalPatches addfile ./MiniDarcs/Command/Record.hs hunk ./MiniDarcs/Command/Record.hs 1 + +module MiniDarcs.Command.Record (record) where + +import MiniDarcs.Patch.Catch +import MiniDarcs.Patch.Equality +import MiniDarcs.Patch.MegaPatch +import MiniDarcs.Patch.Name +import MiniDarcs.Patch.Patch +import MiniDarcs.Patch.Primitive +import MiniDarcs.Patch.Sequence +import MiniDarcs.Repository +import MiniDarcs.Utils + +import Data.List +import Data.List.LCS +import System.Directory +import System.FilePath + +record :: Log -> [String] -> IO () +record _ [] = do directory <- recordDirectory pristineDir "." + adds <- recordAdds + n <- genName + let primitives = directory `appendSeq` adds + patches = mkPatches n 1 primitives + catches = mkCatches patches + megaPatch = MegaPatch n catches + writeMegaPatch megaPatch + applyToPristine megaPatch + ns <- readInventory + writeInventory (ns ++ [n]) +record _ _ = error "Unknown arguments to record" + +mkPatches :: Name -> Integer -> Seq Primitive from to -> Seq Patch from to +mkPatches _ _ Nil = Nil +mkPatches n i (Cons p ps) = Primitive (SubName n i) p + `Cons` + mkPatches n (i + 1) ps + +mkCatches :: Seq Patch from to -> Seq Catch from to +mkCatches Nil = Nil +mkCatches (Cons p ps) = Patch p `Cons` mkCatches ps + +recordAdds :: IO (Seq Primitive from to) +recordAdds = do fps <- readAdds + writeAdds [] + recordAddedFiles fps + +recordAddedFiles :: forall from to . [FilePath] -> IO (Seq Primitive from to) +recordAddedFiles [] = let resType :: Seq Primitive from to + resType = undefined + in case startIsEnd resType of + IsEqual -> return Nil +recordAddedFiles (fp : fps) = do this <- addedFile fp + rest <- recordAddedFiles fps + return (this `appendSeq` rest) + +addedFile :: FilePath -> IO (Seq Primitive from to) +addedFile newPath = do new <- readBinaryFile newPath + let newLines = myLines new + addFile = AddFile newPath + hunk = Hunk newPath 0 [] newLines + return (addFile `Cons` hunk `Cons` Nil) + +recordDirectory :: FilePath -> FilePath -> IO (Seq Primitive from to) +recordDirectory oldPath newPath = do entries <- getDirectoryContents oldPath + recordEntries entries + where recordEntries :: forall from to . + [FilePath] -> IO (Seq Primitive from to) + recordEntries [] = let resType :: Seq Primitive from to + resType = undefined + in case startIsEnd resType of + IsEqual -> return Nil + recordEntries ("." : es) = recordEntries es + recordEntries (".." 
: es) = recordEntries es + recordEntries (e : es) = do let oldE = oldPath </> e + newE = newPath </> e + stillExists <- doesFileExist newE + this <- if stillExists + then recordFile oldE newE + else removedFile oldE newE + rest <- recordEntries es + return (appendSeq this rest) + +removedFile :: FilePath -> FilePath -> IO (Seq Primitive from to) +removedFile oldPath newPath = do old <- readBinaryFile oldPath + let oldLines = myLines old + hunk = Hunk newPath 0 oldLines [] + rmFile = RmFile newPath + return (hunk `Cons` rmFile `Cons` Nil) + +recordFile :: FilePath -> FilePath -> IO (Seq Primitive from to) +recordFile oldPath newPath = do old <- readBinaryFile oldPath + new <- readBinaryFile newPath + let oldLines = myLines old + newLines = myLines new + commonLines = lcs oldLines newLines + return $ mkDiff newPath 0 commonLines oldLines newLines + +mkDiff :: forall from to . + FilePath -> Integer -> [String] -> [String] -> [String] + -> Seq Primitive from to +mkDiff fp skipped (common:cs) (old:os) (new:ns) + | common == old && common == new + = mkDiff fp (skipped + 1) cs os ns +mkDiff fp skipped cs@(common:_) os ns + = case break (common ==) ns of + (reallyNew, ns') -> + case break (common ==) os of + (reallyOld, os') -> + let skipped' = skipped + genericLength reallyNew + in Cons (Hunk fp skipped reallyOld reallyNew) + (mkDiff fp skipped' cs os' ns') +mkDiff _ _ [] [] [] = let resType :: Seq Primitive from to + resType = undefined + in case startIsEnd resType of + IsEqual -> Nil +mkDiff fp skipped [] os ns = Cons (Hunk fp skipped os ns) Nil + hunk ./MiniDarcs/Command/Test.hs 4 -import MiniDarcs.Patch.Apply hunk ./MiniDarcs/Command/Test.hs 18 - apply megaPatchesPQR + applyToRepo megaPatchesPQR hunk ./MiniDarcs/Command/Test.hs 22 - apply megaPatchesPQ + applyToRepo megaPatchesPQ hunk ./MiniDarcs/Command/Test.hs 26 - apply megaPatchesPR + applyToRepo megaPatchesPR hunk ./MiniDarcs/Main.hs 6 +import MiniDarcs.Command.Record hunk ./MiniDarcs/Main.hs 25 + "record" : args' -> record l args' hunk ./MiniDarcs/Repository.hs 8 + readAdds, + writeAdds, hunk ./MiniDarcs/Repository.hs 14 + pristineDir, + applyToRepo, + applyToPristine, + genName, hunk ./MiniDarcs/Repository.hs 24 +import MiniDarcs.Patch.Apply hunk ./MiniDarcs/Repository.hs 31 -import Control.Exception hunk ./MiniDarcs/Repository.hs 33 +import System.Time hunk ./MiniDarcs/Repository.hs 36 -inRepo dir io = do curDir <- getCurrentDirectory - (setCurrentDirectory dir >> io) - `finally` setCurrentDirectory curDir +inRepo = inDir hunk ./MiniDarcs/Repository.hs 48 +addsFile :: FilePath +addsFile = repoRoot </> "adds" + +pristineDir :: FilePath +pristineDir = repoRoot </> "pristine" + hunk ./MiniDarcs/Repository.hs 63 +readAdds :: IO [FilePath] +readAdds = do content <- readBinaryFile addsFile + case maybeRead content of + Just fps -> return fps + Nothing -> panic "Corrupt adds?"
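-- Worked example (not one of the recorded patches): the record code above
-- diffs each changed file by taking the longest common subsequence of its
-- old and new lines and letting mkDiff emit a Hunk for every gap between
-- common lines.  The sketch below shows what mkDiff computes for a small
-- change; exampleDiff is the editor's name, written as if it lived next to
-- mkDiff in MiniDarcs.Command.Record.

exampleDiff :: Seq Primitive from to
exampleDiff = mkDiff "file" 0 (lcs old new) old new
  where old = ["a", "b", "c"]
        new = ["a", "x", "c"]

-- Here lcs old new is ["a", "c"], so exampleDiff evaluates to
--     Hunk "file" 1 ["b"] ["x"] `Cons` Nil
-- i.e. skip one unchanged line, remove "b" and add "x"; the trailing common
-- line "c" produces no hunk at all.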
+ +writeAdds :: [FilePath] -> IO () +writeAdds ns = writeBinaryFile addsFile (show ns) + hunk ./MiniDarcs/Repository.hs 101 +applyToRepo :: Apply p => p from to -> IO () +applyToRepo ps = do applyToPristine ps + -- XXX This is wrong if there are local changes + apply ps + +applyToPristine :: Apply p => p from to -> IO () +applyToPristine ps = inDir pristineDir $ apply ps + hunk ./MiniDarcs/Repository.hs 115 + createDirectory pristineDir hunk ./MiniDarcs/Repository.hs 117 + writeAdds [] hunk ./MiniDarcs/Repository.hs 122 +genName :: IO Name +genName = do TOD i j <- getClockTime + -- Avoid names already in our inventory + ns <- readInventory + return $ head $ [ n + | j' <- [j..], + let n = Name Positive i j', + n `notElem` ns ] + hunk ./MiniDarcs/Utils.hs 4 - readBinaryFile, writeBinaryFile, copyTree, + readBinaryFile, writeBinaryFile, copyTree, inDir, hunk ./MiniDarcs/Utils.hs 74 +inDir :: FilePath -> IO a -> IO a +inDir dir io = do curDir <- getCurrentDirectory + (setCurrentDirectory dir >> io) + `finally` setCurrentDirectory curDir + hunk ./minidarcs.cabal 20 - Build-Depends: base, directory, filepath, containers + Build-Depends: base, directory, filepath, containers, array, old-time addfile ./MiniDarcs/Command/Add.hs hunk ./MiniDarcs/Command/Add.hs 1 + +module MiniDarcs.Command.Add (add) where + +import MiniDarcs.Repository + +add :: Log -> [String] -> IO () +add _ [] = error "No arguments to add" +add _ paths = do current <- readAdds + writeAdds (current ++ paths) + hunk ./MiniDarcs/Main.hs 4 +import MiniDarcs.Command.Add hunk ./MiniDarcs/Main.hs 24 + "add" : args' -> add l args' hunk ./MiniDarcs/Command/Record.hs 61 - hunk = Hunk newPath 0 [] newLines + hunk = Hunk newPath 0 [""] newLines hunk ./MiniDarcs/Command/Record.hs 87 - hunk = Hunk newPath 0 oldLines [] + hunk = Hunk newPath 0 oldLines [""] hunk ./MiniDarcs/Main.hs 11 +import Prelude hiding (catch) +import Control.Exception hunk ./MiniDarcs/Main.hs 15 +import System.IO hunk ./MiniDarcs/Main.hs 26 - case args of - "add" : args' -> add l args' - "init" : args' -> initialise l args' - "pull" : args' -> pull l args' - "record" : args' -> record l args' - "test" : args' -> test l args' - _ -> error "Unrecognised args" + doit l args `catch` \e -> do + logException l e + hPutStrLn stderr "Got an exception:" + hPutStrLn stderr $ show e hunk ./MiniDarcs/Main.hs 32 +doit :: Log -> [String] -> IO () +doit l args = case args of + "add" : args' -> add l args' + "init" : args' -> initialise l args' + "pull" : args' -> pull l args' + "record" : args' -> record l args' + "test" : args' -> test l args' + _ -> error "Unrecognised args" + hunk ./MiniDarcs/Repository.hs 22 + logException, hunk ./MiniDarcs/Repository.hs 32 +import Control.Exception hunk ./MiniDarcs/Repository.hs 184 +logException :: Log -> Exception -> IO () +logException l e = writeBinaryFile (logFile l "exception") (show e) + addfile ./MiniDarcs/Command/Inventory.hs hunk ./MiniDarcs/Command/Inventory.hs 1 + +module MiniDarcs.Command.Inventory (inventory) where + +import MiniDarcs.Repository + +import qualified Data.Set as Set + +inventory :: Log -> [String] -> IO () +inventory _ [] = do ns <- readInventory + mapM_ print ns +inventory _ [repo] + = do localNames <- readInventory + remoteNames <- inRepo repo readInventory + let localNameSet = Set.fromList localNames + remoteNameSet = Set.fromList remoteNames + localOnly = localNameSet `Set.difference` remoteNameSet + remoteOnly = remoteNameSet `Set.difference` localNameSet + putStrLn "Here only:" + mapM_ print $ Set.toList localOnly 
+ putStrLn "" + putStrLn "There only:" + mapM_ print $ Set.toList remoteOnly +inventory _ _ = error "Bad arguments to inventory" + hunk ./MiniDarcs/Main.hs 6 +import MiniDarcs.Command.Inventory hunk ./MiniDarcs/Main.hs 37 + "inventory" : args' -> inventory l args' hunk ./MiniDarcs/Command/Pull.hs 12 +import MiniDarcs.Utils hunk ./MiniDarcs/Command/Pull.hs 17 -pull l [repo] +pull l (repo : wantedPatches) hunk ./MiniDarcs/Command/Pull.hs 41 - doPull localNames localPatches remotePatches + case wantedPatches of + [] -> + doPull localNames localPatches remotePatches + _ -> + case mapM maybeRead wantedPatches of + Just wantedNames -> + let wantedNameSet = Set.fromList wantedNames + in case tryCommuteToPrefix wantedNameSet remotePatches of + Just (remotePatches' `Then` _) -> + doPull localNames localPatches remotePatches' + Nothing -> + error "Can't pull those patches due to dependencies" + Nothing -> + error "Can't parse the patch names you want" hunk ./MiniDarcs/Patch/Sequence.hs 3 - Seq(..), names, commuteToPrefix, appendSeq + Seq(..), names, commuteToPrefix, tryCommuteToPrefix, appendSeq hunk ./MiniDarcs/Patch/Sequence.hs 99 --- XXX This is a bit inefficient, as we don't actually want the prefix +-- XXX This is a bit inefficient, as we don't normally actually want +-- the prefix. hunk ./MiniDarcs/Patch/Sequence.hs 107 -commuteToPrefix ns (p `Cons` ps) - = case commuteToPrefix ns ps of - qs `Then` rs +commuteToPrefix ns ps = case tryCommuteToPrefix ns ps of + Just x -> x + Nothing -> panic "commuteToPrefix failed" + +tryCommuteToPrefix :: (Ord n, Commute p p, Named p n) + => Set n -> Seq p from to + -> Maybe (Then (Seq p) (Seq p) from to) +tryCommuteToPrefix _ Nil = Just (Nil `Then` Nil) +tryCommuteToPrefix ns (p `Cons` ps) + = case tryCommuteToPrefix ns ps of + Just (qs `Then` rs) hunk ./MiniDarcs/Patch/Sequence.hs 119 - Cons p qs `Then` rs + Just (Cons p qs `Then` rs) hunk ./MiniDarcs/Patch/Sequence.hs 123 - qs' `Then` Cons p' rs - Nothing -> panic "commuteToPrefix failed" + Just (qs' `Then` Cons p' rs) + Nothing -> Nothing + Nothing -> Nothing hunk ./MiniDarcs/Repository.hs 184 -logException :: Log -> Exception -> IO () +#if __GLASGOW_HASKELL__ >= 609 +type ExceptionType = SomeException +#else +type ExceptionType = Exception +#endif + +logException :: Log -> ExceptionType -> IO () hunk ./minidarcs.cabal 27 - IncoherentInstances + IncoherentInstances, + CPP addfile ./LICENSE hunk ./LICENSE 1 +Copyright (c) The Regents of the University of California. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. hunk ./MiniDarcs/Main.hs 20 - exists <- isThisARepo - let creatingRepo = case args of - "init" : _ -> True - "test" : _ -> True - _ -> False - when (creatingRepo == exists) $ error "creatingRepo == exists" - l <- startLog args - doit l args `catch` \e -> do - logException l e - hPutStrLn stderr "Got an exception:" - hPutStrLn stderr $ show e - logRepo l "after" "." + if null args + then usageInfo + else doSomething args hunk ./MiniDarcs/Main.hs 24 -doit :: Log -> [String] -> IO () -doit l args = case args of +usageInfo :: IO () +usageInfo = mapM_ putStrLn [ + "", + "This is MiniDarcs.", + "", + "This is not intended to be a useful application. Rather, it is a", + "prototype of a proposed replacement patch theory for darcs.", + "", + "This is not designed to be a robust application. If you do things", + "like ask minidarcs to add a file that doesn't exist to your repo,", + "then it'll probably rudely crash. Directories are not supported,", + "only files. You certainly wouldn't want to use it for a real project.", + "", + "However, if missing features etc are getting in the way of working", + "on and testing the patch theory, let me know.", + "", + "The following commands are supported:", + "", + " init Initialise a repository", + "", + " add file_1 ... file_n Mark file_1 ... file_n as to be added", + " to the repository on the next 'record'", + "", + " record Record all pending adds, hunks for any", + " changed files already in the repo, and", + " remove any files that are no longer in", + " the working directory", + "", + " inventory List the patch names in this repository", + "", + " inventory /repo/path/ Show the list of patch names in this", + " repo and not in /repo/path/, and", + " vice-versa", + "", + " pull /repo/path/ Pull all the patches from /repo/path/", + "", + " pull /repo/path/ name_1 ... name_n", + " Pull only the patches called name_1 ...", + " name_n from /repo/path/", + ""] + + +doSomething :: [String] -> IO () +doSomething args + = do exists <- isThisARepo + let creatingRepo = case args of + "init" : _ -> True + "test" : _ -> True + _ -> False + when (creatingRepo == exists) $ error "creatingRepo == exists" + l <- startLog args + doIt l args `catch` \e -> do + logException l e + hPutStrLn stderr "Got an exception:" + hPutStrLn stderr $ show e + logRepo l "after" "." + +doIt :: Log -> [String] -> IO () +doIt l args = case args of hunk ./LICENSE 1 -Copyright (c) The Regents of the University of California. +Copyright (c) Ian Lynagh, 2008. hunk ./LICENSE 12 -3. Neither the name of the University nor the names of its contributors +3. Neither the name of the Authors nor the names of any contributors hunk ./LICENSE 16 -THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND hunk ./LICENSE 19 -ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE hunk ./MiniDarcs/Main.hs 35 - "only files. 
You certainly wouldn't want to use it for a real project.", + "only files. None of the commands are interactive at all.", + "", + "You certainly wouldn't want to use this for a real project.", hunk ./MiniDarcs/Command/Inventory.hs 4 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Command/Inventory.hs 11 - mapM_ print ns + mapM_ (putStrLn . pprint) ns hunk ./MiniDarcs/Command/Inventory.hs 20 - mapM_ print $ Set.toList localOnly + mapM_ (putStrLn . pprint) $ Set.toList localOnly hunk ./MiniDarcs/Command/Inventory.hs 23 - mapM_ print $ Set.toList remoteOnly + mapM_ (putStrLn . pprint) $ Set.toList remoteOnly hunk ./MiniDarcs/Patch/Catch.hs 13 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/Catch.hs 28 - deriving (Show, Read) + deriving Read hunk ./MiniDarcs/Patch/Catch.hs 30 -instance Show2 Catch where - show2 = show +instance Ppr (Catch from to) where + ppr (Patch p) = text "Patch" <+> pprAtomic p + ppr (Conflictor effect conflicts identity) + = text "Conflictor" + $$ nest 4 (pprAtomic effect) + $$ nest 4 (pprAtomic conflicts) + $$ nest 4 (pprAtomic identity) hunk ./MiniDarcs/Patch/ContextedPatch.hs 12 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/ContextedPatch.hs 17 +import Data.Char + hunk ./MiniDarcs/Patch/ContextedPatch.hs 26 -instance Show (ContextedPatch from) where - show (ContextedPatch ps p) = show ps ++ " : " ++ show p +instance Ppr (ContextedPatch from) where + ppr (ContextedPatch ps p) = pprAtomic ps <+> colon <+> pprAtomic p hunk ./MiniDarcs/Patch/ContextedPatch.hs 29 +-- This is an ugly mess, but it works well enough for our purposes hunk ./MiniDarcs/Patch/ContextedPatch.hs 31 - readsPrec _ xs = case maybeContextedReads xs of + readsPrec _ = f + where f xs = case dropWhile isSpace xs of + xs'@('(' : xs'') -> + case g xs' of + [] -> case f xs'' of + [(y, ')' : xs''')] -> [(y, xs''')] + _ -> [] + res -> res + xs' -> g xs' + g xs = case maybeContextedReads xs of hunk ./MiniDarcs/Patch/ContextedPatch.hs 42 - case maybeRead xs' of + case maybeReads xs' of hunk ./MiniDarcs/Patch/Name.hs 7 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/Name.hs 23 -instance Show Name where - show (Name sign i j) = show sign ++ "-" ++ show i ++ "-" ++ show j +instance Ppr Name where + ppr (Name sign i j) + = ppr sign <> char '-' <> integer i <> char '-' <> integer j + pprAtomic = ppr hunk ./MiniDarcs/Patch/Name.hs 43 -instance Show SubName where - show (SubName n s) = show n ++ ":" ++ show s +instance Ppr SubName where + ppr (SubName n s) = ppr n <> colon <> integer s + pprAtomic = ppr hunk ./MiniDarcs/Patch/Name.hs 59 -instance Show Sign where - show Positive = "P" - show Negative = "N" +instance Ppr Sign where + ppr Positive = char 'P' + ppr Negative = char 'N' + pprAtomic = ppr hunk ./MiniDarcs/Patch/Patch.hs 10 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/Patch.hs 19 - deriving (Show, Read) + deriving Read + +instance Ppr (Patch from to) where + ppr (Primitive sn p) = text "Primitive" <+> pprAtomic sn <+> pprAtomic p hunk ./MiniDarcs/Patch/Patch.hs 31 -instance Show2 Patch where - show2 = show - addfile ./MiniDarcs/Patch/Pretty.hs hunk ./MiniDarcs/Patch/Pretty.hs 1 + +module MiniDarcs.Patch.Pretty (Ppr(..), pprint, module Text.PrettyPrint) where + +import Text.PrettyPrint + +pprint :: Ppr a => a -> String +pprint = render . ppr + +class Ppr a where + ppr :: a -> Doc + pprAtomic :: a -> Doc + pprAtomic = parens . 
ppr + +instance Ppr a => Ppr [a] where + ppr xs = brackets $ vcat $ punctuate comma $ map ppr xs + pprAtomic = ppr + hunk ./MiniDarcs/Patch/Primitive.hs 7 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/Primitive.hs 24 - deriving (Show, Read) + deriving Read + +instance Ppr (Primitive from to) where + ppr (AddFile fp) = text "AddFile" <+> text (show fp) + ppr (RmFile fp) = text "RmFile" <+> text (show fp) + ppr (Hunk fp skip old new) + = text "Hunk" <+> text (show fp) <+> integer skip + $$ nest 4 (brackets $ vcat $ map (text . show) old) + $$ nest 4 (brackets $ vcat $ map (text . show) new) hunk ./MiniDarcs/Patch/Sequence.hs 12 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/Sequence.hs 36 -instance Show2 p => Show2 (Seq p) where - show2 xs = "Seq [\n" ++ f xs ++ "]" - where f :: Show2 p => Seq p from to -> String - f Nil = "" - f (Cons p ps) = " " ++ show p ++ ";\n" ++ f ps +data Cxt +contextHack :: p from to -> p Cxt Cxt +contextHack = unsafeCoerce hunk ./MiniDarcs/Patch/Sequence.hs 40 +instance Ppr (p Cxt Cxt) => Ppr (Seq p from to) where + ppr Nil = text "Seq []" + ppr xs = text "Seq [" + $$ nest 4 (f (contextHack xs)) + $$ text "]" + where f :: Seq p Cxt Cxt -> Doc + f Nil = empty + f (Cons p ps) = ppr (contextHack p) <> semi + $$ f (contextHack ps) + +-- This is an ugly mess, but it works well enough for our purposes hunk ./MiniDarcs/Patch/Sequence.hs 63 - maybeContextedReads xs = case stripPrefix "Seq [" xs of - Nothing -> Nothing - Just xs' -> f xs' + maybeContextedReads xs + = case stripPrefix "Seq [" $ dropWhile isSpace xs of + Nothing -> + case stripPrefix "(Seq [" $ dropWhile isSpace xs of + Nothing -> Nothing + Just xs' -> + case f xs' of + Just (y, ')' : xs'') -> Just (y, xs'') + _ -> Nothing + Just xs' -> f xs' hunk ./MiniDarcs/Patch/ShowRead.hs 8 - -class Show2 p where - show2 :: p from to -> String - -instance Show2 p => Show (p from to) where - show = show2 hunk ./MiniDarcs/Repository.hs 29 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Repository.hs 61 - Nothing -> panic "Corrupt inventory?" + Nothing -> panic ("Corrupt inventory?\n" ++ content) hunk ./MiniDarcs/Repository.hs 64 -writeInventory ns = writeBinaryFile inventoryFile (show ns) +writeInventory ns = writeBinaryFile inventoryFile (pprint ns) hunk ./MiniDarcs/Repository.hs 70 - Nothing -> panic "Corrupt adds?" + Nothing -> panic ("Corrupt adds?\n" ++ content) hunk ./MiniDarcs/Repository.hs 79 -patchFile n = patchesDir show n +patchFile n = patchesDir pprint n hunk ./MiniDarcs/Repository.hs 94 - Nothing -> panic "Corrupt patch?" 
+ Nothing -> panic ("Corrupt patch?\n" ++ content) hunk ./MiniDarcs/Repository.hs 102 -writeMegaPatch (MegaPatch n c) = writeBinaryFile (patchFile n) (show c) +writeMegaPatch (MegaPatch n c) = writeBinaryFile (patchFile n) (pprint c) hunk ./minidarcs.cabal 20 - Build-Depends: base, directory, filepath, containers, array, old-time + Build-Depends: base, directory, filepath, containers, array, + old-time, pretty hunk ./MiniDarcs/Patch/Merge.hs 11 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/Merge.hs 62 - panic "catch merge commute" + panic ("catch merge commute\n\n" + ++ "catch 1:\n" + ++ pprint c1 + ++ "\n\n" + ++ "catch 2:\n" + ++ pprint c2 + ++ "\n\n" + ++ "effect 1:\n" + ++ pprint pEffect' + ++ "\n\n" + ++ "effect 2:\n" + ++ pprint qEffect' + ++ "\n") hunk ./MiniDarcs/Patch/MegaPatch.hs 10 +import MiniDarcs.Patch.Pretty hunk ./MiniDarcs/Patch/MegaPatch.hs 18 +instance Ppr (MegaPatch from to) where + ppr (MegaPatch n p) = text "MegaPatch" <+> pprAtomic n <+> pprAtomic p + hunk ./MiniDarcs/Patch/MegaPatch.hs 19 - ppr (MegaPatch n p) = text "MegaPatch" <+> pprAtomic n <+> pprAtomic p + ppr (MegaPatch n p) = text "MegaPatch" <+> pprAtomic n + $$ nest 4 (pprAtomic p) hunk ./MiniDarcs/Patch/Patch.hs 22 - ppr (Primitive sn p) = text "Primitive" <+> pprAtomic sn <+> pprAtomic p + ppr (Primitive sn p) = text "Primitive" <+> pprAtomic sn + $$ nest 4 (pprAtomic p) hunk ./MiniDarcs/Patch/Name.hs 66 - readsPrec _ ('N' : xs) = [(Positive, xs)] + readsPrec _ ('N' : xs) = [(Negative, xs)] hunk ./MiniDarcs/Main.hs 65 + "", + " Note: If you have local changes then pulling may go wrong.", + " It is not meant to handle them.", hunk ./minidarcs.cabal 10 - Mini Darcs + Mini Darcs hunk ./minidarcs.cabal 17 - + other-modules: Data.List.LCS, Data.List.LCS.HuntSzymanski + MiniDarcs.Utils, MiniDarcs.Types, MiniDarcs.Patch.Name, + MiniDarcs.Patch.Merge, MiniDarcs.Patch.Primitive, + MiniDarcs.Patch.MegaPatch, MiniDarcs.Patch.Sequence, MiniDarcs.Patch.Anonymous, + MiniDarcs.Patch.Equality, MiniDarcs.Patch.Patch, MiniDarcs.Patch.CommutePast, + MiniDarcs.Patch.Commute, MiniDarcs.Patch.Apply, MiniDarcs.Patch.Invert, + MiniDarcs.Patch.ShowRead, MiniDarcs.Patch.Catch, MiniDarcs.Patch.ContextedPatch, + MiniDarcs.Patch.Pretty, MiniDarcs.Repository, MiniDarcs.Command.Init, + MiniDarcs.Command.Test, MiniDarcs.Command.Inventory, MiniDarcs.Command.Record, + MiniDarcs.Command.Pull, MiniDarcs.Command.Add hunk ./minidarcs.cabal 17 - other-modules: Data.List.LCS, Data.List.LCS.HuntSzymanski - MiniDarcs.Utils, MiniDarcs.Types, MiniDarcs.Patch.Name, - MiniDarcs.Patch.Merge, MiniDarcs.Patch.Primitive, - MiniDarcs.Patch.MegaPatch, MiniDarcs.Patch.Sequence, MiniDarcs.Patch.Anonymous, - MiniDarcs.Patch.Equality, MiniDarcs.Patch.Patch, MiniDarcs.Patch.CommutePast, - MiniDarcs.Patch.Commute, MiniDarcs.Patch.Apply, MiniDarcs.Patch.Invert, - MiniDarcs.Patch.ShowRead, MiniDarcs.Patch.Catch, MiniDarcs.Patch.ContextedPatch, - MiniDarcs.Patch.Pretty, MiniDarcs.Repository, MiniDarcs.Command.Init, - MiniDarcs.Command.Test, MiniDarcs.Command.Inventory, MiniDarcs.Command.Record, - MiniDarcs.Command.Pull, MiniDarcs.Command.Add + other-modules: + Data.List.LCS + Data.List.LCS.HuntSzymanski + MiniDarcs.Command.Add + MiniDarcs.Command.Init + MiniDarcs.Command.Inventory + MiniDarcs.Command.Pull + MiniDarcs.Command.Record + MiniDarcs.Command.Test + MiniDarcs.Patch.Anonymous + MiniDarcs.Patch.Apply + MiniDarcs.Patch.Catch + MiniDarcs.Patch.Commute + MiniDarcs.Patch.CommutePast + MiniDarcs.Patch.ContextedPatch + MiniDarcs.Patch.Equality + 
MiniDarcs.Patch.Invert + MiniDarcs.Patch.MegaPatch + MiniDarcs.Patch.Merge + MiniDarcs.Patch.Name + MiniDarcs.Patch.Patch + MiniDarcs.Patch.Pretty + MiniDarcs.Patch.Primitive + MiniDarcs.Patch.Sequence + MiniDarcs.Patch.ShowRead + MiniDarcs.Repository + MiniDarcs.Types + MiniDarcs.Utils move ./MiniDarcs ./Camp move ./minidarcs.cabal ./camp.cabal hunk ./Camp/Command/Add.hs 2 -module MiniDarcs.Command.Add (add) where +module Camp.Command.Add (add) where hunk ./Camp/Command/Add.hs 4 -import MiniDarcs.Repository +import Camp.Repository hunk ./Camp/Command/Init.hs 2 -module MiniDarcs.Command.Init (initialise) where +module Camp.Command.Init (initialise) where hunk ./Camp/Command/Init.hs 4 -import MiniDarcs.Repository +import Camp.Repository hunk ./Camp/Command/Inventory.hs 2 -module MiniDarcs.Command.Inventory (inventory) where +module Camp.Command.Inventory (inventory) where hunk ./Camp/Command/Inventory.hs 4 -import MiniDarcs.Patch.Pretty -import MiniDarcs.Repository +import Camp.Patch.Pretty +import Camp.Repository hunk ./Camp/Command/Pull.hs 2 -module MiniDarcs.Command.Pull (pull) where +module Camp.Command.Pull (pull) where hunk ./Camp/Command/Pull.hs 4 -import MiniDarcs.Patch.Anonymous -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.MegaPatch -import MiniDarcs.Patch.Merge -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Sequence -import MiniDarcs.Repository -import MiniDarcs.Utils +import Camp.Patch.Anonymous +import Camp.Patch.Commute +import Camp.Patch.Equality +import Camp.Patch.MegaPatch +import Camp.Patch.Merge +import Camp.Patch.Name +import Camp.Patch.Sequence +import Camp.Repository +import Camp.Utils hunk ./Camp/Command/Record.hs 2 -module MiniDarcs.Command.Record (record) where +module Camp.Command.Record (record) where hunk ./Camp/Command/Record.hs 4 -import MiniDarcs.Patch.Catch -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.MegaPatch -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Patch -import MiniDarcs.Patch.Primitive -import MiniDarcs.Patch.Sequence -import MiniDarcs.Repository -import MiniDarcs.Utils +import Camp.Patch.Catch +import Camp.Patch.Equality +import Camp.Patch.MegaPatch +import Camp.Patch.Name +import Camp.Patch.Patch +import Camp.Patch.Primitive +import Camp.Patch.Sequence +import Camp.Repository +import Camp.Utils hunk ./Camp/Command/Test.hs 2 -module MiniDarcs.Command.Test (test) where +module Camp.Command.Test (test) where hunk ./Camp/Command/Test.hs 4 -import MiniDarcs.Patch.Catch -import MiniDarcs.Patch.MegaPatch -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Patch -import MiniDarcs.Patch.Primitive -import MiniDarcs.Patch.Sequence -import MiniDarcs.Repository +import Camp.Patch.Catch +import Camp.Patch.MegaPatch +import Camp.Patch.Name +import Camp.Patch.Patch +import Camp.Patch.Primitive +import Camp.Patch.Sequence +import Camp.Repository hunk ./Camp/Main.hs 4 -import MiniDarcs.Command.Add -import MiniDarcs.Command.Init -import MiniDarcs.Command.Inventory -import MiniDarcs.Command.Pull -import MiniDarcs.Command.Record -import MiniDarcs.Command.Test -import MiniDarcs.Repository +import Camp.Command.Add +import Camp.Command.Init +import Camp.Command.Inventory +import Camp.Command.Pull +import Camp.Command.Record +import Camp.Command.Test +import Camp.Repository hunk ./Camp/Main.hs 27 - "This is MiniDarcs.", + "This is Camp.", hunk ./Camp/Main.hs 30 - "prototype of a proposed replacement patch theory for darcs.", + "prototype of a new patch theory, similar to that used in darcs.", hunk 
./Camp/Main.hs 33 - "like ask minidarcs to add a file that doesn't exist to your repo,", + "like ask camp to add a file that doesn't exist to your repo,", hunk ./Camp/Patch/Anonymous.hs 2 -module MiniDarcs.Patch.Anonymous where +module Camp.Patch.Anonymous where hunk ./Camp/Patch/Apply.hs 2 -module MiniDarcs.Patch.Apply (Apply(..)) where +module Camp.Patch.Apply (Apply(..)) where hunk ./Camp/Patch/Catch.hs 2 -module MiniDarcs.Patch.Catch (Catch(..)) where +module Camp.Patch.Catch (Catch(..)) where hunk ./Camp/Patch/Catch.hs 4 -import MiniDarcs.Patch.Anonymous -import MiniDarcs.Patch.Apply -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.CommutePast -import MiniDarcs.Patch.ContextedPatch -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.Invert -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Patch -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.Sequence -import MiniDarcs.Patch.ShowRead -import MiniDarcs.Utils +import Camp.Patch.Anonymous +import Camp.Patch.Apply +import Camp.Patch.Commute +import Camp.Patch.CommutePast +import Camp.Patch.ContextedPatch +import Camp.Patch.Equality +import Camp.Patch.Invert +import Camp.Patch.Name +import Camp.Patch.Patch +import Camp.Patch.Pretty +import Camp.Patch.Sequence +import Camp.Patch.ShowRead +import Camp.Utils hunk ./Camp/Patch/Commute.hs 2 -module MiniDarcs.Patch.Commute (Then(..), Commute(..)) where +module Camp.Patch.Commute (Then(..), Commute(..)) where hunk ./Camp/Patch/CommutePast.hs 2 -module MiniDarcs.Patch.CommutePast (ThenOpen(..), CommutePast(..)) where +module Camp.Patch.CommutePast (ThenOpen(..), CommutePast(..)) where hunk ./Camp/Patch/ContextedPatch.hs 2 -module MiniDarcs.Patch.ContextedPatch ( +module Camp.Patch.ContextedPatch ( hunk ./Camp/Patch/ContextedPatch.hs 6 -import MiniDarcs.Patch.Anonymous -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.CommutePast -import MiniDarcs.Patch.Invert -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Patch -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.Sequence -import MiniDarcs.Patch.ShowRead -import MiniDarcs.Utils +import Camp.Patch.Anonymous +import Camp.Patch.Commute +import Camp.Patch.CommutePast +import Camp.Patch.Invert +import Camp.Patch.Name +import Camp.Patch.Patch +import Camp.Patch.Pretty +import Camp.Patch.Sequence +import Camp.Patch.ShowRead +import Camp.Utils hunk ./Camp/Patch/Equality.hs 2 -module MiniDarcs.Patch.Equality ( +module Camp.Patch.Equality ( hunk ./Camp/Patch/Invert.hs 2 -module MiniDarcs.Patch.Invert (Invert(..)) where +module Camp.Patch.Invert (Invert(..)) where hunk ./Camp/Patch/MegaPatch.hs 2 -module MiniDarcs.Patch.MegaPatch (MegaPatch(..), commuteToPrefix) where +module Camp.Patch.MegaPatch (MegaPatch(..), commuteToPrefix) where hunk ./Camp/Patch/MegaPatch.hs 4 -import MiniDarcs.Patch.Apply -import MiniDarcs.Patch.Catch -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.Invert -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.Sequence +import Camp.Patch.Apply +import Camp.Patch.Catch +import Camp.Patch.Commute +import Camp.Patch.Equality +import Camp.Patch.Invert +import Camp.Patch.Name +import Camp.Patch.Pretty +import Camp.Patch.Sequence hunk ./Camp/Patch/Merge.hs 2 -module MiniDarcs.Patch.Merge where +module Camp.Patch.Merge where hunk ./Camp/Patch/Merge.hs 4 -import MiniDarcs.Patch.Anonymous -import MiniDarcs.Patch.Catch -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.ContextedPatch -import MiniDarcs.Patch.Equality -import 
MiniDarcs.Patch.Invert -import MiniDarcs.Patch.MegaPatch -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.Sequence -import MiniDarcs.Utils +import Camp.Patch.Anonymous +import Camp.Patch.Catch +import Camp.Patch.Commute +import Camp.Patch.ContextedPatch +import Camp.Patch.Equality +import Camp.Patch.Invert +import Camp.Patch.MegaPatch +import Camp.Patch.Pretty +import Camp.Patch.Sequence +import Camp.Utils hunk ./Camp/Patch/Name.hs 2 -module MiniDarcs.Patch.Name ( +module Camp.Patch.Name ( hunk ./Camp/Patch/Name.hs 7 -import MiniDarcs.Patch.Pretty -import MiniDarcs.Utils +import Camp.Patch.Pretty +import Camp.Utils hunk ./Camp/Patch/Patch.hs 2 -module MiniDarcs.Patch.Patch (Patch(..)) where +module Camp.Patch.Patch (Patch(..)) where hunk ./Camp/Patch/Patch.hs 4 -import MiniDarcs.Patch.Anonymous -import MiniDarcs.Patch.Apply -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.Invert -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.Primitive -import MiniDarcs.Patch.ShowRead -import MiniDarcs.Utils +import Camp.Patch.Anonymous +import Camp.Patch.Apply +import Camp.Patch.Commute +import Camp.Patch.Equality +import Camp.Patch.Invert +import Camp.Patch.Name +import Camp.Patch.Pretty +import Camp.Patch.Primitive +import Camp.Patch.ShowRead +import Camp.Utils hunk ./Camp/Patch/Pretty.hs 2 -module MiniDarcs.Patch.Pretty (Ppr(..), pprint, module Text.PrettyPrint) where +module Camp.Patch.Pretty (Ppr(..), pprint, module Text.PrettyPrint) where hunk ./Camp/Patch/Primitive.hs 2 -module MiniDarcs.Patch.Primitive (Primitive(..)) where +module Camp.Patch.Primitive (Primitive(..)) where hunk ./Camp/Patch/Primitive.hs 4 -import MiniDarcs.Patch.Apply -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.Invert -import MiniDarcs.Patch.Pretty -import MiniDarcs.Types -import MiniDarcs.Utils +import Camp.Patch.Apply +import Camp.Patch.Commute +import Camp.Patch.Invert +import Camp.Patch.Pretty +import Camp.Types +import Camp.Utils hunk ./Camp/Patch/Sequence.hs 2 -module MiniDarcs.Patch.Sequence ( +module Camp.Patch.Sequence ( hunk ./Camp/Patch/Sequence.hs 6 -import MiniDarcs.Patch.Anonymous -import MiniDarcs.Patch.Apply -import MiniDarcs.Patch.Commute -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.Invert -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.ShowRead -import MiniDarcs.Utils +import Camp.Patch.Anonymous +import Camp.Patch.Apply +import Camp.Patch.Commute +import Camp.Patch.Equality +import Camp.Patch.Invert +import Camp.Patch.Name +import Camp.Patch.Pretty +import Camp.Patch.ShowRead +import Camp.Utils hunk ./Camp/Patch/ShowRead.hs 5 -module MiniDarcs.Patch.ShowRead where +module Camp.Patch.ShowRead where hunk ./Camp/Patch/ShowRead.hs 7 -import MiniDarcs.Patch.Anonymous +import Camp.Patch.Anonymous hunk ./Camp/Repository.hs 2 -module MiniDarcs.Repository ( +module Camp.Repository ( hunk ./Camp/Repository.hs 25 -import MiniDarcs.Patch.Apply -import MiniDarcs.Patch.Equality -import MiniDarcs.Patch.MegaPatch -import MiniDarcs.Patch.Name -import MiniDarcs.Patch.Pretty -import MiniDarcs.Patch.Sequence -import MiniDarcs.Utils +import Camp.Patch.Apply +import Camp.Patch.Equality +import Camp.Patch.MegaPatch +import Camp.Patch.Name +import Camp.Patch.Pretty +import Camp.Patch.Sequence +import Camp.Utils hunk ./Camp/Repository.hs 43 -repoBase = "_minidarcs" +repoBase = "_camp" hunk ./Camp/Types.hs 2 -module MiniDarcs.Types where +module Camp.Types where hunk ./Camp/Utils.hs 2 -module 
MiniDarcs.Utils ( +module Camp.Utils ( hunk ./camp.cabal 1 -Name: minidarcs +Name: camp hunk ./camp.cabal 8 -Synopsis: Mini Darcs +Synopsis: Camp hunk ./camp.cabal 10 - Mini Darcs + Camp (Commute And Merge Patches) hunk ./camp.cabal 16 - Main-Is: MiniDarcs/Main.hs + Main-Is: Camp/Main.hs hunk ./camp.cabal 18 + Camp.Command.Add + Camp.Command.Init + Camp.Command.Inventory + Camp.Command.Pull + Camp.Command.Record + Camp.Command.Test + Camp.Patch.Anonymous + Camp.Patch.Apply + Camp.Patch.Catch + Camp.Patch.Commute + Camp.Patch.CommutePast + Camp.Patch.ContextedPatch + Camp.Patch.Equality + Camp.Patch.Invert + Camp.Patch.MegaPatch + Camp.Patch.Merge + Camp.Patch.Name + Camp.Patch.Patch + Camp.Patch.Pretty + Camp.Patch.Primitive + Camp.Patch.Sequence + Camp.Patch.ShowRead + Camp.Repository + Camp.Types + Camp.Utils hunk ./camp.cabal 45 - MiniDarcs.Command.Add - MiniDarcs.Command.Init - MiniDarcs.Command.Inventory - MiniDarcs.Command.Pull - MiniDarcs.Command.Record - MiniDarcs.Command.Test - MiniDarcs.Patch.Anonymous - MiniDarcs.Patch.Apply - MiniDarcs.Patch.Catch - MiniDarcs.Patch.Commute - MiniDarcs.Patch.CommutePast - MiniDarcs.Patch.ContextedPatch - MiniDarcs.Patch.Equality - MiniDarcs.Patch.Invert - MiniDarcs.Patch.MegaPatch - MiniDarcs.Patch.Merge - MiniDarcs.Patch.Name - MiniDarcs.Patch.Patch - MiniDarcs.Patch.Pretty - MiniDarcs.Patch.Primitive - MiniDarcs.Patch.Sequence - MiniDarcs.Patch.ShowRead - MiniDarcs.Repository - MiniDarcs.Types - MiniDarcs.Utils adddir ./packages adddir ./packages/lcs move ./Data ./packages/lcs/Data hunk ./camp.cabal 15 +Flag have_lcs + Description: Do we have the lcs package? + hunk ./camp.cabal 20 - other-modules: + Other-Modules: hunk ./camp.cabal 46 - Data.List.LCS - Data.List.LCS.HuntSzymanski + Hs-Source-Dirs: . + hunk ./camp.cabal 63 + -- This is a hack to avoid needing to install the lcs package + -- when building with the GHC HEAD: + if flag(have_lcs) + Build-Depends: lcs + else + Hs-Source-Dirs: packages/lcs + Other-Modules: + Data.List.LCS + Data.List.LCS.HuntSzymanski + hunk ./camp.cabal 64 - -- when building with the GHC HEAD: + -- when building with the GHC HEAD. The in-tree sources are from + -- lcs 0.2 hunk ./LICENSE 28 + + +The contents of packages/ may be under different licences. 
+ + hunk ./camp.cabal 18 -Executable minidarcs +Executable camp adddir ./cbits addfile ./Camp/Curl.hsc hunk ./Camp/Curl.hsc 1 + +-- We don't know what type CURLcode (for example) is going to be, so we +-- don't know whether or not we need Data.Int and Data.Word: +{-# OPTIONS_GHC -fno-warn-unused-imports #-} + +module Camp.Curl ( + withGlobalCurl, withCurl, perform, + setURL + ) where + +#include + +import Control.Exception +import Control.Monad +import Data.Int +import Data.Word +import Foreign +import Foreign.C + +type URL = String + +type CURLcode = #type CURLcode +type CURLoption = #type CURLoption +-- type CurlOff = #type curl_off_t + +data CurlHandle +newtype Curl = Curl (Ptr CurlHandle) + +-- We always use CURL_GLOBAL_ALL rather than trying to predict what +-- we'll want to use +withGlobalCurl :: IO a -> IO a +withGlobalCurl = bracket_ initialise cleanup + where initialise = do cc <- curl_global_init initialiseAll + checkForCurlException cc + cleanup = curl_global_cleanup + +initialiseAll :: CLong +initialiseAll = #const CURL_GLOBAL_ALL + +withCurl :: (Curl -> IO a) -> IO a +withCurl = bracket initialise cleanup + where initialise = do c@(Curl p) <- curl_easy_init + when (p == nullPtr) $ + -- XXX Make a nicer exception + error "Curl init failed" + return c + cleanup = curl_easy_cleanup + +perform :: Curl -> IO () +perform c = do cc <- curl_easy_perform c + checkForCurlException cc + +setURL :: Curl -> URL -> IO () +setURL c url = do cc <- withCString url $ curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_URL + +checkForCurlException :: CURLcode -> IO () +checkForCurlException cc + = when (cc /= 0) $ + do err <- curl_easy_strerror cc + str <- peekCString err + -- XXX Should make nicer exceptions + let str' = "Curl code " ++ show cc ++ ": " ++ str + error str' + +-- CURLcode curl_global_init(long flags ); +foreign import ccall unsafe "curl_global_init" + curl_global_init :: CLong -> IO CURLcode + +-- void curl_global_cleanup(void); +foreign import ccall unsafe "curl_global_cleanup" + curl_global_cleanup :: IO () + +-- CURL *curl_easy_init( ); +foreign import ccall unsafe "curl_easy_init" + curl_easy_init :: IO Curl + +-- void curl_easy_cleanup(CURL * handle ); +foreign import ccall unsafe "curl_easy_cleanup" + curl_easy_cleanup :: Curl -> IO () + +-- CURLcode curl_easy_perform(CURL * handle ); +foreign import ccall unsafe "curl_easy_perform" + curl_easy_perform :: Curl -> IO CURLcode + +-- CURLcode curl_easy_setopt(CURL *handle, CURLoption option, parameter); +-- parameter can be: +-- a long +-- a function pointer +-- an object pointer +-- a curl_off_t +{- +foreign import ccall unsafe "curl_easy_setopt_long" + curl_easy_setopt_long :: Curl -> CURLoption -> CLong -> IO CURLcode +-} +foreign import ccall unsafe "curl_easy_setopt_ptr" + curl_easy_setopt_ptr :: Curl -> CURLoption -> Ptr a -> IO CURLcode +{- +foreign import ccall unsafe "curl_easy_setopt_off" + curl_easy_setopt_off :: Curl -> CURLoption -> CurlOff -> IO CURLcode +-} + +-- const char *curl_easy_strerror(CURLcode errornum ); +foreign import ccall unsafe "curl_easy_strerror" + curl_easy_strerror :: CURLcode -> IO CString + hunk ./Camp/Main.hs 10 +import Camp.Curl hunk ./Camp/Main.hs 23 - else doSomething args + else -- XXX For now we always initialise curl + withGlobalCurl $ doSomething args hunk ./camp.cabal 27 + Camp.Curl hunk ./camp.cabal 48 + C-Sources: cbits/curl.c hunk ./camp.cabal 55 + Pkgconfig-Depends: libcurl + hunk ./camp.cabal 63 - CPP + CPP, ForeignFunctionInterface addfile 
./cbits/curl.c hunk ./cbits/curl.c 1 + +#include + +CURLcode +curl_easy_setopt_long(CURL *handle, CURLoption option, long parameter) { + curl_easy_setopt(handle, option, parameter); +} + +CURLcode +curl_easy_setopt_ptr(CURL *handle, CURLoption option, void *parameter) { + curl_easy_setopt(handle, option, parameter); +} + +CURLcode +curl_easy_setopt_off(CURL *handle, CURLoption option, curl_off_t parameter) { + curl_easy_setopt(handle, option, parameter); +} hunk ./Camp/Curl.hsc 8 - setURL + setURL, setWriteFunction hunk ./Camp/Curl.hsc 58 +setWriteFunction :: Curl -> Maybe WriteFunction -> IO () +setWriteFunction c mwf = + do p <- case mwf of + Just wf -> makeWriteFunction wf + Nothing -> return nullFunPtr + cc <- curl_easy_setopt_funptr c opt p + checkForCurlException cc + where opt = #const CURLOPT_WRITEFUNCTION + hunk ./Camp/Curl.hsc 106 +foreign import ccall unsafe "curl_easy_setopt_funptr" + curl_easy_setopt_funptr :: Curl -> CURLoption -> FunPtr a -> IO CURLcode hunk ./Camp/Curl.hsc 115 +-- CURLOPT_WRITEFUNCTION +-- size_t function( void *ptr, size_t size, size_t nmemb, void *stream) +type WriteFunction = CString -> CSize -> CSize -> Ptr () -> IO CSize + +foreign import ccall "wrapper" + makeWriteFunction :: WriteFunction -> IO (FunPtr WriteFunction) + hunk ./cbits/curl.c 14 +CURLcode +curl_easy_setopt_funptr(CURL *handle, CURLoption option, void *parameter) { + curl_easy_setopt(handle, option, parameter); +} + hunk ./Camp/Curl.hsc 8 - setURL, setWriteFunction + setURL, setRange, unsetRange, + setSSHPrivateKey, setSSHPublicKey, setVerbosity, + setWriteFunction, makeWriteFunction, + setDebugFunction, makeDebugFunction hunk ./Camp/Curl.hsc 28 +type CurlInfo = #type curl_infotype hunk ./Camp/Curl.hsc 57 +setVerbosity :: Curl -> Bool -> IO () +setVerbosity c verbose = do cc <- curl_easy_setopt_long c opt v + checkForCurlException cc + where opt = #const CURLOPT_VERBOSE + v = if verbose then 1 else 0 + hunk ./Camp/Curl.hsc 68 -setWriteFunction :: Curl -> Maybe WriteFunction -> IO () -setWriteFunction c mwf = - do p <- case mwf of - Just wf -> makeWriteFunction wf - Nothing -> return nullFunPtr - cc <- curl_easy_setopt_funptr c opt p +setRange :: Curl -> Integer -> Integer -> IO () +setRange c from to = do cc <- withCString (show from ++ "-" ++ show to) $ + curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_RANGE + +unsetRange :: Curl -> IO () +unsetRange c = do cc <- curl_easy_setopt_ptr c opt nullPtr + checkForCurlException cc + where opt = #const CURLOPT_RANGE + +setSSHPrivateKey :: Curl -> FilePath -> IO () +setSSHPrivateKey c fp = do cc <- withCString fp $ curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_SSH_PRIVATE_KEYFILE + +setSSHPublicKey :: Curl -> FilePath -> IO () +setSSHPublicKey c fp = do cc <- withCString fp $ curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_SSH_PUBLIC_KEYFILE + +setWriteFunction :: Curl -> FunPtr WriteFunction -> IO () +setWriteFunction c wf = + do cc <- curl_easy_setopt_funptr c opt wf hunk ./Camp/Curl.hsc 95 +setDebugFunction :: Curl -> FunPtr DebugFunction -> IO () +setDebugFunction c df = + do cc <- curl_easy_setopt_funptr c opt df + checkForCurlException cc + where opt = #const CURLOPT_DEBUGFUNCTION + hunk ./Camp/Curl.hsc 127 -foreign import ccall unsafe "curl_easy_perform" +foreign import ccall safe "curl_easy_perform" hunk ./Camp/Curl.hsc 136 -{- hunk ./Camp/Curl.hsc 138 --} hunk ./Camp/Curl.hsc 154 +-- CURLOPT_DEBUGFUNCTION +-- int curl_debug_callback 
(CURL *, curl_infotype, char *, size_t, void *); +type DebugFunction = Curl -> CurlInfo -> CString -> CSize -> Ptr () -> IO CInt + +foreign import ccall "wrapper" + makeDebugFunction :: DebugFunction -> IO (FunPtr DebugFunction) + addfile ./Camp/Patch/InputOutput.hs hunk ./Camp/Command/Inventory.hs 10 -inventory _ [] = do ns <- readInventory +inventory _ [] = do i <- readInventory + let ns = [ n | InventoryItem n _ _ _ <- i ] hunk ./Camp/Command/Inventory.hs 14 - = do localNames <- readInventory - remoteNames <- inRepo repo readInventory - let localNameSet = Set.fromList localNames + = do localInventory <- readInventory + remoteInventory <- inRepo repo readInventory + let localNames = [ n | InventoryItem n _ _ _ <- localInventory ] + remoteNames = [ n | InventoryItem n _ _ _ <- remoteInventory ] + localNameSet = Set.fromList localNames hunk ./Camp/Command/Pull.hs 7 +import Camp.Patch.InputOutput hunk ./Camp/Command/Pull.hs 13 -import Camp.Utils hunk ./Camp/Command/Pull.hs 14 +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy.Char8 as BSC + hiding (ByteString) -- XXX Warning hack hunk ./Camp/Command/Pull.hs 22 - localNames <- readInventory - remoteNames <- inRepo repo readInventory - let localNameSet = Set.fromList localNames + localInventory <- readInventory + remoteInventory <- inRepo repo readInventory + let wantedPatches' = map BSC.pack wantedPatches + localNames = [ n | InventoryItem n _ _ _ <- localInventory ] + remoteNames = [ n | InventoryItem n _ _ _ <- remoteInventory ] + localNameSet = Set.fromList localNames hunk ./Camp/Command/Pull.hs 31 - isCommon n = n `Set.member` commonNameSet - localReadNames = case span isCommon localNames of - (_, x) -> x - remoteReadNames = case span isCommon remoteNames of - (_, x) -> x + isCommon (InventoryItem n _ _ _) = n `Set.member` commonNameSet + localReadInventory = case span isCommon localInventory of + (_, x) -> x + remoteReadInventory = case span isCommon remoteInventory of + (_, x) -> x hunk ./Camp/Command/Pull.hs 40 - localReadPatches <- readMegaPatches localReadNames - remoteReadPatches <- inRepo repo $ readMegaPatches remoteReadNames + localReadPatches <- readMegaPatches localReadInventory + remoteReadPatches <- inRepo repo $ readMegaPatches remoteReadInventory hunk ./Camp/Command/Pull.hs 49 - doPull localNames localPatches remotePatches + doPull localInventory localPatches remotePatches hunk ./Camp/Command/Pull.hs 51 - case mapM maybeRead wantedPatches of - Just wantedNames -> - let wantedNameSet = Set.fromList wantedNames - in case tryCommuteToPrefix wantedNameSet remotePatches of - Just (remotePatches' `Then` _) -> - doPull localNames localPatches remotePatches' - Nothing -> - error "Can't pull those patches due to dependencies" - Nothing -> - error "Can't parse the patch names you want" + if all validName wantedPatches' + then let wantedNames = -- XXX Should check snd == "" + map (fst . 
input) wantedPatches' + wantedNameSet = Set.fromList wantedNames + in case tryCommuteToPrefix wantedNameSet remotePatches of + Just (remotePatches' `Then` _) -> + doPull localInventory localPatches remotePatches' + Nothing -> + error "Can't pull those patches due to dependencies" + else error "Can't parse the patch names you want" hunk ./Camp/Command/Pull.hs 63 +validName :: ByteString -> Bool +validName bs = case valid bs (undefined :: Name) of + Left _ -> False + Right _ -> True + hunk ./Camp/Command/Pull.hs 69 -doPull :: [Name] -> Seq MegaPatch from1 to1 -> Seq MegaPatch from2 to2 -> IO () -doPull localNames localPatches remotePatches = +doPull :: [InventoryItem] -> Seq MegaPatch from1 to1 -> Seq MegaPatch from2 to2 + -> IO () +doPull localInventory localPatches remotePatches = hunk ./Camp/Command/Pull.hs 76 - do let localNames' = localNames ++ - names newLocalPatches - writeMegaPatches newLocalPatches + do is <- writeMegaPatches newLocalPatches + -- XXX We could change to a repo format that allows for append + let localInventory' = localInventory ++ is hunk ./Camp/Command/Pull.hs 80 - writeInventory localNames' + writeInventory localInventory' hunk ./Camp/Command/Record.hs 27 - writeMegaPatch megaPatch + i <- writeMegaPatch megaPatch hunk ./Camp/Command/Record.hs 29 - ns <- readInventory - writeInventory (ns ++ [n]) + is <- readInventory + -- XXX We could change to a repo format that allows for append + writeInventory (is ++ [i]) hunk ./Camp/Command/Test.hs 12 +import qualified Data.ByteString.Lazy.Char8 as BSC hunk ./Camp/Command/Test.hs 17 - writeMegaPatches megaPatchesPQR - writeInventory namesPQR + is <- writeMegaPatches megaPatchesPQR + writeInventory is hunk ./Camp/Command/Test.hs 21 - writeMegaPatches megaPatchesPQ - writeInventory namesPQ + is <- writeMegaPatches megaPatchesPQ + writeInventory is hunk ./Camp/Command/Test.hs 25 - writeMegaPatches megaPatchesPR - writeInventory namesPR + is <- writeMegaPatches megaPatchesPR + writeInventory is hunk ./Camp/Command/Test.hs 31 -nameP = Name Positive 111 111 -nameQ = Name Positive 222 222 -nameR = Name Positive 333 333 - -namesPQR, namesPQ, namesPR :: [Name] -namesPQR = [nameP, nameQ, nameR] -namesPQ = [nameP, nameQ] -namesPR = [nameP, nameR] +nameP = Name Positive (BSC.pack "111-111") +nameQ = Name Positive (BSC.pack "222-222") +nameR = Name Positive (BSC.pack "333-333") hunk ./Camp/Patch/Catch.hs 4 -import Camp.Patch.Anonymous hunk ./Camp/Patch/Catch.hs 9 +import Camp.Patch.InputOutput hunk ./Camp/Patch/Catch.hs 15 -import Camp.Patch.ShowRead hunk ./Camp/Patch/Catch.hs 17 +import qualified Data.ByteString.Lazy as BS hunk ./Camp/Patch/Catch.hs 28 - deriving Read + +instance InputOutput (Catch from to) where + input bs = case BS.head bs of + 0 -> case input (BS.tail bs) of + (p, bs') -> (Patch p, bs') + 1 -> case input (BS.tail bs) of + (effect, bs') -> + case input bs' of + (conflicts, bs'') -> + case input bs'' of + (identity, bs''') -> + (Conflictor effect conflicts identity, bs''') + _ -> error "InputOutput Catch: Bad value" + valid bs _ = case BS.uncons bs of + Just (0, bs') -> valid bs' (undefined :: Patch from to) + Just (1, bs') -> + case valid bs' (undefined :: Seq Patch from to) of + Left err -> Left err + Right bs'' -> + case valid bs'' (undefined :: [ContextedPatch to]) of + Left err -> Left err + Right bs''' -> + valid bs''' (undefined :: ContextedPatch to) + _ -> Left ("InputOutput Catch: Bad value", bs) + output (Patch p) = 0 `BS.cons` output p + output (Conflictor effect conflicts identity) + = 1 `BS.cons` output 
effect + `BS.append` output conflicts + `BS.append` output identity + +instance InputOutput2 Catch where + input2 = input + output2 = output + valid2 = valid hunk ./Camp/Patch/Catch.hs 75 -instance ContextedRead Catch where - maybeContextedReads xs = case maybeReads xs of - Just (c, xs') -> Just (Anonymous1 c, xs') - Nothing -> Nothing - hunk ./Camp/Patch/ContextedPatch.hs 6 -import Camp.Patch.Anonymous hunk ./Camp/Patch/ContextedPatch.hs 8 +import Camp.Patch.InputOutput hunk ./Camp/Patch/ContextedPatch.hs 14 -import Camp.Patch.ShowRead -import Camp.Utils hunk ./Camp/Patch/ContextedPatch.hs 15 -import Data.Char +import qualified Data.ByteString.Lazy as BS hunk ./Camp/Patch/ContextedPatch.hs 21 +instance InputOutput (ContextedPatch from) where + input bs = case input bs of + (ps, bs') -> + case input bs' of + (p, bs'') -> + (ContextedPatch ps p, bs'') + valid bs _ = case valid bs (undefined :: Seq Patch from ()) of + Left err -> Left err + Right bs' -> valid bs' (undefined :: Patch () to) + output (ContextedPatch ps p) = output ps `BS.append` output p + hunk ./Camp/Patch/ContextedPatch.hs 38 --- This is an ugly mess, but it works well enough for our purposes -instance Read (ContextedPatch from) where - readsPrec _ = f - where f xs = case dropWhile isSpace xs of - xs'@('(' : xs'') -> - case g xs' of - [] -> case f xs'' of - [(y, ')' : xs''')] -> [(y, xs''')] - _ -> [] - res -> res - xs' -> g xs' - g xs = case maybeContextedReads xs of - Just (Anonymous1 ps, ' ':':':' ':xs') -> - case maybeReads xs' of - Just (p, xs'') -> - [(ContextedPatch ps p, xs'')] - Nothing -> [] - _ -> [] - hunk ./Camp/Patch/InputOutput.hs 1 + +module Camp.Patch.InputOutput (InputOutput(..), InputOutput2(..)) where + +import qualified Data.ByteString.Lazy as BS +import Data.ByteString.Lazy (ByteString) +import Data.Bits +import Data.Int +import Data.List +import Data.Word + +class InputOutput a where + input :: ByteString -> (a, ByteString) + -- XXX We should probably do the ShowS trick, but this'll do for now. + -- Our structures are shallow, after all. + output :: a -> ByteString + + -- XXX This is a bit of a mess, because we don't get any help from + -- the type checker as to what recursive calls we need to make. + -- The idea is that (read >>= validate; read >>= input >>= apply) + -- should be able to run in constant space, and input cannot fail + -- it validate succeeded. And in fact, validate actually does even + -- more checking than that, e.g. it checks that filenames are + -- actually valid, that names only contain valid name character, + -- etc. 
+ + -- "valid bs undefined" should return True if the bytestring + -- can be successfully read as a value of type a + valid :: ByteString -> a -> Either (String, -- Error + ByteString) -- where the error happened + ByteString -- remainder of the input + +class InputOutput2 p where + input2 :: ByteString -> (p from to, ByteString) + output2 :: p from to -> ByteString + valid2 :: ByteString -> p from to -> Either (String, ByteString) ByteString + +instance InputOutput ByteString where + input bs = case input bs of + (w, bs') -> + BS.splitAt (fromIntegral (w :: Word64)) bs' + valid bs _ = case valid bs (undefined :: Word64) of + Left err -> Left err + Right _ -> + case input bs of + (w, bs') -> + let len = fromIntegral (w :: Word64) + in if BS.length bs' < len + then Left ("InputOutput ByteString truncated", bs) + else Right (BS.drop len bs') + output bs = output (fromIntegral (BS.length bs) :: Word64) `BS.append` bs + +instance InputOutput Int64 where + input bs = case input bs of + (w, bs') -> (fromIntegral (w :: Word64), bs') + valid bs _ = valid bs (undefined :: Word64) + output i = output (fromIntegral i :: Word64) + +instance InputOutput Word64 where + input bs = case BS.splitAt 8 bs of + (xs, bs') -> + (foldl1' f $ map fromIntegral $ BS.unpack xs, bs') + where f x y = (x `shiftL` 8) .|. y + valid bs _ = if BS.length bs < 8 + then Left ("InputOutput Word64 not enough bytes", bs) + else Right (BS.drop 8 bs) + output w = BS.pack [fromIntegral (w `shiftR` 56), + fromIntegral (w `shiftR` 48), + fromIntegral (w `shiftR` 40), + fromIntegral (w `shiftR` 32), + fromIntegral (w `shiftR` 24), + fromIntegral (w `shiftR` 16), + fromIntegral (w `shiftR` 8), + fromIntegral w] + +instance InputOutput Integer where + input bs = case BS.head bs of + 0 -> case input (BS.tail bs) of + (i, bs') -> (fromIntegral (i :: Int64), bs') + 1 -> case input (BS.tail bs) of + (ws, bs') -> (fromWord64s ws, bs') + 2 -> case input (BS.tail bs) of + (ws, bs') -> (negate $ fromWord64s ws, bs') + _ -> error "InputOutput Integer: Bad value" + where fromWord64s :: [Word64] -> Integer + fromWord64s ws = foldr1 f $ map fromIntegral ws + f x y = (x `shiftL` 64) .|. 
y + valid bs _ = case BS.uncons bs of + Just (0, bs') -> valid bs' (undefined :: Int64) + Just (1, bs') -> valid bs' (undefined :: [Word64]) + Just (2, bs') -> valid bs' (undefined :: [Word64]) + _ -> Left ("InputOutput Integer: Bad value", bs) + output i = if (i <= fromIntegral (maxBound :: Int64)) && + (i >= fromIntegral (minBound :: Int64)) + then 0 `BS.cons` output (fromIntegral i :: Int64) + else if i > 0 + then 1 `BS.cons` output (toWord64s i) + else 2 `BS.cons` output (toWord64s (negate i)) + where toWord64s :: Integer -> [Word64] + toWord64s 0 = [] + toWord64s j = fromIntegral j : toWord64s (j `shiftR` 64) + +instance InputOutput a => InputOutput [a] where + input bs = case BS.head bs of + 0 -> ([], BS.tail bs) + 1 -> case input (BS.tail bs) of + (x, bs') -> + case input bs' of + (xs, bs'') -> (x : xs, bs'') + _ -> error "InputOutput []: Bad value" + valid bs _ = case BS.uncons bs of + Just (0, bs') -> Right bs' + Just (1, bs') -> + case valid bs' (undefined :: a) of + Right bs'' -> + valid bs'' (undefined :: [a]) + Left err -> Left err + _ -> Left ("InputOutput []: Bad value", bs) + output [] = BS.singleton 0 + output (x : xs) = 1 `BS.cons` output x `BS.append` output xs + hunk ./Camp/Patch/MegaPatch.hs 8 +import Camp.Patch.InputOutput hunk ./Camp/Patch/MegaPatch.hs 14 +import qualified Data.ByteString.Lazy as BS hunk ./Camp/Patch/MegaPatch.hs 20 +instance InputOutput (MegaPatch from to) where + input bs = case input bs of + (n, bs') -> + case input bs' of + (cs, bs'') -> + (MegaPatch n cs, bs'') + valid bs _ = case valid bs (undefined :: Name) of + Left err -> Left err + Right bs' -> valid bs' (undefined :: Seq Catch from to) + output (MegaPatch n cs) = output n `BS.append` output cs + hunk ./Camp/Patch/Name.hs 8 -import Camp.Utils +import Camp.Patch.InputOutput hunk ./Camp/Patch/Name.hs 10 +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC +import Data.ByteString.Lazy (ByteString) hunk ./Camp/Patch/Name.hs 21 --- Designed for using --- TOD :: Integer -> Integer -> ClockTime -data Name = Name Sign Integer Integer +data Name = Name Sign ByteString hunk ./Camp/Patch/Name.hs 25 - ppr (Name sign i j) - = ppr sign <> char '-' <> integer i <> char '-' <> integer j + ppr (Name sign n) = ppr sign <> char '-' <> text (BSC.unpack n) hunk ./Camp/Patch/Name.hs 28 -instance Read Name where - readsPrec _ xs = case maybeReads $ dropWhile isSpace xs of - Just (sign, '-' : xs') -> - case maybeReads xs' of - Just (i, '-' : xs'') -> - case maybeReads xs'' of - Just (j, xs''') -> - [(Name sign i j, xs''')] - Nothing -> [] - _ -> [] - _ -> [] +instance InputOutput Name where + input bs = case input bs of + (sign, bs') -> + case input bs' of + (n, bs'') -> (Name sign n, bs'') + valid bs _ = case valid bs (undefined :: Sign) of + Left err -> Left err + Right bs' -> + case valid bs' (undefined :: ByteString) of + Left err -> Left err + Right bs'' -> + case input bs' of + (n, _) -> + if BS.null n || not (BS.all goodChar n) + then Left ("InputOutput Name Bad value", bs'') + else Right bs'' + where goodChar w = ((w >= ord' '0') && (w <= ord' '9')) + || ((w >= ord' 'a') && (w <= ord' 'z')) + || ((w >= ord' 'A') && (w <= ord' 'Z')) + || (w == ord' '.') + || (w == ord' '-') + ord' = fromIntegral . 
ord + output (Name sign n) = output sign `BS.append` output n hunk ./Camp/Patch/Name.hs 59 -instance Read SubName where - readsPrec _ xs = case maybeReads xs of - Just (n, ':' : xs') -> - case maybeReads xs' of - Just (s, xs'') -> - [(SubName n s, xs'')] - Nothing -> [] - _ -> [] +instance InputOutput SubName where + input bs = case input bs of + (n, bs') -> + case input bs' of + (s, bs'') -> + (SubName n s, bs'') + valid bs _ = case valid bs (undefined :: Name) of + Left err -> Left err + Right bs' -> + case BSC.uncons bs' of + Just (':', bs'') -> + valid bs'' (undefined :: Integer) + _ -> + Left ("InputOutput SubName Expected colon", bs') + output (SubName n s) = output n `BS.append` output s hunk ./Camp/Patch/Name.hs 78 +instance InputOutput Sign where + input bs = case BS.head bs of + 0 -> (Positive, BS.tail bs) + 1 -> (Negative, BS.tail bs) + _ -> error "InputOutput Sign: Bad value" + valid bs _ = case BS.uncons bs of + Just (0, bs') -> Right bs' + Just (1, bs') -> Right bs' + _ -> Left ("InputOutput Sign: Bad value", bs) + output Positive = BS.singleton 0 + output Negative = BS.singleton 1 + hunk ./Camp/Patch/Name.hs 101 -inverseName (Name Positive i1 i2) = Name Negative i1 i2 -inverseName (Name Negative i1 i2) = Name Positive i1 i2 +inverseName (Name Positive n) = Name Negative n +inverseName (Name Negative n) = Name Positive n hunk ./Camp/Patch/Patch.hs 4 -import Camp.Patch.Anonymous hunk ./Camp/Patch/Patch.hs 7 +import Camp.Patch.InputOutput hunk ./Camp/Patch/Patch.hs 12 -import Camp.Patch.ShowRead -import Camp.Utils hunk ./Camp/Patch/Patch.hs 13 +import qualified Data.ByteString.Lazy as BS hunk ./Camp/Patch/Patch.hs 18 - deriving Read + +instance InputOutput (Patch from to) where + input bs = case input bs of + (sn, bs') -> + case input bs' of + (p, bs'') -> + (Primitive sn p, bs'') + valid bs _ = case valid bs (undefined :: SubName) of + Left err -> Left err + Right bs' -> valid bs' (undefined :: Patch from to) + output (Primitive sn p) = output sn `BS.append` output p + +instance InputOutput2 Patch where + input2 = input + output2 = output + valid2 = valid hunk ./Camp/Patch/Patch.hs 46 -instance ContextedRead Patch where - maybeContextedReads xs = case maybeReads xs of - Just (p, xs') -> Just (Anonymous1 p, xs') - Nothing -> Nothing - hunk ./Camp/Patch/Primitive.hs 6 +import Camp.Patch.InputOutput hunk ./Camp/Patch/Primitive.hs 12 +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC hunk ./Camp/Patch/Primitive.hs 28 - deriving Read + +-- XXX This instance assumes that FilePath is [Char8] +instance InputOutput (Primitive from to) where + input bs = case BS.head bs of + 0 -> case input (BS.tail bs) of + (fp, bs') -> (AddFile (BSC.unpack fp), bs') + 1 -> case input (BS.tail bs) of + (fp, bs') -> (RmFile (BSC.unpack fp), bs') + -- XXX This is hideous, but we're going to change the + -- hunk format shortly anyway: + 2 -> case input (BS.tail bs) of + (fp, bs1) -> + case input bs1 of + (skip, bs2) -> + case input bs2 of + (old, bs3) -> + case input bs3 of + (new, bs4) -> + (Hunk (BSC.unpack fp) + skip + (map BSC.unpack old) + (map BSC.unpack new), + bs4) + _ -> error "InputOutput Primitive: Bad value" + -- XXX We ought to sanity check the filenames (no leading /, no NUL, + -- not a reserved name on Windows, etc). Having a Filename type + -- would help for that; then it would have its own instance. 
+ -- Not clear we actually want to check for resvered names here, as + -- we might want a --force for them. + valid bs _ = case BS.uncons bs of + Just (0, bs') -> + valid bs' (undefined :: ByteString) + Just (1, bs') -> + valid bs' (undefined :: ByteString) + Just (2, bs1) -> + case valid bs1 (undefined :: Integer) of + Left err -> Left err + Right bs2 -> + case valid bs2 (undefined :: [ByteString]) of + Left err -> Left err + Right bs3 -> + valid bs3 (undefined :: [ByteString]) + _ -> Left ("InputOutput Primitive: Bad value", bs) + output (AddFile fp) = 0 `BS.cons` output (BSC.pack fp) + output (RmFile fp) = 1 `BS.cons` output (BSC.pack fp) + -- XXX This is hideous, but we're going to change the hunk format + -- shortly anyway: + output (Hunk fp skip old new) + = 2 `BS.cons` output (BSC.pack fp) + `BS.append` output skip + `BS.append` output (map BSC.pack old) + `BS.append` output (map BSC.pack new) hunk ./Camp/Patch/Sequence.hs 6 -import Camp.Patch.Anonymous hunk ./Camp/Patch/Sequence.hs 9 +import Camp.Patch.InputOutput hunk ./Camp/Patch/Sequence.hs 13 -import Camp.Patch.ShowRead hunk ./Camp/Patch/Sequence.hs 15 -import Data.Char -import Data.List +import qualified Data.ByteString.Lazy as BS hunk ./Camp/Patch/Sequence.hs 38 +instance InputOutput2 p => InputOutput (Seq p from to) where + input bs = case BS.head bs of + 0 -> (unsafeCoerce Nil, BS.tail bs) + 1 -> case input2 (BS.tail bs) of + (x, bs') -> + case input bs' of + (xs, bs'') -> + (x `Cons` xs, bs'') + _ -> error "InputOutput Seq: Bad value" + valid bs _ = case BS.uncons bs of + Just (0, bs') -> Right bs' + Just (1, bs') -> + case valid2 bs' (undefined :: p from ()) of + Right bs'' -> + valid bs'' (undefined :: Seq p () to) + Left err -> Left err + _ -> Left ("InputOutput Seq: Bad value", bs) + output Nil = BS.singleton 0 + output (x `Cons` xs) = 1 `BS.cons` output2 x `BS.append` output xs + hunk ./Camp/Patch/Sequence.hs 68 --- This is an ugly mess, but it works well enough for our purposes -instance ContextedRead (Seq p) => Read (Seq p from to) where - readsPrec _ xs = case maybeContextedReads xs of - Just (Anonymous1 ps, xs') -> ret ps xs' - Nothing -> [] - where -- GHC 6.8 can't cope if this is inlined - ret :: Seq p from cxt -> String -> [(Seq p from to, String)] - ret ps xs' = let resType :: Seq p from to - resType = undefined - in case sameEnd ps resType of - IsEqual -> [(ps, xs')] - -instance ContextedRead p => ContextedRead (Seq p) where - maybeContextedReads xs - = case stripPrefix "Seq [" $ dropWhile isSpace xs of - Nothing -> - case stripPrefix "(Seq [" $ dropWhile isSpace xs of - Nothing -> Nothing - Just xs' -> - case f xs' of - Just (y, ')' : xs'') -> Just (y, xs'') - _ -> Nothing - Just xs' -> f xs' - where f :: ContextedRead p - => String -> Maybe (Anonymous1 (Seq p from), String) - f ys = case maybeContextedReads ys of - Just (Anonymous1 p, ';':ys') -> - case f ys' of - Just (Anonymous1 ps, ys'') -> - Just (Anonymous1 (Cons p ps), ys'') - Nothing -> Nothing - _ -> - case dropWhile isSpace ys of - ']' : ys' -> Just (Anonymous1 Nil, ys') - _ -> Nothing - hunk ./Camp/Repository.hs 8 + InventoryItem(..), hunk ./Camp/Repository.hs 28 +import Camp.Patch.InputOutput hunk ./Camp/Repository.hs 31 -import Camp.Patch.Pretty hunk ./Camp/Repository.hs 35 +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC +import Data.Char +import Numeric hunk ./Camp/Repository.hs 42 +import System.IO hunk ./Camp/Repository.hs 64 -readInventory :: IO [Name] 
-readInventory = do content <- readBinaryFile inventoryFile - case maybeRead content of - Just ns -> return ns - Nothing -> panic ("Corrupt inventory?\n" ++ content) +-- XXX Pull may benefit from a contexted sequence of these +data InventoryItem = InventoryItem Name ByteString{-FilePath/Filename-} Integer Integer hunk ./Camp/Repository.hs 67 -writeInventory :: [Name] -> IO () -writeInventory ns = writeBinaryFile inventoryFile (pprint ns) +instance InputOutput InventoryItem where + input bs0 = case input bs0 of + (n, bs1) -> + case input bs1 of + (fp, bs2) -> + case input bs2 of + (from, bs3) -> + case input bs3 of + (to, bs4) -> + (InventoryItem n fp from to, bs4) + valid bs0 _ = case valid bs0 (undefined :: Name) of + Left err -> Left err + Right bs1 -> + case valid bs1 (undefined :: ByteString) of + Left err -> Left err + Right bs2 -> + case valid bs2 (undefined :: Integer) of + Left err -> Left err + Right bs3 -> + valid bs3 (undefined :: Integer) + output (InventoryItem n fp from to) + = output n `BS.append` + output fp `BS.append` + output from `BS.append` + output to + +-- XXX Need a safe variant for remote repos, "camp check", etc +-- We don't use readFile because we want to strictly read the inventory. +readInventory :: IO [InventoryItem] +readInventory = do h <- openBinaryFile inventoryFile ReadMode + size <- hFileSize h + content <- BS.hGet h (fromIntegral size) + hClose h + -- XXX check snd == ""? + return (fst $ input content) + +writeInventory :: [InventoryItem] -> IO () +writeInventory ns = BS.writeFile inventoryFile (output ns) hunk ./Camp/Repository.hs 118 -patchFile :: Name -> FilePath -patchFile n = patchesDir pprint n +patchFile :: FilePath +patchFile = patchesDir "patchFile" hunk ./Camp/Repository.hs 121 -readMegaPatches :: forall from to . [Name] -> IO (Seq MegaPatch from to) +-- XXX This should possibly use a variant readMegaPatch that takes the +-- handle; we'd have to check that the filename matches +readMegaPatches :: forall from to . + [InventoryItem] -> IO (Seq MegaPatch from to) hunk ./Camp/Repository.hs 133 -readMegaPatch :: Name -> IO (MegaPatch from to) -readMegaPatch n = do content <- readBinaryFile $ patchFile n - case maybeRead content of - Just c -> return (MegaPatch n c) - Nothing -> panic ("Corrupt patch?\n" ++ content) +-- XXX Need a variant that checks validity, works with remote repos, etc +readMegaPatch :: InventoryItem -> IO (MegaPatch from to) +-- XXX We sometimes want this to be lazy, making chunks on demand with +-- unsafeInterleaveIO, and closing the handle when we get to "to". +-- e.g. when applying a patch. +-- However, when pulling patches we may need to do a merge, at which point +-- we want to be strict. +-- For now we are just always strict +readMegaPatch (InventoryItem _ fp from to) + = do h <- openBinaryFile (BSC.unpack fp) ReadMode + hSeek h AbsoluteSeek from + content <- BS.hGet h (fromIntegral (1 + to - from)) + hClose h + -- XXX check snd == ""? 
+ return $ fst $ input content hunk ./Camp/Repository.hs 149 -writeMegaPatches :: Seq MegaPatch from to -> IO () -writeMegaPatches Nil = return () -writeMegaPatches (Cons p ps) = do writeMegaPatch p - writeMegaPatches ps +-- XXX This should use a variant writeMegaPatch that takes the handle +-- and start size +writeMegaPatches :: Seq MegaPatch from to -> IO [InventoryItem] +writeMegaPatches Nil = return [] +writeMegaPatches (Cons p ps) = do i <- writeMegaPatch p + is <- writeMegaPatches ps + return (i : is) hunk ./Camp/Repository.hs 157 -writeMegaPatch :: MegaPatch from to -> IO () -writeMegaPatch (MegaPatch n c) = writeBinaryFile (patchFile n) (pprint c) +writeMegaPatch :: MegaPatch from to -> IO InventoryItem +writeMegaPatch m@(MegaPatch n _) + = do h <- openBinaryFile patchFile AppendMode + startSize <- hFileSize h + BS.hPut h $ output m + endSize <- hFileSize h + hClose h + return (InventoryItem n (BSC.pack patchFile) startSize (endSize - 1)) hunk ./Camp/Repository.hs 187 +-- XXX Should use the repo name and the patch metadata too hunk ./Camp/Repository.hs 189 -genName = do TOD i j <- getClockTime +genName = do TOD i j <- getClockTime -- XXX Error on i or j < 0? hunk ./Camp/Repository.hs 191 - ns <- readInventory + inv <- readInventory + let ns = [ n | InventoryItem n _ _ _ <- inv ] + mkName j' = Name Positive + (BSC.pack (showBase62 i ++ "-" ++ showBase62 j')) hunk ./Camp/Repository.hs 197 - let n = Name Positive i j', + let n = mkName j', hunk ./Camp/Repository.hs 200 +showBase62 :: Integer -> String +showBase62 i = showIntAtBase 62 toBase62Digit i "" + where toBase62Digit x = let y = fromIntegral x + in if y < 10 then chr (ord '0' + y) + else if i < 36 then chr (ord 'a' + y - 10) + else chr (ord 'A' + y - 36) + hunk ./camp.cabal 35 + Camp.Patch.InputOutput hunk ./camp.cabal 54 - old-time, pretty + old-time, pretty, bytestring hunk ./Camp/Patch/ShowRead.hs 1 - -{-# OPTIONS_GHC -fno-warn-orphans #-} --- We have an orphan we can't get rid of - -module Camp.Patch.ShowRead where - -import Camp.Patch.Anonymous - -class ContextedRead p where - maybeContextedReads :: String -> Maybe (Anonymous1 (p from), String) - rmfile ./Camp/Patch/ShowRead.hs hunk ./camp.cabal 44 - Camp.Patch.ShowRead hunk ./Camp/Command/Pull.hs 69 +-- And anyway, we now have two different uses of it. 
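Editorial aside, not part of the patches above: the InputOutput instances introduced in the preceding hunks all share one wire format — a Word64 is written as eight big-endian bytes, a ByteString as a Word64 length prefix followed by its raw bytes, and a list as a 1 byte before each element with a terminating 0 byte; an InventoryItem is then just the concatenation of its fields' encodings. A minimal standalone sketch of that layout follows; the putWord64/putBytes/putList helper names are illustrative only and do not exist in the camp sources.

import qualified Data.ByteString.Lazy as BS
import Data.Bits (shiftR)
import Data.Word (Word64)

-- Eight bytes, most significant first, as in the Word64 instance above.
putWord64 :: Word64 -> BS.ByteString
putWord64 w = BS.pack [ fromIntegral (w `shiftR` s) | s <- [56, 48 .. 0] ]

-- Length prefix (as a Word64) followed by the raw bytes, as in the
-- ByteString instance above.
putBytes :: BS.ByteString -> BS.ByteString
putBytes bs = putWord64 (fromIntegral (BS.length bs)) `BS.append` bs

-- A 1 byte before each element and a final 0 byte, as in the [] instance.
putList :: (a -> BS.ByteString) -> [a] -> BS.ByteString
putList put = foldr (\x rest -> 1 `BS.cons` put x `BS.append` rest)
                    (BS.singleton 0)

main :: IO ()
main = print (BS.unpack (putList putBytes [BS.pack [0x68, 0x69]]))
-- prints [1,0,0,0,0,0,0,0,2,104,105,0]: one element, length 2, bytes "hi", end marker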
hunk ./Camp/Command/Pull.hs 76 - Anonymous1 newLocalPatches -> - do is <- writeMegaPatches newLocalPatches - -- XXX We could change to a repo format that allows for append - let localInventory' = localInventory ++ is - applyToRepo newLocalPatches - writeInventory localInventory' + Anonymous1 newLocalPatches -> + do is <- writeMegaPatches newLocalPatches + -- XXX We could change to a repo format that allows for append + let localInventory' = localInventory ++ is + applyToRepo newLocalPatches + writeInventory localInventory' hunk ./Camp/Patch/ContextedPatch.hs 27 - valid bs _ = case valid bs (undefined :: Seq Patch from ()) of + valid bs _ = case valid bs (undefined :: Seq Patch from mid) of hunk ./Camp/Patch/ContextedPatch.hs 29 - Right bs' -> valid bs' (undefined :: Patch () to) + Right bs' -> valid bs' (undefined :: Patch mid to) hunk ./Camp/Patch/Sequence.hs 50 - case valid2 bs' (undefined :: p from ()) of + case valid2 bs' (undefined :: p from mid) of hunk ./Camp/Patch/Sequence.hs 52 - valid bs'' (undefined :: Seq p () to) + valid bs'' (undefined :: Seq p mid to) adddir ./tests adddir ./tests/simple_merge addfile ./tests/run_tests.sh hunk ./tests/run_tests.sh 1 +#!/bin/sh + +set -e + +CAMP="${CAMP:-`pwd`/../dist/build/camp/camp}" +export CAMP + +FAILED=0 +for TEST in */ +do + echo Running $TEST + cd $TEST + if ! sh run_test.sh + then + FAILED=$(($FAILED + 1)) + fi + cd .. +done + +echo +if [ $FAILED -eq 0 ] +then + echo All tests successful +else + echo $FAILED tests failed + exit 1 +fi + addfile ./tests/simple_merge/run_test.sh hunk ./tests/simple_merge/run_test.sh 1 +#!/bin/sh + +set -e + +CAMP="${CAMP:-`pwd`/../../dist/build/camp/camp}" + +cleanup() { + rm -rf a + rm -rf b + rm -f f +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +# Set up the base repo in "a" +mkdir a +cd a +"$CAMP" init +printf 'line1\nline2\nline3\n' > f +"$CAMP" add f +"$CAMP" record +cd .. + +# Make a copy in "b" and record a change +mkdir b +cd b +"$CAMP" init +"$CAMP" pull ../a +printf 'line1\nline2\nbetween 2 and 3\nline3\n' > f +"$CAMP" record +cd .. + +# Make a change in "a" too +cd a +printf 'line1\nbetween 1 and 2\nline2\nline3\n' > f +"$CAMP" record +cd .. + +# Now pull in both directions, so both repos have both patches +cd a +"$CAMP" pull ../b +cd .. +cd b +"$CAMP" pull ../a +cd .. + +# Now check that the file content is what we expect +printf 'line1\nbetween 1 and 2\nline2\nbetween 2 and 3\nline3\n' > f +diff -u a/f f +diff -u b/f f + hunk ./tests/simple_merge/run_test.sh 2 + +# This test tests the basic functionality, i.e.: +# * Initialising a repo works +# * Adding a file works +# * Recording a patch works +# * Pulling from a repo works +# * Trivial non-conflicting merges work hunk ./Camp/Types.hs 4 +type Bytes = Integer hunk ./Camp/Patch/Primitive.hs 29 +{- +XXX The hunk format should be more efficient. Perhaps this: + + Hunk :: FilePath + -> Bytes -- Skip this many bytes... 
+ -> Line -- ...which means this many '\n's + -- At this point we are either at the beginning + -- of the file or immediately after a '\n' + -> ByteString -- Remove these bytes + -- If not "", either we have removed the remaining + -- file contents or the last byte is '\n' + -> Line -- We removed this many '\n's + -> ByteString -- Add these bytes + -- If not "", either we are at the end of the + -- file or the last byte is '\n' + -> Line -- We added this many '\n's + -> Primitive from to +Problem?: If the file content changes from "foo\n" to "foo" +then we will remove and rewrite the "foo" +-} + hunk ./Camp/Patch/Primitive.hs 21 + AddDir :: FilePath -> Primitive from to + RmDir :: FilePath -> Primitive from to + MvDir :: FilePath -> FilePath -> Primitive from to hunk ./Camp/Patch/Primitive.hs 26 + MvFile :: FilePath -> FilePath -> Primitive from to hunk ./Camp/Patch/Primitive.hs 58 - (fp, bs') -> (AddFile (BSC.unpack fp), bs') + (fp, bs1) -> (AddDir (BSC.unpack fp), bs1) hunk ./Camp/Patch/Primitive.hs 60 - (fp, bs') -> (RmFile (BSC.unpack fp), bs') + (fp, bs1) -> (RmDir (BSC.unpack fp), bs1) + 2 -> case input (BS.tail bs) of + (from, bs1) -> + case input bs1 of + (to, bs2) -> + (MvDir (BSC.unpack from) (BSC.unpack to), bs2) + 3 -> case input (BS.tail bs) of + (fp, bs1) -> (AddFile (BSC.unpack fp), bs1) + 4 -> case input (BS.tail bs) of + (fp, bs1) -> (RmFile (BSC.unpack fp), bs1) + 5 -> case input (BS.tail bs) of + (from, bs1) -> + case input bs1 of + (to, bs2) -> + (MvFile (BSC.unpack from) (BSC.unpack to), bs2) hunk ./Camp/Patch/Primitive.hs 77 - 2 -> case input (BS.tail bs) of + 6 -> case input (BS.tail bs) of hunk ./Camp/Patch/Primitive.hs 97 - Just (0, bs') -> - valid bs' (undefined :: ByteString) - Just (1, bs') -> - valid bs' (undefined :: ByteString) + -- AddDir + Just (0, bs1) -> + valid bs1 (undefined :: ByteString) + -- RmDir + Just (1, bs1) -> + valid bs1 (undefined :: ByteString) + -- MvDir hunk ./Camp/Patch/Primitive.hs 105 + case valid bs1 (undefined :: ByteString) of + Left err -> Left err + Right bs2 -> + valid bs2 (undefined :: ByteString) + -- AddFile + Just (3, bs1) -> + valid bs1 (undefined :: ByteString) + -- RmFile + Just (4, bs1) -> + valid bs1 (undefined :: ByteString) + -- MvFile + Just (5, bs1) -> + case valid bs1 (undefined :: ByteString) of + Left err -> Left err + Right bs2 -> + valid bs2 (undefined :: ByteString) + -- Hunk + Just (6, bs1) -> hunk ./Camp/Patch/Primitive.hs 131 - output (AddFile fp) = 0 `BS.cons` output (BSC.pack fp) - output (RmFile fp) = 1 `BS.cons` output (BSC.pack fp) + output (AddDir fp) = 0 `BS.cons` output (BSC.pack fp) + output (RmDir fp) = 1 `BS.cons` output (BSC.pack fp) + output (MvDir from to) = 2 `BS.cons` output (BSC.pack from) + `BS.append` output (BSC.pack to) + output (AddFile fp) = 3 `BS.cons` output (BSC.pack fp) + output (RmFile fp) = 4 `BS.cons` output (BSC.pack fp) + output (MvFile from to) = 5 `BS.cons` output (BSC.pack from) + `BS.append` output (BSC.pack to) hunk ./Camp/Patch/Primitive.hs 142 - = 2 `BS.cons` output (BSC.pack fp) + = 6 `BS.cons` output (BSC.pack fp) hunk ./Camp/Patch/Primitive.hs 148 + ppr (AddDir fp) = text "AddDir" <+> text (show fp) + ppr (RmDir fp) = text "RmDir" <+> text (show fp) + ppr (MvDir from to) = text "MvDir" <+> text (show from) + <+> text (show to) hunk ./Camp/Patch/Primitive.hs 154 + ppr (MvFile from to) = text "MvFile" <+> text (show from) + <+> text (show to) hunk ./Camp/Patch/Primitive.hs 162 + -- XXX Structuring this needs some thought hunk ./Camp/Patch/Primitive.hs 178 + (AddDir _, 
AddDir _) -> Nothing -- XXX far too conservative + (AddDir _, RmDir _) -> Nothing -- XXX far too conservative + (AddDir _, AddFile _) -> Nothing -- XXX far too conservative + (AddDir _, RmFile _) -> Nothing -- XXX far too conservative + (AddDir _, MvDir _ _) -> Nothing -- XXX far too conservative + (AddDir _, MvFile _ _) -> Nothing -- XXX far too conservative + (RmDir _, AddDir _) -> Nothing -- XXX far too conservative + (RmDir _, RmDir _) -> Nothing -- XXX far too conservative + (RmDir _, AddFile _) -> Nothing -- XXX far too conservative + (RmDir _, RmFile _) -> Nothing -- XXX far too conservative + (RmDir _, MvDir _ _) -> Nothing -- XXX far too conservative + (RmDir _, MvFile _ _) -> Nothing -- XXX far too conservative + (AddFile _, AddDir _) -> Nothing -- XXX far too conservative + (AddFile _, RmDir _) -> Nothing -- XXX far too conservative + (AddFile _, MvDir _ _) -> Nothing -- XXX far too conservative + (AddFile _, MvFile _ _) -> Nothing -- XXX far too conservative + (RmFile _, AddDir _) -> Nothing -- XXX far too conservative + (RmFile _, RmDir _) -> Nothing -- XXX far too conservative + (RmFile _, MvDir _ _) -> Nothing -- XXX far too conservative + (RmFile _, MvFile _ _) -> Nothing -- XXX far too conservative + (Hunk {}, AddDir _) -> Nothing -- XXX far too conservative + (Hunk {}, RmDir _) -> Nothing -- XXX far too conservative + (Hunk {}, MvDir _ _) -> Nothing -- XXX far too conservative + (Hunk {}, MvFile _ _) -> Nothing -- XXX far too conservative + (MvDir {}, _) -> Nothing -- XXX far too conservative + (MvFile {}, _) -> Nothing -- XXX far too conservative + (AddDir _, Hunk _ _ _ _) + -- XXX sanity check hunk file not in dir + | otherwise -> qp + (RmDir _, Hunk _ _ _ _) + -- XXX sanity check hunk file not in dir + | otherwise -> qp hunk ./Camp/Patch/Primitive.hs 227 - where p' = unsafeCoerce p - q' = unsafeCoerce q + where p' = unsafeCoerce p -- XXX Euch + q' = unsafeCoerce q -- XXX Euch hunk ./Camp/Patch/Primitive.hs 232 - invert (AddFile f) = RmFile f - invert (RmFile f) = AddFile f + invert (AddDir fp) = RmDir fp + invert (RmDir fp) = AddDir fp + invert (MvDir from to) = MvDir to from + invert (AddFile fp) = RmFile fp + invert (RmFile fp) = AddFile fp + invert (MvFile from to) = MvFile to from hunk ./Camp/Patch/Primitive.hs 244 - apply (AddFile fp) = do - fileExists <- doesFileExist fp + apply (AddDir fp) = do fileExists <- doesFileExist fp + directoryExists <- doesDirectoryExist fp + if fileExists || directoryExists + then error ("Already exists: " ++ show fp) + else createDirectory fp + apply (RmDir fp) = do -- XXX Should check for emptiness + removeDirectory fp + apply (MvDir from to) + = do fromDirectoryExists <- doesDirectoryExist from + toFileExists <- doesFileExist to + toDirectoryExists <- doesDirectoryExist to + if fromDirectoryExists + then if toFileExists || toDirectoryExists + then error ("Already exists: " ++ show to) + else renameDirectory from to + else error ("Not a directory: " ++ show from) + apply (AddFile fp) = do fileExists <- doesFileExist fp hunk ./Camp/Patch/Primitive.hs 270 + apply (MvFile from to) + = do fromFileExists <- doesFileExist from + toFileExists <- doesFileExist to + toDirectoryExists <- doesDirectoryExist to + if fromFileExists + then if toFileExists || toDirectoryExists + then error ("Already exists: " ++ show to) + else renameFile from to + else error ("Not a file: " ++ show from) adddir ./tests/darcs_issue1043_b addfile ./tests/darcs_issue1043_b/run_test.sh hunk ./tests/darcs_issue1043_b/run_test.sh 1 +#!/bin/sh + +# This tests that camp 
works in the second of two tests for a darcs bug, +# issue 1043. I'm not sure what broke exactly - conflict marking code? + +set -e + +CAMP="${CAMP:-`pwd`/../../dist/build/camp/camp}" + +cleanup() { + rm -rf a + rm -rf b + rm -f f +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +# Set up the base repo in "a" +mkdir a +cd a +"$CAMP" init +printf 'original - apple\noriginal - banana\n' > f +"$CAMP" add f +"$CAMP" record +cd .. + +# Make a copy in "b" and record a change +mkdir b +cd b +"$CAMP" init +"$CAMP" pull ../a +cd .. + +# Make a change in "a" +cd a +printf 'original - apple\nconflict 1 - brocolli\n' > f +"$CAMP" record +printf 'conflict 1 - artichoke\noriginal - banana\n' > f +"$CAMP" record +cd .. + +# Make a change in "b" +cd b +printf 'conflict 2 - aardvark\noriginal - banana\nconflict 2 - cougar\n' > f +"$CAMP" record +cd .. + +# Merge and resolve in b +cd b +"$CAMP" pull ../a +printf 'resolution\noriginal - apple\noriginal - banana\n' > f +"$CAMP" record +cd .. + +# Make another change in "a" +cd a +printf 'original - apple\n' > f +"$CAMP" record +cd .. + +# Merge in b +cd b +"$CAMP" pull ../a +cd .. + +# Now check that the file content is what we expect +printf 'original - apple\noriginal - banana\n' > f +diff -u b/f f + hunk ./Camp/Patch/Primitive.hs 36 + -- In hunks, we think of '\n's as being the start of a line. + -- The first line has no leading '\n', of course. + -- A "hunk point" is one of: + -- * the start of the file + -- * the end of the file + -- * immediately before a '\n' hunk ./Camp/Patch/Primitive.hs 44 - -> Line -- ...which means this many '\n's - -- At this point we are either at the beginning - -- of the file or immediately after a '\n' - -> ByteString -- Remove these bytes - -- If not "", either we have removed the remaining - -- file contents or the last byte is '\n' - -> Line -- We removed this many '\n's - -> ByteString -- Add these bytes - -- If not "", either we are at the end of the - -- file or the last byte is '\n' - -> Line -- We added this many '\n's + -> Line -- ...which means this many '\n's. + -- We are now at a hunk point. + -> ByteString -- Remove these bytes. + -> Line -- We removed this many '\n's. + -- We are now at a hunk point. + -> ByteString -- Add these bytes. + -> Line -- We added this many '\n's. + -- We are still at a hunk point. hunk ./Camp/Patch/Primitive.hs 53 -Problem?: If the file content changes from "foo\n" to "foo" -then we will remove and rewrite the "foo" hunk ./Camp/Main.hs 10 -import Camp.Curl +-- import Camp.Curl hunk ./Camp/Main.hs 24 - withGlobalCurl $ doSomething args + {- withGlobalCurl $ -} doSomething args hunk ./camp.cabal 27 - Camp.Curl + -- Camp.Curl hunk ./camp.cabal 48 - C-Sources: cbits/curl.c + -- C-Sources: cbits/curl.c hunk ./camp.cabal 55 - Pkgconfig-Depends: libcurl + -- Pkgconfig-Depends: libcurl hunk ./Camp/Repository.hs 204 - else if i < 36 then chr (ord 'a' + y - 10) + else if y < 36 then chr (ord 'a' + y - 10) hunk ./Camp/Repository.hs 189 -genName = do TOD i j <- getClockTime -- XXX Error on i or j < 0? - -- Avoid names already in our inventory - inv <- readInventory - let ns = [ n | InventoryItem n _ _ _ <- inv ] - mkName j' = Name Positive - (BSC.pack (showBase62 i ++ "-" ++ showBase62 j')) - return $ head $ [ n - | j' <- [j..], - let n = mkName j', - n `notElem` ns ] +genName + = do TOD i j <- getClockTime -- XXX Error on i or j < 0? 
+ -- Avoid names already in our inventory + inv <- readInventory + let ns = [ n | InventoryItem n _ _ _ <- inv ] + mkName j' = Name Positive + (BSC.pack (showBase62 i ++ "-" ++ showBase62 j')) + newName = head [ n + | j' <- [j..], + let n = mkName j', + n `notElem` ns ] + return newName hunk ./Camp/Command/Record.hs 14 +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC hunk ./Camp/Command/Record.hs 61 +-- XXX This is wrong - whether or not it ends in \n is important hunk ./Camp/Command/Record.hs 63 -addedFile newPath = do new <- readBinaryFile newPath - let newLines = myLines new - addFile = AddFile newPath - hunk = Hunk newPath 0 [""] newLines - return (addFile `Cons` hunk `Cons` Nil) +addedFile newPath + = do new <- BS.readFile newPath + let addFile = AddFile newPath + hunk = Hunk newPath 0 0 + BS.empty 0 + -- XXX fromIntegral + new (fromIntegral (BSC.count '\n' new) + 1) + return (addFile `Cons` hunk `Cons` Nil) + +-- XXX This is wrong - whether or not it ends in \n is important +removedFile :: FilePath -> FilePath -> IO (Seq Primitive from to) +removedFile oldPath newPath + = do old <- BS.readFile oldPath + let hunk = Hunk newPath 0 0 + -- XXX fromIntegral + old (fromIntegral (BSC.count '\n' old) + 1) + BS.empty 0 + rmFile = RmFile newPath + return (hunk `Cons` rmFile `Cons` Nil) hunk ./Camp/Command/Record.hs 103 -removedFile :: FilePath -> FilePath -> IO (Seq Primitive from to) -removedFile oldPath newPath = do old <- readBinaryFile oldPath - let oldLines = myLines old - hunk = Hunk newPath 0 oldLines [""] - rmFile = RmFile newPath - return (hunk `Cons` rmFile `Cons` Nil) - hunk ./Camp/Command/Record.hs 104 -recordFile oldPath newPath = do old <- readBinaryFile oldPath - new <- readBinaryFile newPath +recordFile oldPath newPath = do old <- BS.readFile oldPath + new <- BS.readFile newPath hunk ./Camp/Command/Record.hs 109 - return $ mkDiff newPath 0 commonLines oldLines newLines + -- XXX -1 is a hack because files don't begin with \n + return $ mkDiff newPath (-1) 0 commonLines oldLines newLines hunk ./Camp/Command/Record.hs 113 - FilePath -> Integer -> [String] -> [String] -> [String] + FilePath -> Integer -> Integer + -> [ByteString] -> [ByteString] -> [ByteString] hunk ./Camp/Command/Record.hs 116 -mkDiff fp skipped (common:cs) (old:os) (new:ns) +mkDiff fp skipBytes skipLines (common:cs) (old:os) (new:ns) hunk ./Camp/Command/Record.hs 118 - = mkDiff fp (skipped + 1) cs os ns -mkDiff fp skipped cs@(common:_) os ns - = case break (common ==) ns of - (reallyNew, ns') -> - case break (common ==) os of - (reallyOld, os') -> - let skipped' = skipped + genericLength reallyNew - in Cons (Hunk fp skipped reallyOld reallyNew) - (mkDiff fp skipped' cs os' ns') -mkDiff _ _ [] [] [] = let resType :: Seq Primitive from to - resType = undefined - in case startIsEnd resType of - IsEqual -> Nil + -- XXX fromIntegral + = mkDiff fp (skipBytes + fromIntegral (BS.length common) + 1) + (skipLines + 1) cs os ns +mkDiff fp skipBytes skipLines cs@(common:_) os ns + = case (break (common ==) os, break (common ==) ns) of + (([], _), ([], _)) -> + error "XXX mkDiff: Can't happen" + ((reallyOld, os'), ([], _)) -> + Cons (Hunk fp skipBytes skipLines + -- XXX This is wrong at the beginning of the file + (BS.concat (map ('\n' `BSC.cons`) reallyOld)) + -- XXX fromIntegral + (fromIntegral (length reallyOld)) + BS.empty 0) + (mkDiff fp skipBytes skipLines cs os' ns) + (([], _), (reallyNew, ns')) -> + let newBytes = BS.concat (map 
('\n' `BSC.cons`) reallyNew) + newLines = genericLength reallyNew + -- XXX fromIntegral + skipBytes' = skipBytes + fromIntegral (BS.length newBytes) + skipLines' = skipLines + newLines + in Cons (Hunk fp skipBytes skipLines + BS.empty 0 + -- XXX This is wrong at the beginning of the file + newBytes newLines) + (mkDiff fp skipBytes' skipLines' cs os ns') + ((reallyOld, os'), (reallyNew, ns')) -> + let oldBytes = BS.concat (intersperse (BSC.singleton '\n') reallyOld) + oldLines = genericLength reallyOld + newBytes = BS.concat (intersperse (BSC.singleton '\n') reallyNew) + newLines = genericLength reallyNew + -- XXX fromIntegral + skipBytes' = skipBytes + fromIntegral (BS.length newBytes) + skipLines' = skipLines + newLines + in Cons (Hunk fp skipBytes skipLines + oldBytes oldLines + newBytes newLines) + (mkDiff fp skipBytes' skipLines' cs os' ns') +mkDiff _ _ _ [] [] [] = let resType :: Seq Primitive from to + resType = undefined + in case startIsEnd resType of + IsEqual -> Nil +{- hunk ./Camp/Command/Record.hs 162 +-} +mkDiff _ _ _ _ _ _ = error "XXX mkDiff fallthrough" hunk ./Camp/Main.hs 9 -import Camp.Command.Test +import Camp.Command.Show +-- import Camp.Command.Test hunk ./Camp/Main.hs 96 - "test" : args' -> test l args' + "show" : args' -> showC l args' + -- XXX "test" : args' -> test l args' hunk ./Camp/Patch/Primitive.hs 12 +import Control.Exception hunk ./Camp/Patch/Primitive.hs 28 - Hunk :: FilePath - -> Line -- skip this many lines - -> [String] -- remove these lines - -> [String] -- add these lines - -> Primitive from to - -{- -XXX The hunk format should be more efficient. Perhaps this: - - -- In hunks, we think of '\n's as being the start of a line. + -- In hunks, we mostly think of '\n's as being the start of a line. + -- The exception is when adding or removing lines from the start + -- of the file, when we can't have a newline at the start. hunk ./Camp/Patch/Primitive.hs 47 --} hunk ./Camp/Patch/Primitive.hs 74 - (skip, bs2) -> + (skipBytes, bs2) -> hunk ./Camp/Patch/Primitive.hs 76 - (old, bs3) -> + (skipLines, bs3) -> hunk ./Camp/Patch/Primitive.hs 78 - (new, bs4) -> - (Hunk (BSC.unpack fp) - skip - (map BSC.unpack old) - (map BSC.unpack new), - bs4) + (oldBytes, bs4) -> + case input bs4 of + (oldLines, bs5) -> + case input bs5 of + (newBytes, bs6) -> + case input bs6 of + (newLines, bs7) -> + (Hunk (BSC.unpack fp) + skipBytes + skipLines + oldBytes + oldLines + newBytes + newLines, + bs7) hunk ./Camp/Patch/Primitive.hs 125 - Just (6, bs1) -> - case valid bs1 (undefined :: Integer) of - Left err -> Left err - Right bs2 -> - case valid bs2 (undefined :: [ByteString]) of - Left err -> Left err - Right bs3 -> - valid bs3 (undefined :: [ByteString]) + Just (6, _) -> + error "XXX" hunk ./Camp/Patch/Primitive.hs 138 - output (Hunk fp skip old new) + output (Hunk fp skipBytes skipLines oldBytes oldLines newBytes newLines) hunk ./Camp/Patch/Primitive.hs 140 - `BS.append` output skip - `BS.append` output (map BSC.pack old) - `BS.append` output (map BSC.pack new) + `BS.append` output skipBytes + `BS.append` output skipLines + `BS.append` output oldBytes + `BS.append` output oldLines + `BS.append` output newBytes + `BS.append` output newLines hunk ./Camp/Patch/Primitive.hs 156 - ppr (Hunk fp skip old new) - = text "Hunk" <+> text (show fp) <+> integer skip - $$ nest 4 (brackets $ vcat $ map (text . show) old) - $$ nest 4 (brackets $ vcat $ map (text . 
show) new) + ppr (Hunk fp skipBytes skipLines oldBytes oldLines newBytes newLines) + = text "Hunk" <+> text (show fp) + <+> integer skipBytes + <+> integer skipLines + <+> integer oldLines + <+> integer newLines + -- XXX do something about lines?: + $$ nest 4 (text $ show $ BSC.unpack oldBytes) + $$ nest 4 (text $ show $ BSC.unpack newBytes) hunk ./Camp/Patch/Primitive.hs 180 - (AddFile p_f, Hunk q_f _ _ _) + (AddFile p_f, Hunk q_f _ _ _ _ _ _) hunk ./Camp/Patch/Primitive.hs 209 - (AddDir _, Hunk _ _ _ _) + (AddDir {}, Hunk {}) hunk ./Camp/Patch/Primitive.hs 212 - (RmDir _, Hunk _ _ _ _) + (RmDir {}, Hunk {}) hunk ./Camp/Patch/Primitive.hs 215 - (Hunk p_f _ _ _, AddFile q_f) -> + (Hunk p_f _ _ _ _ _ _, AddFile q_f) -> hunk ./Camp/Patch/Primitive.hs 217 - (RmFile p_f, Hunk q_f _ _ _) -> + (RmFile p_f, Hunk q_f _ _ _ _ _ _) -> hunk ./Camp/Patch/Primitive.hs 219 - (Hunk p_f _ _ _, RmFile q_f) + (Hunk p_f _ _ _ _ _ _, RmFile q_f) hunk ./Camp/Patch/Primitive.hs 222 - (Hunk p_f p_skip p_old p_new, Hunk q_f q_skip q_old q_new) - | p_f /= q_f -> qp - | p_skip + genericLength p_new < q_skip -> - let movement = genericLength p_new - genericLength p_old - in Just (Hunk q_f (q_skip - movement) q_old q_new `Then` p') - | q_skip + genericLength q_old < p_skip -> - let movement = genericLength q_new - genericLength q_old - in Just (q' `Then` Hunk p_f (p_skip - movement) p_old p_new) + (Hunk p_fp p_skipBytes p_skipLines + p_oldBytes p_oldLines + p_newBytes p_newLines, + Hunk q_fp q_skipBytes q_skipLines + q_oldBytes q_oldLines + q_newBytes q_newLines) + | p_fp /= q_fp -> qp + | p_skipLines + p_newLines < q_skipLines -> + let byteMovement = BS.length p_newBytes - BS.length p_oldBytes + lineMovement = p_newLines - p_oldLines + -- XXX fromIntegral + in Just (Hunk q_fp (q_skipBytes - fromIntegral byteMovement) + (q_skipLines - fromIntegral lineMovement) + q_oldBytes q_oldLines + q_newBytes q_newLines + `Then` p') + | q_skipLines + q_oldLines < p_skipLines -> + let byteMovement = BS.length q_newBytes - BS.length q_oldBytes + lineMovement = q_newLines - q_oldLines + in Just (q' `Then` + -- XXX fromIntegral + Hunk p_fp (p_skipBytes - fromIntegral byteMovement) + (p_skipLines - fromIntegral lineMovement) + p_oldBytes p_oldLines + p_newBytes p_newLines) hunk ./Camp/Patch/Primitive.hs 260 - invert (Hunk f skip remove add) = Hunk f skip add remove + invert (Hunk fp skipBytes skipLines oldBytes oldLines newBytes newLines) + = Hunk fp skipBytes skipLines newBytes newLines oldBytes oldLines hunk ./Camp/Patch/Primitive.hs 302 - apply (Hunk fp skip old new) - = do content <- readBinaryFile fp - case mySplitAt skip $ myLines content of - Just (skipped, rest) -> - case stripPrefix old rest of - Just rest' -> do - let content' = myUnlines (skipped ++ new ++ rest') - writeBinaryFile fp content' - Nothing -> error "Old patch content is wrong" - Nothing -> error "Not enough lines to skip" + apply (Hunk fp skipBytes _ oldBytes _ newBytes _) + -- XXX This should check that the file is big enough etc + = do content <- BS.readFile fp + -- XXX fromIntegral + case BS.splitAt (fromIntegral skipBytes) content of + -- XXX Should check length of skipped + (skipped, rest) -> + -- XXX fromIntegral + case BS.splitAt (fromIntegral (BS.length oldBytes)) rest of + (_old, rest') -> + -- XXX sanity check oldBytes == old + do let content' = skipped `BS.append` newBytes + `BS.append` rest' + evaluate $ BS.length content -- XXX + BS.writeFile fp content' + -- Nothing -> error "Old patch content is wrong" + -- Nothing -> error "Not enough lines to 
skip" hunk ./Camp/Utils.hs 10 +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC hunk ./Camp/Utils.hs 19 +-- XXX Can we use BSC.lines, or BSC.split '\n' instead? hunk ./Camp/Utils.hs 24 -myLines :: String -> [String] -myLines xs = case break ('\n' ==) xs of - (ys, _ : zs) -> ys : myLines zs - (_, []) -> [xs] +myLines :: ByteString -> [ByteString] +myLines xs = case BSC.break ('\n' ==) xs of + (ys, zs) -> + case BS.uncons zs of + Just (_, zs') -> ys : myLines zs' + Nothing -> [xs] hunk ./Camp/Patch/Primitive.hs 69 - -- XXX This is hideous, but we're going to change the - -- hunk format shortly anyway: hunk ./Camp/Patch/Primitive.hs 124 - error "XXX" + error "XXX valid/Hunk not written yet" hunk ./Camp/Patch/Primitive.hs 134 - -- XXX This is hideous, but we're going to change the hunk format - -- shortly anyway: hunk ./Camp/Command/Record.hs 12 +import Camp.Types hunk ./Camp/Command/Record.hs 69 - -- XXX fromIntegral - new (fromIntegral (BSC.count '\n' new) + 1) + new (BSC.count '\n' new + 1) hunk ./Camp/Command/Record.hs 77 - -- XXX fromIntegral - old (fromIntegral (BSC.count '\n' old) + 1) + old (BSC.count '\n' old + 1) hunk ./Camp/Command/Record.hs 112 - FilePath -> Integer -> Integer + FilePath -> Bytes -> Line hunk ./Camp/Command/Record.hs 117 - -- XXX fromIntegral - = mkDiff fp (skipBytes + fromIntegral (BS.length common) + 1) + = mkDiff fp (skipBytes + BS.length common + 1{- '\n' -}) hunk ./Camp/Command/Record.hs 127 - -- XXX fromIntegral - (fromIntegral (length reallyOld)) + (genericLength reallyOld) hunk ./Camp/Command/Record.hs 133 - -- XXX fromIntegral - skipBytes' = skipBytes + fromIntegral (BS.length newBytes) + skipBytes' = skipBytes + BS.length newBytes hunk ./Camp/Command/Record.hs 145 - -- XXX fromIntegral - skipBytes' = skipBytes + fromIntegral (BS.length newBytes) + skipBytes' = skipBytes + BS.length newBytes hunk ./Camp/Patch/Pretty.hs 2 -module Camp.Patch.Pretty (Ppr(..), pprint, module Text.PrettyPrint) where +module Camp.Patch.Pretty (Ppr(..), pprint, + module Text.PrettyPrint, integral) where hunk ./Camp/Patch/Pretty.hs 19 +integral :: Integral a => a -> Doc +integral = integer . fromIntegral + hunk ./Camp/Patch/Primitive.hs 154 - <+> integer skipBytes - <+> integer skipLines - <+> integer oldLines - <+> integer newLines + <+> integral skipBytes + <+> integral skipLines + <+> integral oldLines + <+> integral newLines hunk ./Camp/Patch/Primitive.hs 228 - -- XXX fromIntegral - in Just (Hunk q_fp (q_skipBytes - fromIntegral byteMovement) - (q_skipLines - fromIntegral lineMovement) + in Just (Hunk q_fp (q_skipBytes - byteMovement) + (q_skipLines - lineMovement) hunk ./Camp/Patch/Primitive.hs 237 - -- XXX fromIntegral - Hunk p_fp (p_skipBytes - fromIntegral byteMovement) - (p_skipLines - fromIntegral lineMovement) + Hunk p_fp (p_skipBytes - byteMovement) + (p_skipLines - lineMovement) hunk ./Camp/Patch/Primitive.hs 299 - -- XXX fromIntegral - case BS.splitAt (fromIntegral skipBytes) content of + case BS.splitAt skipBytes content of hunk ./Camp/Patch/Primitive.hs 302 - -- XXX fromIntegral - case BS.splitAt (fromIntegral (BS.length oldBytes)) rest of + case BS.splitAt (BS.length oldBytes) rest of hunk ./Camp/Types.hs 4 -type Bytes = Integer -type Line = Integer +import Data.Int + +-- We pick the types of Bytes to match the type that +-- Data.ByteString.Lazy uses. 
+type Bytes = Int64 +-- If we have more lines than bytes then we're in trouble, +-- so we might as well use the same type here too. +type Line = Int64 + hunk ./Camp/Command/Record.hs 24 -record _ [] = do directory <- recordDirectory pristineDir "." - adds <- recordAdds - n <- genName - let primitives = directory `appendSeq` adds - patches = mkPatches n 1 primitives - catches = mkCatches patches - megaPatch = MegaPatch n catches - i <- writeMegaPatch megaPatch - applyToPristine megaPatch - is <- readInventory - -- XXX We could change to a repo format that allows for append - writeInventory (is ++ [i]) -record _ _ = error "Unknown arguments to record" +record _ [] = do n <- genName + recordName n +record _ [n] = recordName (Name Positive (BSC.pack n)) +record _ _ = error "Unknown arguments to record" + +recordName :: Name -> IO () +recordName n = do directory <- recordDirectory pristineDir "." + adds <- recordAdds + let primitives = directory `appendSeq` adds + patches = mkPatches n 1 primitives + catches = mkCatches patches + megaPatch = MegaPatch n catches + i <- writeMegaPatch megaPatch + applyToPristine megaPatch + is <- readInventory + writeInventory (is ++ [i]) hunk ./tests/darcs_issue1043_b/run_test.sh 8 -CAMP="${CAMP:-`pwd`/../../dist/build/camp/camp}" +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" hunk ./tests/darcs_issue1043_b/run_test.sh 12 + cd "$HERE" hunk ./tests/simple_merge/run_test.sh 12 -CAMP="${CAMP:-`pwd`/../../dist/build/camp/camp}" +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" hunk ./tests/simple_merge/run_test.sh 16 + cd "$HERE" hunk ./Camp/Patch/InputOutput.hs 40 - BS.splitAt (fromIntegral (w :: Word64)) bs' + -- XXX Should be sanity checking that it was big enough? + BS.splitAt w bs' hunk ./Camp/Patch/InputOutput.hs 51 - output bs = output (fromIntegral (BS.length bs) :: Word64) `BS.append` bs + output bs = output (BS.length bs) `BS.append` bs hunk ./Camp/Command/Pull.hs 7 -import Camp.Patch.InputOutput hunk ./Camp/Command/Pull.hs 13 -import Data.ByteString.Lazy (ByteString) -import qualified Data.ByteString.Lazy.Char8 as BSC - hiding (ByteString) -- XXX Warning hack hunk ./Camp/Command/Pull.hs 20 - let wantedPatches' = map BSC.pack wantedPatches - localNames = [ n | InventoryItem n _ _ _ <- localInventory ] + let localNames = [ n | InventoryItem n _ _ _ <- localInventory ] hunk ./Camp/Command/Pull.hs 46 - if all validName wantedPatches' - then let wantedNames = -- XXX Should check snd == "" - map (fst . 
input) wantedPatches' - wantedNameSet = Set.fromList wantedNames - in case tryCommuteToPrefix wantedNameSet remotePatches of - Just (remotePatches' `Then` _) -> - doPull localInventory localPatches remotePatches' - Nothing -> - error "Can't pull those patches due to dependencies" - else error "Can't parse the patch names you want" + case mapM parseName wantedPatches of + Just wantedNames -> + let wantedNameSet = Set.fromList wantedNames + in case tryCommuteToPrefix wantedNameSet remotePatches of + Just (remotePatches' `Then` _) -> + doPull localInventory localPatches remotePatches' + Nothing -> + error "Can't pull those patches due to dependencies" + Nothing -> error "Can't parse the patch names you want" hunk ./Camp/Command/Pull.hs 57 -validName :: ByteString -> Bool -validName bs = case valid bs (undefined :: Name) of - Left _ -> False - Right _ -> True - hunk ./Camp/Patch/Name.hs 4 - Name(..), Sign(..), SubName(..), inverseName, inverseSubName + Name(..), Sign(..), SubName(..), inverseName, inverseSubName, + parseName, hunk ./Camp/Patch/Name.hs 25 +parseName :: String -> Maybe Name +parseName bs = case bs of + 'P':'-':bs' -> mkName Positive bs' + 'N':'-':bs' -> mkName Negative bs' + _ -> Nothing + where mkName sign bs' = if null bs' || not (all good bs') + then Nothing + else Just (Name sign (BSC.pack bs')) + good c = (c >= '0' && c <= '9') + || (c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z') + || (c == '-' || c == '.') + hunk ./Camp/Patch/Primitive.hs 154 - <+> integral skipBytes - <+> integral skipLines - <+> integral oldLines - <+> integral newLines - -- XXX do something about lines?: - $$ nest 4 (text $ show $ BSC.unpack oldBytes) - $$ nest 4 (text $ show $ BSC.unpack newBytes) + <+> text "Skip" <+> integral skipBytes <+> text "bytes," + <+> integral skipLines <+> text "lines" + $$ nest 4 ( + text "Removal touches" <+> + integral oldLines <+> text "lines" + $$ text "Addition touches" <+> + integral newLines <+> text "lines" + -- XXX do something about lines?: + $$ (text $ show $ BSC.unpack oldBytes) + $$ (text $ show $ BSC.unpack newBytes) + ) hunk ./Camp/Command/Record.hs 65 --- XXX This is wrong - whether or not it ends in \n is important hunk ./Camp/Command/Record.hs 69 - hunk = Hunk newPath 0 0 - BS.empty 0 - new (BSC.count '\n' new + 1) - return (addFile `Cons` hunk `Cons` Nil) + if BS.null new + then return (addFile `Cons` Nil) + else let hunk = if (BSC.head new == '\n') || + (BSC.last new == '\n') + then Hunk newPath 0 0 + BS.empty 0 + new (BSC.count '\n' new) + else Hunk newPath 0 0 + BS.empty 1 + new (BSC.count '\n' new + 1) + in return (addFile `Cons` hunk `Cons` Nil) hunk ./Camp/Command/Record.hs 81 --- XXX This is wrong - whether or not it ends in \n is important hunk ./Camp/Command/Record.hs 84 - let hunk = Hunk newPath 0 0 - old (BSC.count '\n' old + 1) - BS.empty 0 - rmFile = RmFile newPath - return (hunk `Cons` rmFile `Cons` Nil) + let rmFile = RmFile newPath + if BS.null old + then return (rmFile `Cons` Nil) + else let hunk = if (BSC.head old == '\n') || + (BSC.last old == '\n') + then Hunk newPath 0 0 + old (BSC.count '\n' old) + BS.empty 0 + else Hunk newPath 0 0 + old (BSC.count '\n' old + 1) + BS.empty 1 + in return (rmFile `Cons` hunk `Cons` Nil) hunk ./Camp/Command/Record.hs 124 - return $ mkDiff newPath (-1) 0 commonLines oldLines newLines + return $ mkDiff newPath 0 0 commonLines oldLines newLines hunk ./Camp/Command/Record.hs 130 -mkDiff fp skipBytes skipLines (common:cs) (old:os) (new:ns) - | common == old && common == new - = mkDiff fp (skipBytes + 
BS.length common + 1{- '\n' -}) - (skipLines + 1) cs os ns -mkDiff fp skipBytes skipLines cs@(common:_) os ns - = case (break (common ==) os, break (common ==) ns) of - (([], _), ([], _)) -> - error "XXX mkDiff: Can't happen" - ((reallyOld, os'), ([], _)) -> - Cons (Hunk fp skipBytes skipLines - -- XXX This is wrong at the beginning of the file - (BS.concat (map ('\n' `BSC.cons`) reallyOld)) - (genericLength reallyOld) - BS.empty 0) - (mkDiff fp skipBytes skipLines cs os' ns) - (([], _), (reallyNew, ns')) -> - let newBytes = BS.concat (map ('\n' `BSC.cons`) reallyNew) - newLines = genericLength reallyNew - skipBytes' = skipBytes + BS.length newBytes - skipLines' = skipLines + newLines - in Cons (Hunk fp skipBytes skipLines - BS.empty 0 - -- XXX This is wrong at the beginning of the file - newBytes newLines) - (mkDiff fp skipBytes' skipLines' cs os ns') - ((reallyOld, os'), (reallyNew, ns')) -> - let oldBytes = BS.concat (intersperse (BSC.singleton '\n') reallyOld) - oldLines = genericLength reallyOld - newBytes = BS.concat (intersperse (BSC.singleton '\n') reallyNew) - newLines = genericLength reallyNew - skipBytes' = skipBytes + BS.length newBytes - skipLines' = skipLines + newLines - in Cons (Hunk fp skipBytes skipLines - oldBytes oldLines - newBytes newLines) - (mkDiff fp skipBytes' skipLines' cs os' ns') -mkDiff _ _ _ [] [] [] = let resType :: Seq Primitive from to - resType = undefined - in case startIsEnd resType of - IsEqual -> Nil -{- -mkDiff fp skipped [] os ns = Cons (Hunk fp skipped os ns) Nil --} -mkDiff _ _ _ _ _ _ = error "XXX mkDiff fallthrough" +-- XXX This is all a bit fiddly +mkDiff fp skipBytes skipLines cs os ns + = case (cs, os, ns) of + ([], [], []) -> let resType :: Seq Primitive from to + resType = undefined + in case startIsEnd resType of + IsEqual -> Nil + (common:cs', old:os', new:ns') + | common == old && common == new + -> mkDiff fp (skipBytes + BS.length common + 1{- '\n' -}) + (skipLines + 1) cs' os' ns' + (common:_, _, _) -> + mkHunkThenRest appendNewLines skipBytes (break (common ==) os) + (break (common ==) ns) + ([], _, _) + | skipBytes == 0 -> + mkHunkThenRest intersperseNewLines skipBytes (os, []) (ns, []) + | otherwise -> + -- Here we have to unskip the last '\n' we skipped, as we need + -- to remove it + mkHunkThenRest prefixNewLines (skipBytes - 1) (os, []) (ns, []) + where appendNewLines = map (`BSC.append` BSC.singleton '\n') + intersperseNewLines = intersperse (BSC.singleton '\n') + prefixNewLines = map ('\n' `BSC.cons`) + + mkHunkThenRest _ _ ([], _) ([], _) + = error "XXX mkDiff: Can't happen" + mkHunkThenRest addNewLines skipBytes' (reallyOld, os') ([], _) = + Cons (Hunk fp skipBytes' skipLines + (BS.concat (addNewLines reallyOld)) + (genericLength reallyOld) + BS.empty 0) + (mkDiff fp skipBytes skipLines cs os' ns) + mkHunkThenRest addNewLines skipBytes' ([], _) (reallyNew, ns') = + let newBytes = BS.concat (addNewLines reallyNew) + newLines = genericLength reallyNew + skipBytes'' = skipBytes' + BS.length newBytes + skipLines' = skipLines + newLines + in Cons (Hunk fp skipBytes' skipLines + BS.empty 0 + newBytes newLines) + (mkDiff fp skipBytes'' skipLines' cs os ns') + -- If we have both removals and additions then we don't need + -- to worry about newlines - we leave newlines before and + -- after the hunk, if they were there + mkHunkThenRest _ _ (reallyOld, os') (reallyNew, ns') = + let oldBytes = BS.intercalate (BSC.singleton '\n') reallyOld + oldLines = genericLength reallyOld + newBytes = BS.intercalate (BSC.singleton '\n') reallyNew + 
newLines = genericLength reallyNew + skipBytes' = skipBytes + BS.length newBytes + skipLines' = skipLines + newLines + in Cons (Hunk fp skipBytes skipLines + oldBytes oldLines + newBytes newLines) + (mkDiff fp skipBytes' skipLines' cs os' ns') hunk ./Camp/Patch/Primitive.hs 28 - -- In hunks, we mostly think of '\n's as being the start of a line. - -- The exception is when adding or removing lines from the start - -- of the file, when we can't have a newline at the start. - -- The first line has no leading '\n', of course. - -- A "hunk point" is one of: - -- * the start of the file - -- * the end of the file - -- * immediately before a '\n' hunk ./Camp/Patch/Primitive.hs 30 - -> Line -- ...which means this many '\n's. - -- We are now at a hunk point. + -> Line -- ...which means this many lines hunk ./Camp/Patch/Primitive.hs 32 - -> Line -- We removed this many '\n's. - -- We are now at a hunk point. + -> Line -- We removed this many lines hunk ./Camp/Patch/Primitive.hs 34 - -> Line -- We added this many '\n's. - -- We are still at a hunk point. + -> Line -- We added this many lines adddir ./tests/file_end_commutes addfile ./tests/file_end_commutes/run_test.sh hunk ./tests/file_end_commutes/run_test.sh 1 +#!/bin/sh + +# This test tests commutes of patches around the ends of files + +set -e + +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" + +cleanup() { + cd "$HERE" + rm -rf base + rm -rf r123 r132 r213 r231 r312 r321 +} + +info() { + if [ "$VERBOSE" -eq 1 ] + then + echo "$@" + fi +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +VERBOSE=0 +if [ "$1" = "v" ] +then + VERBOSE=1 +fi + +# Set up the base repo +mkdir base +cd base +"$CAMP" init +printf 'line1\nline2\n' > f +"$CAMP" add f +"$CAMP" record o +printf 'start\nline1\nline2\n' > f +"$CAMP" record p1 +printf 'start\nline1\nmiddle\nline2\n' > f +"$CAMP" record p2 +printf 'start\nline1\nmiddle\nline2\nend\n' > f +"$CAMP" record p3 +cd .. + +# Now try pulling the patches in the 6 possible orders +info Doing 123 +mkdir r123 +cd r123 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p3 +diff -u ../base/f f +cd .. + +info Doing 132 +mkdir r132 +cd r132 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p2 +diff -u ../base/f f +cd .. + +info Doing 213 +mkdir r213 +cd r213 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p3 +diff -u ../base/f f +cd .. + +info Doing 231 +mkdir r231 +cd r231 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p1 +diff -u ../base/f f +cd .. + +info Doing 312 +mkdir r312 +cd r312 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p2 +diff -u ../base/f f +cd .. + +info Doing 321 +mkdir r321 +cd r321 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p1 +diff -u ../base/f f +cd .. 
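The six pull orders in this test exercise the Hunk/Hunk commute rule added to Camp/Patch/Primitive.hs above. As a rough sketch of just the offset arithmetic, here is the same rule reduced to plain Ints and line counts; the real code also adjusts byte offsets and carries the GADT context indices, and SimpleHunk/commuteHunks are names invented for this illustration.

module CommuteSketch where

data SimpleHunk = SimpleHunk { hFile :: FilePath
                             , hSkip :: Int   -- lines skipped before the hunk
                             , hOld  :: Int   -- lines removed
                             , hNew  :: Int   -- lines added
                             } deriving Show

-- commuteHunks (p, q) tries to turn "p then q" into "q then p".
commuteHunks :: (SimpleHunk, SimpleHunk) -> Maybe (SimpleHunk, SimpleHunk)
commuteHunks (p, q)
    | hFile p /= hFile q = Just (q, p)     -- different files commute trivially
    | hSkip p + hNew p < hSkip q =         -- p touches lines strictly before q
        let movement = hNew p - hOld p
        in Just (q { hSkip = hSkip q - movement }, p)
    | hSkip q + hOld q < hSkip p =         -- q touches lines strictly before p
        let movement = hNew q - hOld q
        in Just (q, p { hSkip = hSkip p - movement })
    | otherwise = Nothing                  -- overlapping hunks do not commute

For example, commuteHunks (SimpleHunk "f" 0 0 1, SimpleHunk "f" 4 0 1) is Just (SimpleHunk "f" 3 0 1, SimpleHunk "f" 0 0 1): a one-line insertion near the top of f slides past a later insertion by shrinking its skip count, which is the adjustment the out-of-order pulls above depend on.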
+ adddir ./tests/file_end_commutes2 addfile ./tests/file_end_commutes2/run_test.sh hunk ./tests/file_end_commutes2/run_test.sh 1 +#!/bin/sh + +# This test tests commutes of patches around the ends of files +# It differs from the original in that there is no trailing newline + +set -e + +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" + +cleanup() { + cd "$HERE" + rm -rf base + rm -rf r123 r132 r213 r231 r312 r321 +} + +info() { + if [ "$VERBOSE" -eq 1 ] + then + echo "$@" + fi +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +VERBOSE=0 +if [ "$1" = "v" ] +then + VERBOSE=1 +fi + +# Set up the base repo +mkdir base +cd base +"$CAMP" init +printf 'line1\nline2' > f +"$CAMP" add f +"$CAMP" record o +printf 'start\nline1\nline2' > f +"$CAMP" record p1 +printf 'start\nline1\nmiddle\nline2' > f +"$CAMP" record p2 +printf 'start\nline1\nmiddle\nline2\nend' > f +"$CAMP" record p3 +cd .. + +# Now try pulling the patches in the 6 possible orders +info Doing 123 +mkdir r123 +cd r123 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p3 +diff -u ../base/f f +cd .. + +info Doing 132 +mkdir r132 +cd r132 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p2 +diff -u ../base/f f +cd .. + +info Doing 213 +mkdir r213 +cd r213 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p3 +diff -u ../base/f f +cd .. + +info Doing 231 +mkdir r231 +cd r231 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p1 +diff -u ../base/f f +cd .. + +info Doing 312 +mkdir r312 +cd r312 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base P-p2 +diff -u ../base/f f +cd .. + +info Doing 321 +mkdir r321 +cd r321 +"$CAMP" init +"$CAMP" pull ../base P-o +"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base P-p1 +diff -u ../base/f f +cd .. 
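The no-trailing-newline variant matters because of the line-counting convention the record changes above adopt: a recorded chunk that begins or ends with '\n' counts exactly its '\n' characters as lines, and otherwise one more. A standalone restatement of that rule follows; hunkLineCount is a name made up for this sketch.

module HunkLines (hunkLineCount) where

import Data.Int (Int64)
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BSC

-- How many lines a recorded chunk counts as, mirroring the
-- head/last '\n' tests in the Record changes above.
hunkLineCount :: BSC.ByteString -> Int64
hunkLineCount bs
    | BS.null bs                                 = 0
    | BSC.head bs == '\n' || BSC.last bs == '\n' = BSC.count '\n' bs
    | otherwise                                  = BSC.count '\n' bs + 1

-- e.g. hunkLineCount (BSC.pack "line1\nline2\n") == 2
--      hunkLineCount (BSC.pack "line1\nline2")   == 2
--      hunkLineCount (BSC.pack "\nend")          == 1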
+ hunk ./Camp/Command/Record.hs 123 - -- XXX -1 is a hack because files don't begin with \n hunk ./Camp/Command/Pull.hs 67 - -- XXX We could change to a repo format that allows for append addfile ./Camp/Command/Show.hs hunk ./Camp/Command/Show.hs 1 + +module Camp.Command.Show (showC) where + +import Camp.Patch.Name +import Camp.Patch.Pretty +import Camp.Repository + +showC :: Log -> [String] -> IO () +showC _ [wanted] + = do inventory <- readInventory + let wantedName = case parseName wanted of + Just n -> n + Nothing -> + -- This is a bit of a hack; if they give + -- an unsigned name, assume they want the + -- positive one + case parseName ('P':'-':wanted) of + Just n -> n + Nothing -> error "Bad Name" + let isWanted (InventoryItem n _ _ _) = n == wantedName + inventoryLine = case filter isWanted inventory of + [i] -> i + _ -> error "XXX Can't find patch" + patch <- readMegaPatch inventoryLine + putStrLn $ pprint patch +showC _ _ = error "XXX show" + hunk ./Camp/Main.hs 18 +import System.Exit hunk ./Camp/Main.hs 84 - doIt l args `catch` \e -> do - logException l e - hPutStrLn stderr "Got an exception:" - hPutStrLn stderr $ show e + doIt l args `catches` + [Handler $ \e -> throw (e :: ExitCode), + Handler $ \e -> do + logException l e + hPutStrLn stderr "Got an exception:" + hPutStrLn stderr $ show e] addfile ./Camp/Command/Interactive.hs hunk ./Camp/Command/Interactive.hs 1 + +{-# OPTIONS -w #-} + +module Camp.Command.Interactive (interactive) where + +import Camp.Patch.Catch +import Camp.Patch.Commute +import Camp.Patch.Equality +import Camp.Patch.MegaPatch +import Camp.Patch.Name +import Camp.Patch.Patch +import Camp.Patch.Pretty +import Camp.Patch.Primitive +import Camp.Patch.RevSequence +import Camp.Patch.Sequence +import Camp.Repository +import Camp.Types +import Camp.Utils + +import Control.Exception +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC +import Data.List +import Data.List.LCS +import System.Directory +import System.Exit +import System.FilePath +import System.IO + +interactive :: Seq Primitive from to + -> IO (Then (Seq Primitive) (Seq Primitive) from to) +interactive ps = do + stdinBuf <- hGetBuffering stdin + stdoutBuf <- hGetBuffering stdout + do hSetBuffering stdin NoBuffering + hSetBuffering stdout NoBuffering + select NilRevSeq ps Nil + `finally` do hSetBuffering stdin stdinBuf + hSetBuffering stdout stdoutBuf + +select :: RevSeq Primitive from x -- chosen, in reverse order + -> Seq Primitive x y -- undecided + -> Seq Primitive y to -- rejected + -> IO (Then (Seq Primitive) (Seq Primitive) from to) +select revChosen Nil rejected = return (toSeq revChosen `Then` rejected) +select revChosen (p `Cons` ps) rejected + = do putStrLn (pprint p) + putStr "Want this patch? 
" + c <- getChar + case c of + 'y' -> select (revChosen `Snoc` p) ps rejected + 'n' -> case commute (p `Then` ps) of + Just (ps' `Then` p') -> + select revChosen ps' (p' `Cons` rejected) + Nothing -> + error "XXX Interactive commute fail" + 'q' -> exitWith ExitSuccess + _ -> select revChosen (p `Cons` ps) rejected + hunk ./Camp/Command/Record.hs 4 +import Camp.Command.Interactive hunk ./Camp/Command/Record.hs 6 +import Camp.Patch.Commute hunk ./Camp/Command/Record.hs 24 +import Unsafe.Coerce hunk ./Camp/Command/Record.hs 28 - recordName n -record _ [n] = recordName (Name Positive (BSC.pack n)) + recordName n True +record _ ["-a"] = do n <- genName + recordName n False +record _ [n] = recordName (Name Positive (BSC.pack n)) True +record _ ["-a", n] = recordName (Name Positive (BSC.pack n)) False hunk ./Camp/Command/Record.hs 35 -recordName :: Name -> IO () -recordName n = do directory <- recordDirectory pristineDir "." - adds <- recordAdds - let primitives = directory `appendSeq` adds - patches = mkPatches n 1 primitives - catches = mkCatches patches - megaPatch = MegaPatch n catches - i <- writeMegaPatch megaPatch - applyToPristine megaPatch - is <- readInventory - writeInventory (is ++ [i]) +recordName :: Name -> Bool -> IO () +recordName n chooseInteractively = do + directory <- recordDirectory pristineDir "." + adds <- recordAdds + let primitives = directory `appendSeq` adds + primitives' <- if chooseInteractively + then do wanted `Then` _ <- interactive primitives + return (unsafeCoerce wanted) -- XXX + else return primitives + let patches = mkPatches n 1 primitives' + catches = mkCatches patches + megaPatch = MegaPatch n catches + i <- writeMegaPatch megaPatch + applyToPristine megaPatch + is <- readInventory + writeInventory (is ++ [i]) hunk ./tests/darcs_issue1043_b/run_test.sh 36 -"$CAMP" record +"$CAMP" record -a hunk ./tests/darcs_issue1043_b/run_test.sh 49 -"$CAMP" record +"$CAMP" record -a hunk ./tests/darcs_issue1043_b/run_test.sh 51 -"$CAMP" record +"$CAMP" record -a hunk ./tests/darcs_issue1043_b/run_test.sh 57 -"$CAMP" record +"$CAMP" record -a hunk ./tests/darcs_issue1043_b/run_test.sh 64 -"$CAMP" record +"$CAMP" record -a hunk ./tests/darcs_issue1043_b/run_test.sh 70 -"$CAMP" record +"$CAMP" record -a hunk ./tests/file_end_commutes/run_test.sh 47 -"$CAMP" record o +"$CAMP" record -a o hunk ./tests/file_end_commutes/run_test.sh 49 -"$CAMP" record p1 +"$CAMP" record -a p1 hunk ./tests/file_end_commutes/run_test.sh 51 -"$CAMP" record p2 +"$CAMP" record -a p2 hunk ./tests/file_end_commutes/run_test.sh 53 -"$CAMP" record p3 +"$CAMP" record -a p3 hunk ./tests/file_end_commutes2/run_test.sh 48 -"$CAMP" record o +"$CAMP" record -a o hunk ./tests/file_end_commutes2/run_test.sh 50 -"$CAMP" record p1 +"$CAMP" record -a p1 hunk ./tests/file_end_commutes2/run_test.sh 52 -"$CAMP" record p2 +"$CAMP" record -a p2 hunk ./tests/file_end_commutes2/run_test.sh 54 -"$CAMP" record p3 +"$CAMP" record -a p3 hunk ./tests/simple_merge/run_test.sh 40 -"$CAMP" record +"$CAMP" record -a hunk ./tests/simple_merge/run_test.sh 49 -"$CAMP" record +"$CAMP" record -a hunk ./tests/simple_merge/run_test.sh 55 -"$CAMP" record +"$CAMP" record -a hunk ./Camp/Command/Record.hs 44 + -- XXX Check for Nil hunk ./Camp/Command/Test.hs 1 - -module Camp.Command.Test (test) where - -import Camp.Patch.Catch -import Camp.Patch.MegaPatch -import Camp.Patch.Name -import Camp.Patch.Patch -import Camp.Patch.Primitive -import Camp.Patch.Sequence -import Camp.Repository - -import qualified Data.ByteString.Lazy.Char8 as BSC 
-import Unsafe.Coerce - -test :: Log -> [String] -> IO () -test _ [] = do initialiseRepo - is <- writeMegaPatches megaPatchesPQR - writeInventory is - applyToRepo megaPatchesPQR -test _ ["q"] = do initialiseRepo - is <- writeMegaPatches megaPatchesPQ - writeInventory is - applyToRepo megaPatchesPQ -test _ ["r"] = do initialiseRepo - is <- writeMegaPatches megaPatchesPR - writeInventory is - applyToRepo megaPatchesPR -test _ _ = error "Unknown arguments to test" - -nameP, nameQ, nameR :: Name -nameP = Name Positive (BSC.pack "111-111") -nameQ = Name Positive (BSC.pack "222-222") -nameR = Name Positive (BSC.pack "333-333") - -patchP1 :: Patch O O_P1 -patchP1 = Primitive (SubName nameP 1) - (AddFile "foo") -patchP2 :: Patch O_P1 O_P12 -patchP2 = Primitive (SubName nameP 2) - (AddFile "bar") -patchP3 :: Patch O_P12 O_P123 -patchP3 = Primitive (SubName nameP 3) - (Hunk "foo" 0 [] - ["foo line 1", - "foo line 2", - "foo line 3", - "foo line 4", - "foo line 5"]) -patchPQ1 :: Patch O_P123 O_P123_Q1 -patchPQ1 = Primitive (SubName nameQ 1) - (Hunk "foo" 2 - ["foo line 3", "foo line 4"] - ["in between foo lines 2,5 1", - "in between foo lines 2,5 2", - "in between foo lines 2,5 3"]) -patchPQR1 :: Patch O_P123_Q1 O_P123_Q1_R1 -patchPQR1 = Primitive (SubName nameR 1) - (Hunk "foo" 1 [] - ["in between foo lines 1,2 1", - "in between foo lines 1,2 2", - "in between foo lines 1,2 3", - "in between foo lines 1,2 4"]) - -patchPR1 :: Patch O_P123 O_P123_R1 --- R actually looks identical regardless of whether or not Q has --- already been applied -patchPR1 = unsafeCoerce patchPQR1 - -catchP1 :: Catch O O_P1 -catchP1 = Patch patchP1 - -catchP2 :: Catch O_P1 O_P12 -catchP2 = Patch patchP2 - -catchP3 :: Catch O_P12 O_P123 -catchP3 = Patch patchP3 - -catchPQ1 :: Catch O_P123 O_P123_Q1 -catchPQ1 = Patch patchPQ1 - -catchPR1 :: Catch O_P123 O_P123_R1 -catchPR1 = Patch patchPR1 - -catchPQR1 :: Catch O_P123_Q1 O_P123_Q1_R1 -catchPQR1 = Patch patchPQR1 - -catchesP :: Seq Catch O O_P123 -catchesP = catchP1 `Cons` catchP2 `Cons` catchP3 `Cons` Nil - -catchesPQ :: Seq Catch O_P123 O_P123_Q1 -catchesPQ = catchPQ1 `Cons` Nil - -catchesPR :: Seq Catch O_P123 O_P123_R1 -catchesPR = catchPR1 `Cons` Nil - -catchesPQR :: Seq Catch O_P123_Q1 O_P123_Q1_R1 -catchesPQR = catchPQR1 `Cons` Nil - -megaPatchP :: MegaPatch O O_P123 -megaPatchP = MegaPatch nameP catchesP - -megaPatchPQ :: MegaPatch O_P123 O_P123_Q1 -megaPatchPQ = MegaPatch nameQ catchesPQ - -megaPatchPR :: MegaPatch O_P123 O_P123_R1 -megaPatchPR = MegaPatch nameR catchesPR - -megaPatchPQR :: MegaPatch O_P123_Q1 O_P123_Q1_R1 -megaPatchPQR = MegaPatch nameR catchesPQR - -megaPatchesPQR :: Seq MegaPatch O O_P123_Q1_R1 -megaPatchesPQR = megaPatchP `Cons` megaPatchPQ `Cons` megaPatchPQR `Cons` Nil - -megaPatchesPQ :: Seq MegaPatch O O_P123_Q1 -megaPatchesPQ = megaPatchP `Cons` megaPatchPQ `Cons` Nil - -megaPatchesPR :: Seq MegaPatch O O_P123_R1 -megaPatchesPR = megaPatchP `Cons` megaPatchPR `Cons` Nil - -data O -data O_P1 -data O_P12 -data O_P123 -data O_P123_Q1 -data O_P123_R1 -data O_P123_Q1_R1 - rmfile ./Camp/Command/Test.hs hunk ./Camp/Main.hs 10 --- import Camp.Command.Test hunk ./Camp/Main.hs 79 - "test" : _ -> True hunk ./Camp/Main.hs 98 - -- XXX "test" : args' -> test l args' hunk ./camp.cabal 26 - Camp.Command.Test addfile ./Camp/Patch/RevSequence.hs hunk ./Camp/Patch/RevSequence.hs 1 + +module Camp.Patch.RevSequence (RevSeq(..), toSeq) where + +import Camp.Patch.Sequence + +infixr `Snoc` + +data RevSeq p from to where + Snoc :: RevSeq p from mid -> p mid to -> 
RevSeq p from to + NilRevSeq :: RevSeq p here here + +toSeq :: RevSeq p from to -> Seq p from to +toSeq rs = toSeq' Nil rs + +toSeq' :: Seq p mid to -> RevSeq p from mid -> Seq p from to +toSeq' acc NilRevSeq = acc +toSeq' acc (rs `Snoc` p) = toSeq' (p `Cons` acc) rs + hunk ./Camp/Command/Add.hs 6 -add :: Log -> [String] -> IO () -add _ [] = error "No arguments to add" -add _ paths = do current <- readAdds - writeAdds (current ++ paths) +add :: Log -> Repository -> [String] -> IO () +add _ _ [] = error "No arguments to add" +add _ r paths = do current <- readAdds r + writeAdds r (current ++ paths) hunk ./Camp/Command/Init.hs 6 -initialise :: Log -> [String] -> IO () -initialise _ [] = initialiseRepo -initialise _ _ = error "Unknown arguments to initialise" +initialise :: Log -> Repository -> [String] -> IO () +initialise _ r [] = initialiseRepo r +initialise _ _ _ = error "Unknown arguments to initialise" hunk ./Camp/Command/Inventory.hs 9 -inventory :: Log -> [String] -> IO () -inventory _ [] = do i <- readInventory - let ns = [ n | InventoryItem n _ _ _ <- i ] - mapM_ (putStrLn . pprint) ns -inventory _ [repo] - = do localInventory <- readInventory - remoteInventory <- inRepo repo readInventory +inventory :: Log -> Repository -> [String] -> IO () +inventory _ r [] = do i <- readInventory r + let ns = [ n | InventoryItem n _ _ _ <- i ] + mapM_ (putStrLn . pprint) ns +inventory _ localRepo [remoteRepoPath] + = do remoteRepo <- mkRepo remoteRepoPath + localInventory <- readInventory localRepo + remoteInventory <- readInventory remoteRepo hunk ./Camp/Command/Inventory.hs 28 -inventory _ _ = error "Bad arguments to inventory" +inventory _ _ _ = error "Bad arguments to inventory" hunk ./Camp/Command/Pull.hs 15 -pull :: Log -> [String] -> IO () -pull l (repo : wantedPatches) - = do logRepo l "remote" repo - localInventory <- readInventory - remoteInventory <- inRepo repo readInventory +pull :: Log -> Repository -> [String] -> IO () +pull l localRepo (remoteRepoPath : wantedPatches) + = do remoteRepo <- mkRepo remoteRepoPath + logRepo l "remote" remoteRepo + localInventory <- readInventory localRepo + remoteInventory <- readInventory remoteRepo hunk ./Camp/Command/Pull.hs 36 - localReadPatches <- readMegaPatches localReadInventory - remoteReadPatches <- inRepo repo $ readMegaPatches remoteReadInventory + localReadPatches <- readMegaPatches localRepo localReadInventory + remoteReadPatches <- readMegaPatches remoteRepo remoteReadInventory hunk ./Camp/Command/Pull.hs 45 - doPull localInventory localPatches remotePatches + doPull localRepo localInventory localPatches remotePatches hunk ./Camp/Command/Pull.hs 52 - doPull localInventory localPatches remotePatches' + doPull localRepo localInventory localPatches remotePatches' hunk ./Camp/Command/Pull.hs 56 -pull _ _ = error "Unknown arguments to pull" +pull _ _ [] = error "XXX No arguments to pull" hunk ./Camp/Command/Pull.hs 58 --- GHC 6.8 can't top if this is inlined, so we make it a separate function +-- GHC 6.8 can't cope if this is inlined, so we make it a separate function hunk ./Camp/Command/Pull.hs 60 -doPull :: [InventoryItem] -> Seq MegaPatch from1 to1 -> Seq MegaPatch from2 to2 +doPull :: Repository -> [InventoryItem] + -> Seq MegaPatch from1 to1 -> Seq MegaPatch from2 to2 hunk ./Camp/Command/Pull.hs 63 -doPull localInventory localPatches remotePatches = +doPull localRepo localInventory localPatches remotePatches = hunk ./Camp/Command/Pull.hs 68 - do is <- writeMegaPatches newLocalPatches + do is <- writeMegaPatches localRepo newLocalPatches 
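A sketch of the calling convention after this change: commands receive the local Repository explicitly and open remote repositories with mkRepo. The module below is illustration only; it assumes Camp.Repository exports InventoryItem and readInventory as the Inventory and Pull commands above use them, and ../some-other-repo is a made-up path.

module RepoDemo (main) where

import Camp.Patch.Pretty (pprint)
import Camp.Repository (InventoryItem(..), mkRepo, readInventory)

-- List the patch names in the current repository and in a second one.
main :: IO ()
main = do localRepo  <- mkRepo "."                 -- mkRepo does not yet validate
          remoteRepo <- mkRepo "../some-other-repo"
          localInv   <- readInventory localRepo
          remoteInv  <- readInventory remoteRepo
          let names inv = [ n | InventoryItem n _ _ _ <- inv ]
          putStrLn "Local patches:"
          mapM_ (putStrLn . pprint) (names localInv)
          putStrLn "Remote patches:"
          mapM_ (putStrLn . pprint) (names remoteInv)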
hunk ./Camp/Command/Pull.hs 70 - applyToRepo newLocalPatches - writeInventory localInventory' + applyToRepo localRepo newLocalPatches + writeInventory localRepo localInventory' hunk ./Camp/Command/Record.hs 26 -record :: Log -> [String] -> IO () -record _ [] = do n <- genName - recordName n True -record _ ["-a"] = do n <- genName - recordName n False -record _ [n] = recordName (Name Positive (BSC.pack n)) True -record _ ["-a", n] = recordName (Name Positive (BSC.pack n)) False -record _ _ = error "Unknown arguments to record" +record :: Log -> Repository -> [String] -> IO () +record _ r [] = do n <- genName r + recordName r n True +record _ r ["-a"] = do n <- genName r + recordName r n False +record _ r [n] = recordName r (Name Positive (BSC.pack n)) True +record _ r ["-a", n] = recordName r (Name Positive (BSC.pack n)) False +record _ _ _ = error "Unknown arguments to record" hunk ./Camp/Command/Record.hs 35 -recordName :: Name -> Bool -> IO () -recordName n chooseInteractively = do - directory <- recordDirectory pristineDir "." - adds <- recordAdds +recordName :: Repository -> Name -> Bool -> IO () +recordName r n chooseInteractively = do + directory <- recordDirectory (pristineDir r) "." + adds <- recordAdds r hunk ./Camp/Command/Record.hs 48 - i <- writeMegaPatch megaPatch - applyToPristine megaPatch - is <- readInventory - writeInventory (is ++ [i]) + i <- writeMegaPatch r megaPatch + applyToPristine r megaPatch + is <- readInventory r + writeInventory r (is ++ [i]) hunk ./Camp/Command/Record.hs 63 -recordAdds :: IO (Seq Primitive from to) -recordAdds = do fps <- readAdds - writeAdds [] - recordAddedFiles fps +recordAdds :: Repository -> IO (Seq Primitive from to) +recordAdds r = do fps <- readAdds r + writeAdds r [] + recordAddedFiles fps -- XXX Should pass r through hunk ./Camp/Command/Show.hs 8 -showC :: Log -> [String] -> IO () -showC _ [wanted] - = do inventory <- readInventory +showC :: Log -> Repository -> [String] -> IO () +showC _ r [wanted] + = do inventory <- readInventory r hunk ./Camp/Command/Show.hs 24 - patch <- readMegaPatch inventoryLine + patch <- readMegaPatch r inventoryLine hunk ./Camp/Command/Show.hs 26 -showC _ _ = error "XXX show" +showC _ _ _ = error "XXX show" hunk ./Camp/Main.hs 76 - = do exists <- isThisARepo - let creatingRepo = case args of - "init" : _ -> True - _ -> False - when (creatingRepo == exists) $ error "creatingRepo == exists" - l <- startLog args - doIt l args `catches` + = do repo <- case args of + "init" : _ -> createRepo + _ -> getRepo + l <- startLog repo args + doIt l repo args `catches` hunk ./Camp/Main.hs 86 - logRepo l "after" "." 
+ endLog repo l hunk ./Camp/Main.hs 88 -doIt :: Log -> [String] -> IO () -doIt l args = case args of - "add" : args' -> add l args' - "init" : args' -> initialise l args' - "inventory" : args' -> inventory l args' - "pull" : args' -> pull l args' - "record" : args' -> record l args' - "show" : args' -> showC l args' - _ -> error "Unrecognised args" +doIt :: Log -> Repository -> [String] -> IO () +doIt l r args + = case args of + "add" : args' -> add l r args' + "init" : args' -> initialise l r args' + "inventory" : args' -> inventory l r args' + "pull" : args' -> pull l r args' + "record" : args' -> record l r args' + "show" : args' -> showC l r args' + _ -> error "Unrecognised args" hunk ./Camp/Repository.hs 3 - isThisARepo, - inRepo, + Repository, + createRepo, + mkRepo, + getRepo, hunk ./Camp/Repository.hs 24 + endLog, hunk ./Camp/Repository.hs 38 +import Control.Monad hunk ./Camp/Repository.hs 45 +import System.Exit hunk ./Camp/Repository.hs 50 -inRepo :: FilePath -> IO a -> IO a -inRepo = inDir +newtype Repository = Repository String hunk ./Camp/Repository.hs 53 -repoBase :: FilePath -repoBase = "_camp" +repoBase :: Repository -> FilePath +repoBase (Repository r) = r "_camp" hunk ./Camp/Repository.hs 56 -repoRoot :: FilePath -repoRoot = repoBase "repo" +repoRoot :: Repository -> FilePath +repoRoot r = repoBase r "repo" hunk ./Camp/Repository.hs 59 -inventoryFile :: FilePath -inventoryFile = repoRoot "inventory" +inventoryFile :: Repository -> FilePath +inventoryFile r = repoRoot r "inventory" hunk ./Camp/Repository.hs 62 -addsFile :: FilePath -addsFile = repoRoot "adds" +addsFile :: Repository -> FilePath +addsFile r = repoRoot r "adds" hunk ./Camp/Repository.hs 65 -pristineDir :: FilePath -pristineDir = repoRoot "pristine" +pristineDir :: Repository -> FilePath +pristineDir r = repoRoot r "pristine" + +workingDir :: Repository -> FilePath +workingDir (Repository r) = r hunk ./Camp/Repository.hs 102 -readInventory :: IO [InventoryItem] -readInventory = do h <- openBinaryFile inventoryFile ReadMode - size <- hFileSize h - content <- BS.hGet h (fromIntegral size) - hClose h - -- XXX check snd == ""? - return (fst $ input content) +readInventory :: Repository -> IO [InventoryItem] +readInventory r + = do h <- openBinaryFile (inventoryFile r) ReadMode + size <- hFileSize h + content <- BS.hGet h (fromIntegral size) + hClose h + -- XXX check snd == ""? 
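The path helpers above hang everything off the repository directory. A standalone sketch of the same layout, assuming the components are joined with System.FilePath's (</>) operator (filepath is already among the build dependencies); the module name is invented for this illustration.

module RepoLayout where

import System.FilePath ((</>))

newtype Repository = Repository String   -- the working directory, as above

repoBase, repoRoot, inventoryFile, addsFile, pristineDir, workingDir
    :: Repository -> FilePath
repoBase (Repository r)  = r </> "_camp"
repoRoot      r          = repoBase r </> "repo"
inventoryFile r          = repoRoot r </> "inventory"
addsFile      r          = repoRoot r </> "adds"
pristineDir   r          = repoRoot r </> "pristine"
workingDir (Repository r) = r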
+ return (fst $ input content) hunk ./Camp/Repository.hs 111 -writeInventory :: [InventoryItem] -> IO () -writeInventory ns = BS.writeFile inventoryFile (output ns) +writeInventory :: Repository -> [InventoryItem] -> IO () +writeInventory r ns = BS.writeFile (inventoryFile r) (output ns) hunk ./Camp/Repository.hs 114 -readAdds :: IO [FilePath] -readAdds = do content <- readBinaryFile addsFile - case maybeRead content of - Just fps -> return fps - Nothing -> panic ("Corrupt adds?\n" ++ content) +readAdds :: Repository -> IO [FilePath] +readAdds r = do content <- readBinaryFile (addsFile r) + case maybeRead content of + Just fps -> return fps + Nothing -> panic ("Corrupt adds?\n" ++ content) hunk ./Camp/Repository.hs 120 -writeAdds :: [FilePath] -> IO () -writeAdds ns = writeBinaryFile addsFile (show ns) +writeAdds :: Repository -> [FilePath] -> IO () +writeAdds r ns = writeBinaryFile (addsFile r) (show ns) hunk ./Camp/Repository.hs 123 -patchesDir :: FilePath -patchesDir = repoRoot "patches" +patchesDir :: Repository -> FilePath +patchesDir r = repoRoot r "patches" hunk ./Camp/Repository.hs 126 -patchFile :: FilePath -patchFile = patchesDir "patchFile" +patchFile :: Repository -> FilePath +patchFile r = patchesDir r "patchFile" hunk ./Camp/Repository.hs 132 - [InventoryItem] -> IO (Seq MegaPatch from to) -readMegaPatches [] = let resType :: Seq MegaPatch from to - resType = undefined - in case startIsEnd resType of - IsEqual -> return Nil -readMegaPatches (n : ns) = do p <- readMegaPatch n - ps <- readMegaPatches ns - return (p `Cons` ps) + Repository -> [InventoryItem] -> IO (Seq MegaPatch from to) +readMegaPatches _ [] = let resType :: Seq MegaPatch from to + resType = undefined + in case startIsEnd resType of + IsEqual -> return Nil +readMegaPatches r (n : ns) = do p <- readMegaPatch r n + ps <- readMegaPatches r ns + return (p `Cons` ps) hunk ./Camp/Repository.hs 142 -readMegaPatch :: InventoryItem -> IO (MegaPatch from to) +readMegaPatch :: Repository -> InventoryItem -> IO (MegaPatch from to) hunk ./Camp/Repository.hs 149 -readMegaPatch (InventoryItem _ fp from to) - = do h <- openBinaryFile (BSC.unpack fp) ReadMode +readMegaPatch (Repository r) (InventoryItem _ fp from to) + = do h <- openBinaryFile (r BSC.unpack fp) ReadMode hunk ./Camp/Repository.hs 159 -writeMegaPatches :: Seq MegaPatch from to -> IO [InventoryItem] -writeMegaPatches Nil = return [] -writeMegaPatches (Cons p ps) = do i <- writeMegaPatch p - is <- writeMegaPatches ps - return (i : is) +writeMegaPatches :: Repository -> Seq MegaPatch from to -> IO [InventoryItem] +writeMegaPatches _ Nil = return [] +writeMegaPatches r (Cons p ps) = do i <- writeMegaPatch r p + is <- writeMegaPatches r ps + return (i : is) hunk ./Camp/Repository.hs 165 -writeMegaPatch :: MegaPatch from to -> IO InventoryItem -writeMegaPatch m@(MegaPatch n _) - = do h <- openBinaryFile patchFile AppendMode +writeMegaPatch :: Repository -> MegaPatch from to -> IO InventoryItem +writeMegaPatch r m@(MegaPatch n _) + = do let fp = patchFile r + h <- openBinaryFile fp AppendMode hunk ./Camp/Repository.hs 173 - return (InventoryItem n (BSC.pack patchFile) startSize (endSize - 1)) + return (InventoryItem n (BSC.pack fp) startSize (endSize - 1)) + +applyToRepo :: Apply p => Repository -> p from to -> IO () +applyToRepo r ps = do applyToPristine r ps + -- XXX This is wrong if there are local changes + applyToWorking r ps hunk ./Camp/Repository.hs 180 -applyToRepo :: Apply p => p from to -> IO () -applyToRepo ps = do applyToPristine ps - -- XXX This is wrong if 
there are local changes - apply ps +applyToPristine :: Apply p => Repository -> p from to -> IO () +applyToPristine r ps = inDir (pristineDir r) $ apply ps hunk ./Camp/Repository.hs 183 -applyToPristine :: Apply p => p from to -> IO () -applyToPristine ps = inDir pristineDir $ apply ps +applyToWorking :: Apply p => Repository -> p from to -> IO () +applyToWorking r ps = inDir (workingDir r) $ apply ps hunk ./Camp/Repository.hs 186 -initialiseRepo :: IO () -initialiseRepo = do -- We don't create things that startLog has already: - -- createDirectory repoBase - -- createDirectory logsDir - -- createDirectory repoRoot - createDirectory patchesDir - createDirectory pristineDir - writeInventory [] - writeAdds [] +-- XXX This should check that it really is a repo +mkRepo :: FilePath -> IO Repository +mkRepo p = return (Repository p) hunk ./Camp/Repository.hs 190 -isThisARepo :: IO Bool -isThisARepo = doesDirectoryExist repoRoot +createRepo :: IO Repository +createRepo = do d <- getCurrentDirectory + let r = Repository d + createDirectory (repoBase r) -- XXX catch failure + return r + +getRepo :: IO Repository +getRepo = do d <- getCurrentDirectory + let r = Repository d + exists <- doesDirectoryExist (repoBase r) + unless exists $ do + -- XXX make a proper logging function + hPutStrLn stderr "You aren't in a repo!" + exitFailure + return r + +-- createRepo has already created the actual repoBase +initialiseRepo :: Repository -> IO () +initialiseRepo r = do -- We don't create things that startLog has already: + -- createDirectory logsDir + -- createDirectory repoRoot + createDirectory (patchesDir r) + createDirectory (pristineDir r) + writeInventory r [] + writeAdds r [] hunk ./Camp/Repository.hs 217 -genName :: IO Name -genName +genName :: Repository -> IO Name +genName r hunk ./Camp/Repository.hs 221 - inv <- readInventory + inv <- readInventory r hunk ./Camp/Repository.hs 241 -copyRepo :: FilePath -> FilePath -> IO () -copyRepo from to = copyTree (from repoRoot) to +copyRepo :: Repository -> FilePath -> IO () +copyRepo r to = copyTree (repoRoot r) to hunk ./Camp/Repository.hs 244 -logsDir :: FilePath -logsDir = repoBase "logs" +logsDir :: Repository -> FilePath +logsDir r = repoBase r "logs" hunk ./Camp/Repository.hs 247 -nextLogFile :: FilePath -nextLogFile = repoBase "nextLog" +nextLogFile :: Repository -> FilePath +nextLogFile r = repoBase r "nextLog" hunk ./Camp/Repository.hs 252 -mkLog :: Integer -> IO Log -mkLog i = do curDir <- getCurrentDirectory - return $ Log (curDir logsDir show i) +mkLog :: Repository -> Integer -> Log +mkLog r i = Log (logsDir r show i) hunk ./Camp/Repository.hs 264 -startLog :: [String] -> IO Log -startLog args - = do createDirectoryIfMissing False repoBase - createDirectoryIfMissing False logsDir +startLog :: Repository -> [String] -> IO Log +startLog r args + = do createDirectoryIfMissing False (logsDir r) hunk ./Camp/Repository.hs 269 - createDirectoryIfMissing False repoRoot - exists <- doesFileExist nextLogFile + createDirectoryIfMissing False (repoRoot r) + let nlf = nextLogFile r + exists <- doesFileExist nlf hunk ./Camp/Repository.hs 273 - then do xs <- readBinaryFile nextLogFile + then do xs <- readBinaryFile nlf hunk ./Camp/Repository.hs 278 - writeBinaryFile nextLogFile (show (nextLog + 1)) - l <- mkLog nextLog + writeBinaryFile nlf (show (nextLog + 1)) + let l = mkLog r nextLog hunk ./Camp/Repository.hs 282 - logRepo l "before" "." 
+ logRepo l "before" r hunk ./Camp/Repository.hs 285 -logRepo :: Log -> FilePath -> FilePath -> IO () -logRepo l repoName repoPath = copyRepo repoPath (logDirectory l repoName) +endLog :: Repository -> Log -> IO () +endLog r l = logRepo l "after" r + +logRepo :: Log -> FilePath -> Repository -> IO () +logRepo l repoName r = copyRepo r (logDirectory l repoName) hunk ./Camp/Command/Interactive.hs 30 +#ifdef WINDOWS +import System.Posix.Internals +#endif hunk ./Camp/Command/Interactive.hs 39 +#ifdef WINDOWS + -- XXX Hack for Windows + stdinEcho <- getEcho 0 +#endif hunk ./Camp/Command/Interactive.hs 45 +#ifdef WINDOWS + -- XXX Hack for Windows + setEcho 0 False + setCooked 0 False +#endif hunk ./Camp/Command/Interactive.hs 53 +#ifdef WINDOWS + -- XXX Hack for Windows + setEcho 0 stdinEcho + -- XXX We can't find the cooked state before, so hope + -- it's the same as the edit state? + setCooked 0 stdinEcho +#endif hunk ./camp.cabal 77 + if os(windows) + Cpp-Options: -DWINDOWS + hunk ./Camp/Main.hs 10 --- import Camp.Curl +import Camp.Curl hunk ./Camp/Main.hs 25 - {- withGlobalCurl $ -} doSomething args + withGlobalCurl $ doSomething args hunk ./camp.cabal 26 - -- Camp.Curl + Camp.Curl hunk ./camp.cabal 47 - -- C-Sources: cbits/curl.c + C-Sources: cbits/curl.c hunk ./camp.cabal 54 - -- Pkgconfig-Depends: libcurl + Pkgconfig-Depends: libcurl adddir ./camp-core adddir ./camp-core/Camp move ./Camp/Patch ./camp-core/Camp/Patch move ./Camp/Types.hs ./camp-core/Camp/Types.hs move ./Camp/Utils.hs ./camp-core/Camp/Utils.hs adddir ./camp-bin move ./Camp ./camp-bin/Camp move ./LICENSE ./camp-bin/LICENSE move ./Setup.hs ./camp-bin/Setup.hs move ./camp.cabal ./camp-bin/camp.cabal move ./cbits ./camp-bin/cbits move ./packages ./camp-bin/packages hunk ./.boring 1 -^dist(/|$) -^Setup$ -^Setup\.hi$ -^Setup\.o$ +^camp-core/dist(/|$) +^camp-core/Setup$ +^camp-core/Setup\.hi$ +^camp-core/Setup\.o$ +^camp-core/head$ +^camp-core/stable$ +^camp-bin/dist(/|$) +^camp-bin/Setup$ +^camp-bin/Setup\.hi$ +^camp-bin/Setup\.o$ +^camp-bin/head$ +^camp-bin/stable$ hunk ./.boring 14 -^head$ -^stable$ hunk ./camp-bin/camp.cabal 27 - Camp.Patch.Anonymous - Camp.Patch.Apply - Camp.Patch.Catch - Camp.Patch.Commute - Camp.Patch.CommutePast - Camp.Patch.ContextedPatch - Camp.Patch.Equality - Camp.Patch.InputOutput - Camp.Patch.Invert - Camp.Patch.MegaPatch - Camp.Patch.Merge - Camp.Patch.Name - Camp.Patch.Patch - Camp.Patch.Pretty - Camp.Patch.Primitive - Camp.Patch.Sequence hunk ./camp-bin/camp.cabal 28 - Camp.Types - Camp.Utils hunk ./camp-bin/camp.cabal 33 - Build-Depends: base, directory, filepath, containers, array, - old-time, pretty, bytestring + Build-Depends: array, base, bytestring, camp-core, containers, + directory, filepath, old-time hunk ./camp-bin/camp.cabal 38 - Extensions: GADTs, EmptyDataDecls, - ScopedTypeVariables, - MultiParamTypeClasses, FunctionalDependencies, - FlexibleContexts, FlexibleInstances, - OverlappingInstances, UndecidableInstances, - IncoherentInstances, - CPP, ForeignFunctionInterface + Extensions: CPP, ForeignFunctionInterface, EmptyDataDecls, + ScopedTypeVariables + addfile ./camp-core/LICENSE hunk ./camp-core/LICENSE 1 +Copyright (c) Ian Lynagh, 2008. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the Authors nor the names of any contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + addfile ./camp-core/Setup.hs hunk ./camp-core/Setup.hs 1 + +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain + addfile ./camp-core/camp-core.cabal hunk ./camp-core/camp-core.cabal 1 +Name: camp-core +Version: 0.1 +License: BSD3 +License-File: LICENSE +Copyright: 2008 Ian Lynagh +Author: Ian Lynagh +Maintainer: Ian Lynagh +Synopsis: Camp +Description: + Camp (Commute And Merge Patches) +Category: Development +Build-Type: Simple +Cabal-Version: >=1.2 + +Library + Exposed-Modules: + Camp.Patch.Anonymous + Camp.Patch.Apply + Camp.Patch.Catch + Camp.Patch.Commute + Camp.Patch.CommutePast + Camp.Patch.ContextedPatch + Camp.Patch.Equality + Camp.Patch.InputOutput + Camp.Patch.Invert + Camp.Patch.MegaPatch + Camp.Patch.Merge + Camp.Patch.Name + Camp.Patch.Patch + Camp.Patch.Pretty + Camp.Patch.Primitive + Camp.Patch.RevSequence + Camp.Patch.Sequence + Camp.Types + Camp.Utils + + Ghc-Options: -Wall -Werror + + Build-Depends: base, bytestring, containers, directory, filepath, pretty + Extensions: GADTs, ScopedTypeVariables, + MultiParamTypeClasses, FunctionalDependencies, + FlexibleContexts, FlexibleInstances, + OverlappingInstances, UndecidableInstances, + IncoherentInstances, + EmptyDataDecls + hunk ./tests/darcs_issue1043_b/run_test.sh 9 -CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" hunk ./tests/file_end_commutes/run_test.sh 8 -CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" hunk ./tests/file_end_commutes2/run_test.sh 9 -CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" hunk ./tests/run_tests.sh 5 -CAMP="${CAMP:-`pwd`/../dist/build/camp/camp}" +CAMP="${CAMP:-`pwd`/../camp-bin/dist/build/camp/camp}" hunk ./tests/simple_merge/run_test.sh 13 -CAMP="${CAMP:-$HERE/../../dist/build/camp/camp}" +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" adddir ./camp-repository adddir ./camp-repository/Camp move ./camp-bin/Camp/Repository.hs ./camp-repository/Camp/Repository.hs move ./camp-bin/packages ./camp-repository/packages hunk ./camp-bin/Camp/Command/Interactive.hs 25 -import Data.List.LCS hunk ./camp-bin/Camp/Command/Record.hs 5 +import Camp.Diff hunk ./camp-bin/Camp/Command/Record.hs 15 -import Camp.Types -import Camp.Utils hunk 
./camp-bin/Camp/Command/Record.hs 16 -import Data.ByteString.Lazy (ByteString) hunk ./camp-bin/Camp/Command/Record.hs 19 -import Data.List.LCS hunk ./camp-bin/Camp/Command/Record.hs 121 - then recordFile oldE newE + then diffFile oldE newE hunk ./camp-bin/Camp/Command/Record.hs 126 -recordFile :: FilePath -> FilePath -> IO (Seq Primitive from to) -recordFile oldPath newPath = do old <- BS.readFile oldPath - new <- BS.readFile newPath - let oldLines = myLines old - newLines = myLines new - commonLines = lcs oldLines newLines - return $ mkDiff newPath 0 0 commonLines oldLines newLines - -mkDiff :: forall from to . - FilePath -> Bytes -> Line - -> [ByteString] -> [ByteString] -> [ByteString] - -> Seq Primitive from to --- XXX This is all a bit fiddly -mkDiff fp skipBytes skipLines cs os ns - = case (cs, os, ns) of - ([], [], []) -> let resType :: Seq Primitive from to - resType = undefined - in case startIsEnd resType of - IsEqual -> Nil - (common:cs', old:os', new:ns') - | common == old && common == new - -> mkDiff fp (skipBytes + BS.length common + 1{- '\n' -}) - (skipLines + 1) cs' os' ns' - (common:_, _, _) -> - mkHunkThenRest appendNewLines skipBytes (break (common ==) os) - (break (common ==) ns) - ([], _, _) - | skipBytes == 0 -> - mkHunkThenRest intersperseNewLines skipBytes (os, []) (ns, []) - | otherwise -> - -- Here we have to unskip the last '\n' we skipped, as we need - -- to remove it - mkHunkThenRest prefixNewLines (skipBytes - 1) (os, []) (ns, []) - where appendNewLines = map (`BSC.append` BSC.singleton '\n') - intersperseNewLines = intersperse (BSC.singleton '\n') - prefixNewLines = map ('\n' `BSC.cons`) - - mkHunkThenRest _ _ ([], _) ([], _) - = error "XXX mkDiff: Can't happen" - mkHunkThenRest addNewLines skipBytes' (reallyOld, os') ([], _) = - Cons (Hunk fp skipBytes' skipLines - (BS.concat (addNewLines reallyOld)) - (genericLength reallyOld) - BS.empty 0) - (mkDiff fp skipBytes skipLines cs os' ns) - mkHunkThenRest addNewLines skipBytes' ([], _) (reallyNew, ns') = - let newBytes = BS.concat (addNewLines reallyNew) - newLines = genericLength reallyNew - skipBytes'' = skipBytes' + BS.length newBytes - skipLines' = skipLines + newLines - in Cons (Hunk fp skipBytes' skipLines - BS.empty 0 - newBytes newLines) - (mkDiff fp skipBytes'' skipLines' cs os ns') - -- If we have both removals and additions then we don't need - -- to worry about newlines - we leave newlines before and - -- after the hunk, if they were there - mkHunkThenRest _ _ (reallyOld, os') (reallyNew, ns') = - let oldBytes = BS.intercalate (BSC.singleton '\n') reallyOld - oldLines = genericLength reallyOld - newBytes = BS.intercalate (BSC.singleton '\n') reallyNew - newLines = genericLength reallyNew - skipBytes' = skipBytes + BS.length newBytes - skipLines' = skipLines + newLines - in Cons (Hunk fp skipBytes skipLines - oldBytes oldLines - newBytes newLines) - (mkDiff fp skipBytes' skipLines' cs os' ns') - hunk ./camp-bin/LICENSE 29 - -The contents of packages/ may be under different licences. - - hunk ./camp-bin/camp.cabal 27 - Camp.Repository - Hs-Source-Dirs: . 
hunk ./camp-bin/camp.cabal 31 - Build-Depends: array, base, bytestring, camp-core, containers, - directory, filepath, old-time + Build-Depends: base, bytestring, camp-core, camp-repository, + containers, directory, filepath hunk ./camp-bin/camp.cabal 36 - Extensions: CPP, ForeignFunctionInterface, EmptyDataDecls, - ScopedTypeVariables + Extensions: CPP, ForeignFunctionInterface, ScopedTypeVariables, + EmptyDataDecls hunk ./camp-bin/camp.cabal 42 - -- This is a hack to avoid needing to install the lcs package - -- when building with the GHC HEAD. The in-tree sources are from - -- lcs 0.2 - if flag(have_lcs) - Build-Depends: lcs - else - Hs-Source-Dirs: packages/lcs - Other-Modules: - Data.List.LCS - Data.List.LCS.HuntSzymanski - addfile ./camp-repository/Camp/Diff.hs hunk ./camp-repository/Camp/Diff.hs 1 + +module Camp.Diff (diffFile) where + +import Camp.Patch.Equality +import Camp.Patch.Primitive +import Camp.Patch.Sequence +import Camp.Types +import Camp.Utils + +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BSC +import Data.List +import Data.List.LCS + +diffFile :: FilePath -> FilePath -> IO (Seq Primitive from to) +diffFile oldPath newPath + = do old <- BS.readFile oldPath + new <- BS.readFile newPath + let oldLines = myLines old + newLines = myLines new + commonLines = lcs oldLines newLines + return $ mkDiff newPath 0 0 commonLines oldLines newLines + +mkDiff :: forall from to . + FilePath -> Bytes -> Line + -> [ByteString] -> [ByteString] -> [ByteString] + -> Seq Primitive from to +-- XXX This is all a bit fiddly +mkDiff fp skipBytes skipLines cs os ns + = case (cs, os, ns) of + ([], [], []) -> let resType :: Seq Primitive from to + resType = undefined + in case startIsEnd resType of + IsEqual -> Nil + (common:cs', old:os', new:ns') + | common == old && common == new + -> mkDiff fp (skipBytes + BS.length common + 1{- '\n' -}) + (skipLines + 1) cs' os' ns' + (common:_, _, _) -> + mkHunkThenRest appendNewLines skipBytes (break (common ==) os) + (break (common ==) ns) + ([], _, _) + | skipBytes == 0 -> + mkHunkThenRest intersperseNewLines skipBytes (os, []) (ns, []) + | otherwise -> + -- Here we have to unskip the last '\n' we skipped, as we need + -- to remove it + mkHunkThenRest prefixNewLines (skipBytes - 1) (os, []) (ns, []) + where appendNewLines = map (`BSC.append` BSC.singleton '\n') + intersperseNewLines = intersperse (BSC.singleton '\n') + prefixNewLines = map ('\n' `BSC.cons`) + + mkHunkThenRest _ _ ([], _) ([], _) + = error "XXX mkDiff: Can't happen" + mkHunkThenRest addNewLines skipBytes' (reallyOld, os') ([], _) = + Cons (Hunk fp skipBytes' skipLines + (BS.concat (addNewLines reallyOld)) + (genericLength reallyOld) + BS.empty 0) + (mkDiff fp skipBytes skipLines cs os' ns) + mkHunkThenRest addNewLines skipBytes' ([], _) (reallyNew, ns') = + let newBytes = BS.concat (addNewLines reallyNew) + newLines = genericLength reallyNew + skipBytes'' = skipBytes' + BS.length newBytes + skipLines' = skipLines + newLines + in Cons (Hunk fp skipBytes' skipLines + BS.empty 0 + newBytes newLines) + (mkDiff fp skipBytes'' skipLines' cs os ns') + -- If we have both removals and additions then we don't need + -- to worry about newlines - we leave newlines before and + -- after the hunk, if they were there + mkHunkThenRest _ _ (reallyOld, os') (reallyNew, ns') = + let oldBytes = BS.intercalate (BSC.singleton '\n') reallyOld + oldLines = genericLength reallyOld + newBytes = BS.intercalate (BSC.singleton '\n') 
reallyNew + newLines = genericLength reallyNew + skipBytes' = skipBytes + BS.length newBytes + skipLines' = skipLines + newLines + in Cons (Hunk fp skipBytes skipLines + oldBytes oldLines + newBytes newLines) + (mkDiff fp skipBytes' skipLines' cs os' ns') + addfile ./camp-repository/LICENSE hunk ./camp-repository/LICENSE 1 +Copyright (c) Ian Lynagh, 2008. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the Authors nor the names of any contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + + +The contents of packages/ may be under different licences. + addfile ./camp-repository/Setup.hs hunk ./camp-repository/Setup.hs 1 + +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain + hunk ./.boring 7 +^camp-core/bindist$ hunk ./.boring 14 +^camp-bin/bindist$ +^camp-repository/dist(/|$) +^camp-repository/Setup$ +^camp-repository/Setup\.hi$ +^camp-repository/Setup\.o$ +^camp-repository/head$ +^camp-repository/stable$ +^camp-repository/bindist$ addfile ./camp-repository/camp-repository.cabal hunk ./camp-repository/camp-repository.cabal 1 +Name: camp-repository +Version: 0.1 +License: BSD3 +License-File: LICENSE +Copyright: 2008 Ian Lynagh +Author: Ian Lynagh +Maintainer: Ian Lynagh +Synopsis: Camp +Description: + Camp (Commute And Merge Patches) +Category: Development +Build-Type: Simple +Cabal-Version: >=1.2 + +Flag have_lcs + Description: Do we have the lcs package? + +Library + Exposed-Modules: + Camp.Diff + Camp.Repository + Hs-Source-Dirs: . + + Ghc-Options: -Wall -Werror + + Build-Depends: base, bytestring, camp-core, directory, filepath, old-time + Extensions: CPP, ScopedTypeVariables + -- Build-Depends: base, bytestring, camp-core, directory, filepath, old-time + -- Extensions: GADTs, ScopedTypeVariables, + -- MultiParamTypeClasses, FunctionalDependencies, + -- FlexibleContexts, FlexibleInstances, + -- OverlappingInstances, UndecidableInstances, + -- IncoherentInstances, + -- EmptyDataDecls + + -- This is a hack to avoid needing to install the lcs package + -- when building with the GHC HEAD. 
The in-tree sources are from + -- lcs 0.2 + if flag(have_lcs) + Build-Depends: lcs + else + Hs-Source-Dirs: packages/lcs + Build-Depends: array + Other-Modules: + Data.List.LCS + Data.List.LCS.HuntSzymanski + hunk ./camp-core/Camp/Patch/Primitive.hs 36 + Binary :: FilePath + -> ByteString -- Remove these bytes (the whole file). + -> ByteString -- Add these bytes (the whole file). + -> Primitive from to hunk ./camp-core/Camp/Patch/Primitive.hs 84 + 7 -> case input (BS.tail bs) of + (fp, bs1) -> + case input bs1 of + (oldBytes, bs2) -> + case input bs2 of + (newBytes, bs3) -> + (Binary (BSC.unpack fp) oldBytes newBytes, bs3) hunk ./camp-core/Camp/Patch/Primitive.hs 125 + -- Binary + Just (7, _) -> + error "XXX valid/Binary not written yet" hunk ./camp-core/Camp/Patch/Primitive.hs 145 + output (Binary fp oldBytes newBytes) + = 7 `BS.cons` output (BSC.pack fp) + `BS.append` output oldBytes + `BS.append` output newBytes hunk ./camp-core/Camp/Patch/Primitive.hs 172 + ppr (Binary {}) = error "XXX Binary" hunk ./camp-core/Camp/Patch/Primitive.hs 178 + (Binary {}, _) -> Nothing -- XXX far too conservative + (_, Binary {}) -> Nothing -- XXX far too conservative hunk ./camp-core/Camp/Patch/Primitive.hs 270 + invert (Binary fp oldBytes newBytes) = Binary fp newBytes oldBytes hunk ./camp-core/Camp/Patch/Primitive.hs 326 + apply (Binary fp oldBytes newBytes) + = do content <- BS.readFile fp + if oldBytes == content + then BS.writeFile fp newBytes + else error "XXX Old content is wrong" hunk ./camp-repository/Camp/Diff.hs 79 - skipBytes' = skipBytes + BS.length newBytes + skipBytes' = skipBytes + BS.length newBytes + 1{- '\n' -} hunk ./camp-bin/Camp/Command/Record.hs 6 -import Camp.Patch.Catch hunk ./camp-bin/Camp/Command/Record.hs 8 -import Camp.Patch.MegaPatch hunk ./camp-bin/Camp/Command/Record.hs 9 -import Camp.Patch.Patch hunk ./camp-bin/Camp/Command/Record.hs 11 +import Camp.Record hunk ./camp-bin/Camp/Command/Record.hs 16 -import Data.List hunk ./camp-bin/Camp/Command/Record.hs 39 - let patches = mkPatches n 1 primitives' - catches = mkCatches patches - megaPatch = MegaPatch n catches - i <- writeMegaPatch r megaPatch - applyToPristine r megaPatch - is <- readInventory r - writeInventory r (is ++ [i]) - -mkPatches :: Name -> Integer -> Seq Primitive from to -> Seq Patch from to -mkPatches _ _ Nil = Nil -mkPatches n i (Cons p ps) = Primitive (SubName n i) p - `Cons` - mkPatches n (i + 1) ps - -mkCatches :: Seq Patch from to -> Seq Catch from to -mkCatches Nil = Nil -mkCatches (Cons p ps) = Patch p `Cons` mkCatches ps + recordMegaPatch r n primitives' addfile ./camp-repository/Camp/Record.hs hunk ./camp-repository/Camp/Record.hs 1 + +module Camp.Record (recordMegaPatch) where + +import Camp.Patch.Catch +import Camp.Patch.MegaPatch +import Camp.Patch.Name +import Camp.Patch.Patch +import Camp.Patch.Primitive +import Camp.Patch.Sequence +import Camp.Repository + +recordMegaPatch :: Repository -> Name -> Seq Primitive from to -> IO () +recordMegaPatch r n primitives = do + let patches = mkPatches n 1 primitives + catches = mkCatches patches + megaPatch = MegaPatch n catches + i <- writeMegaPatch r megaPatch + applyToPristine r megaPatch + is <- readInventory r + writeInventory r (is ++ [i]) + +mkPatches :: Name -> Integer -> Seq Primitive from to -> Seq Patch from to +mkPatches _ _ Nil = Nil +mkPatches n i (Cons p ps) = Primitive (SubName n i) p + `Cons` + mkPatches n (i + 1) ps + +mkCatches :: Seq Patch from to -> Seq Catch from to +mkCatches Nil = Nil +mkCatches (Cons p ps) = Patch p `Cons` mkCatches 
ps + hunk ./camp-repository/camp-repository.cabal 21 + Camp.Record hunk ./camp-bin/camp.cabal 29 - Ghc-Options: -Wall -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror hunk ./camp-core/camp-core.cabal 37 - Ghc-Options: -Wall -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror hunk ./camp-repository/camp-repository.cabal 25 - Ghc-Options: -Wall -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror hunk ./camp-core/Camp/Patch/Name.hs 82 - case BSC.uncons bs' of - Just (':', bs'') -> - valid bs'' (undefined :: Integer) - _ -> - Left ("InputOutput SubName Expected colon", bs') + valid bs' (undefined :: Integer) hunk ./camp-core/Camp/Patch/Patch.hs 27 - Right bs' -> valid bs' (undefined :: Patch from to) + Right bs' -> valid bs' (undefined :: Primitive from to) hunk ./camp-core/Camp/Utils.hs 7 + timeCommand, hunk ./camp-core/Camp/Utils.hs 93 +timeCommand :: String -> IO a -> IO a +timeCommand msg io + = do putStr msg + startCPUTime <- getCPUTime + res <- io + endCPUTime <- getCPUTime + putStr $ take (maxLen - length msg) $ repeat '.' + let timeTakenPicoSeconds = endCPUTime - startCPUTime + let timeTakenSeconds = timeTakenPicoSeconds `div` (10 ^ (12 :: Int)) + print timeTakenSeconds + return res + where maxLen = 30 -- XXX Euch + addfile ./camp-bin/Camp/Command/Get.hs hunk ./camp-bin/Camp/Command/Get.hs 1 + +module Camp.Command.Get (get) where + +import Camp.Repository + +-- XXX Should make a directory +get :: Log -> Repository -> [String] -> IO () +get l localRepo [remoteRepoPath] + = do remoteRepo <- mkRepo remoteRepoPath + logRepo l "remote" remoteRepo + -- XXX We leak space for the remote inventory + remoteInventory <- readInventory remoteRepo + patches <- getMegaPatches remoteRepo remoteInventory + initialiseRepo localRepo + (filename, offset) <- putMegaPatches localRepo patches + let relativeInventory = inventoryToCompactRelativeInventory + remoteInventory + localInventory = relativeInventoryToInventory filename offset + relativeInventory + writeInventory localRepo localInventory + -- XXX This reading should actually check that the inventory + -- aligns correctly with the patch text, whereas the default + -- readMegaPatches will just parse the whole sequence. + -- Otherwise we should check at an earlier stage that the + -- inventory correctly points at the patch boundaries. + + -- We read the inventory and patches twice to avoid a space leak + localInventory' <- readInventoryLazily localRepo + patches' <- readMegaPatches localRepo localInventory' + applyToPristine localRepo patches' + localInventory'' <- readInventoryLazily localRepo + patches'' <- readMegaPatches localRepo localInventory'' + applyToWorking localRepo patches'' +get _ _ _ = error "XXX Bad arguments to get" + hunk ./camp-bin/Camp/Main.hs 5 +import Camp.Command.Get hunk ./camp-bin/Camp/Main.hs 31 + "XXX This is out of date", + "", hunk ./camp-bin/Camp/Main.hs 80 + "get" : _ -> createRepo hunk ./camp-bin/Camp/Main.hs 96 + "get" : args' -> get l r args' hunk ./camp-bin/camp.cabal 22 + Camp.Command.Get hunk ./camp-core/Camp/Patch/InputOutput.hs 109 - (xs, bs'') -> (x : xs, bs'') + -- XXX This ~ is just for lazy inventory reading; use a different type? 
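+              -- (The irrefutable pattern lets the cons cell be returned
+              -- before the rest of the input has been parsed, so the list
+              -- can be consumed incrementally rather than forced in one go.)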
+ ~(xs, bs'') -> (x : xs, bs'') hunk ./camp-core/Camp/Patch/MegaPatch.hs 30 + +instance InputOutput2 MegaPatch where + input2 = input + output2 = output + valid2 = valid hunk ./camp-core/Camp/Patch/Sequence.hs 44 - (xs, bs'') -> + -- XXX There's a performance penalty for this ~, + -- but a significant space bonus + ~(xs, bs'') -> addfile ./camp-core/Camp/Patch/Stream.hs hunk ./camp-core/Camp/Patch/Stream.hs 1 + +module Camp.Patch.Stream (Stream(..)) where + +import Camp.Patch.InputOutput +import Camp.Patch.Sequence + +import qualified Data.ByteString.Lazy as BS +import Unsafe.Coerce + +newtype Stream p from to = Stream (Seq p from to) + +instance InputOutput2 p => InputOutput (Stream p from to) where + input bs = (Stream (f bs), BS.empty) + where f bs0 = if BS.null bs0 + then unsafeCoerce Nil + else case input2 bs0 of + (x, bs1) -> + x `Cons` f bs1 + valid bs _ = if BS.null bs + then Right BS.empty + else case valid2 bs (undefined :: p from mid) of + Right bs'' -> + valid bs'' (undefined :: Stream p mid to) + Left err -> Left err + output (Stream Nil) = BS.empty + output (Stream (x `Cons` xs)) = output2 x `BS.append` output (Stream xs) + hunk ./camp-core/Camp/Utils.hs 3 - myLines, myUnlines, mySplitAt, maybeRead, maybeReads, + myLines, myUnlines, mySplitAt, maybeRead, maybeReads, hGetLazily, hunk ./camp-core/Camp/Utils.hs 16 +import System.CPUTime hunk ./camp-core/Camp/Utils.hs 20 +import System.IO.Unsafe hunk ./camp-core/Camp/Utils.hs 108 +-- XXX Need an audit of checking that sizes really are what we asked for +-- XXX We have an Int/Int64 problem here +hGetLazily :: Int -> Handle -> Int -> IO ByteString +hGetLazily _ h 0 = do -- XXX Should we be checking for EOF here? + hClose h + return BS.empty +hGetLazily chunkSize h n + = do chunk <- BS.hGet h (min chunkSize n) + let n' = n - fromIntegral (BS.length chunk) + evaluate n' + chunks <- unsafeInterleaveIO + $ hGetLazily chunkSize h n' + return (chunk `BS.append` chunks) + hunk ./camp-core/camp-core.cabal 34 + Camp.Patch.Stream hunk ./camp-repository/Camp/Repository.hs 9 + readInventoryLazily, hunk ./camp-repository/Camp/Repository.hs 12 + inventoryToCompactRelativeInventory, + relativeInventoryToInventory, hunk ./camp-repository/Camp/Repository.hs 16 + getMegaPatches, + putMegaPatches, hunk ./camp-repository/Camp/Repository.hs 23 - applyToRepo, + applyToRepo, -- XXX space leak hunk ./camp-repository/Camp/Repository.hs 25 + applyToWorking, hunk ./camp-repository/Camp/Repository.hs 36 -import Camp.Patch.Equality hunk ./camp-repository/Camp/Repository.hs 40 +import Camp.Patch.Stream hunk ./camp-repository/Camp/Repository.hs 46 +-- XXX Euch, can we get defaultChunkSize added to the official interface? +-- Or should re define it locally? +import qualified Data.ByteString.Lazy.Internal as BS (defaultChunkSize) hunk ./camp-repository/Camp/Repository.hs 56 -import System.IO +import System.IO hiding (getContents) -- XXX +import System.IO.Unsafe hunk ./camp-repository/Camp/Repository.hs 60 +import Prelude hiding (getContents) -- XXX + hunk ./camp-repository/Camp/Repository.hs 84 +-- XXX Curently this is start/end. Should it be start/length? hunk ./camp-repository/Camp/Repository.hs 87 +data RelativeInventoryItem = RelativeInventoryItem Name Integer Integer + hunk ./camp-repository/Camp/Repository.hs 126 +-- XXX Need a safe variant for remote repos, "camp check", etc +readInventoryLazily :: Repository -> IO [InventoryItem] +readInventoryLazily r + = do content <- BS.readFile (inventoryFile r) + -- XXX check snd == ""? 
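+       -- (input returns the parsed value paired with any unconsumed bytes;
+       -- for a well-formed inventory file the leftover should be empty,
+       -- which is what the check suggested above would assert.)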
+ return (fst $ input content) + hunk ./camp-repository/Camp/Repository.hs 151 --- XXX This should possibly use a variant readMegaPatch that takes the --- handle; we'd have to check that the filename matches +-- XXX Both Integer's should be Bytes +data Content = Content ByteString{- FilePath -} Integer Integer + +inventoryToContents :: [InventoryItem] -> [Content] +inventoryToContents [] = [] +inventoryToContents (InventoryItem _ fp from to : is) + = let f cur [] = [Content fp from cur] + f cur is'@(InventoryItem _ fp' from' to' : is'') + | (fp == fp') && (cur + 1 == from') = f to' is'' + | otherwise = Content fp from cur : inventoryToContents is' + in f to is + +inventoryToCompactRelativeInventory :: [InventoryItem] + -> [RelativeInventoryItem] +inventoryToCompactRelativeInventory = inventoryToCompactRelativeInventoryFrom 0 + +-- XXX Make this local to the above +inventoryToCompactRelativeInventoryFrom :: Integer{- XXX Bytes -} + -> [InventoryItem] + -> [RelativeInventoryItem] +inventoryToCompactRelativeInventoryFrom _ [] = [] +inventoryToCompactRelativeInventoryFrom cur (InventoryItem n _ from to : is) + = let to' = cur + (to - from) + in RelativeInventoryItem n cur to' : + inventoryToCompactRelativeInventoryFrom (to' + 1) is + +relativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Integer{- XXX Bytes -} + -> [RelativeInventoryItem] + -> [InventoryItem] +relativeInventoryToInventory _ _ [] = [] +relativeInventoryToInventory fp offset (RelativeInventoryItem n from to : is) + = InventoryItem n fp (from + offset) (to + offset) + : relativeInventoryToInventory fp offset is + +putMegaPatches :: Repository -> ByteString + -> IO (ByteString{- FilePath -}, Integer{- XXX Bytes -}) +putMegaPatches r s + = do let fp = patchFile r + h <- openBinaryFile fp AppendMode + startSize <- hFileSize h + BS.hPut h s + hClose h + return (BSC.pack fp, startSize) + +getMegaPatches :: Repository -> [InventoryItem] -> IO ByteString +getMegaPatches r is = getContents r (inventoryToContents is) + +getContents :: Repository -> [Content] -> IO ByteString +getContents _ [] = return BS.empty +getContents r (c : cs) = do x <- getContent r c + xs <- unsafeInterleaveIO $ getContents r cs + return (x `BS.append` xs) + +-- XXX This is currently lazy. Really we want control over whether or +-- not it is strict +getContent :: Repository -> Content -> IO ByteString +getContent (Repository r) (Content fp from to) + = do -- putStrLn ("Getting content: XXX") -- XXX Proper logging please + h <- openBinaryFile (r BSC.unpack fp) ReadMode + hSeek h AbsoluteSeek from + content <- hGetLazily BS.defaultChunkSize + h + (fromIntegral (1 + to - from)) + return content +{- + = do -- putStrLn ("Getting content: XXX") -- XXX Proper logging please + h <- openBinaryFile (r BSC.unpack fp) ReadMode + hSeek h AbsoluteSeek from + content <- BS.hGet h (fromIntegral (1 + to - from)) + hClose h + return content +-} + hunk ./camp-repository/Camp/Repository.hs 226 -readMegaPatches _ [] = let resType :: Seq MegaPatch from to - resType = undefined - in case startIsEnd resType of - IsEqual -> return Nil -readMegaPatches r (n : ns) = do p <- readMegaPatch r n - ps <- readMegaPatches r ns - return (p `Cons` ps) +readMegaPatches r ns = do content <- getMegaPatches r ns + case input content of + (Stream s, _{- "" -}) -> return s hunk ./camp-repository/Camp/Repository.hs 230 +-- XXX Is this still used? 
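The offset bookkeeping above is the subtle part of get: the remote inventory is first compacted so that successive patches occupy contiguous byte ranges starting at zero, and those ranges are then rebased onto the local patch file at the offset returned by putMegaPatches. Here is a minimal, self-contained sketch of that round trip; the types and names are simplified, illustrative stand-ins for InventoryItem and RelativeInventoryItem, not part of the patch itself.

  -- Illustrative only: simplified stand-ins for the repository's
  -- InventoryItem (name, file, start, end) and RelativeInventoryItem.
  data Item    = Item    String FilePath Integer Integer
  data RelItem = RelItem String          Integer Integer

  -- Pack the patches end to end from offset 0, keeping only each
  -- patch's length, as inventoryToCompactRelativeInventory does.
  compact :: [Item] -> [RelItem]
  compact = go 0
    where go _   []                      = []
          go cur (Item n _ from to : is) =
              let to' = cur + (to - from)
              in RelItem n cur to' : go (to' + 1) is

  -- Rebase the relative ranges onto the local patch file at a given
  -- offset (the file's size before the append), as
  -- relativeInventoryToInventory does.
  rebase :: FilePath -> Integer -> [RelItem] -> [Item]
  rebase fp offset = map shift
    where shift (RelItem n from to) = Item n fp (from + offset) (to + offset)

Since the bytes fetched from the remote are appended to the local patch file verbatim, each rebased item then points at exactly the bytes of its own patch.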
adddir ./tests/hunk_commute hunk ./camp-core/Camp/Patch/Primitive.hs 251 - Hunk p_fp (p_skipBytes - byteMovement) - (p_skipLines - lineMovement) + Hunk p_fp (p_skipBytes + byteMovement) + (p_skipLines + lineMovement) addfile ./tests/hunk_commute/run_test.sh hunk ./tests/hunk_commute/run_test.sh 1 +#!/bin/sh + +# This test tests the basic functionality, i.e.: +# * Initialising a repo works +# * Adding a file works +# * Recording a patch works +# * Pulling from a repo works +# * Trivial non-conflicting merges work + +set -e + +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" + +cleanup() { + cd "$HERE" + rm -rf a + rm -rf b + rm -rf c +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +# Set up the base repo in "a" +mkdir a +cd a +"$CAMP" init +printf 'line1\nline2\nline3\n' > f +"$CAMP" add f +"$CAMP" record -a base +printf 'line1\nx1\nx2\nline2\nline3\n' > f +"$CAMP" record -a x +printf 'line1\nx1\nx2\nline2\ny1\n2y\ny3\ny4\nline3\n' > f +"$CAMP" record -a y +cd .. + +mkdir b +cd b +"$CAMP" init +"$CAMP" pull ../a P-base +"$CAMP" pull ../a P-y +"$CAMP" pull ../a P-x +cd .. + +mkdir c +cd c +"$CAMP" init +"$CAMP" pull ../b P-base +"$CAMP" pull ../b P-x +"$CAMP" pull ../b P-y +cd .. + +# Now check that the file content is what we expect +diff -u a/f b/f +diff -u a/f c/f + hunk ./camp-bin/Camp/Command/Get.hs 11 - -- XXX We leak space for the remote inventory - remoteInventory <- readInventory remoteRepo + remoteInventory <- readInventoryLazily remoteRepo hunk ./camp-bin/Camp/Command/Get.hs 15 + -- XXX We need to make a local copy rather than reading from + -- the remote twice + remoteInventory' <- readInventoryLazily remoteRepo hunk ./camp-bin/Camp/Command/Get.hs 19 - remoteInventory + remoteInventory' hunk ./camp-bin/Camp/Command/Get.hs 33 + -- XXX Should we just copy the tree instead? hunk ./camp-bin/Camp/Command/Get.hs 33 - -- XXX Should we just copy the tree instead? - localInventory'' <- readInventoryLazily localRepo - patches'' <- readMegaPatches localRepo localInventory'' - applyToWorking localRepo patches'' + + -- Rather than applying all the patches again with + -- applyToWorking, we just copy the result from pristine. + -- This is normally much faster. 
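+          -- (Copying from pristine relies on the working directory
+          -- containing nothing but _camp at this point; see the XXX above
+          -- about making a directory rather than populating the current one.)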
+ copyPristineToWorking localRepo hunk ./camp-repository/Camp/Repository.hs 26 + copyPristineToWorking, hunk ./camp-repository/Camp/Repository.hs 277 +-- XXX Assumes that working is empty (apart from _camp) +copyPristineToWorking :: Repository -> IO () +copyPristineToWorking r = copyTree (pristineDir r) (workingDir r) + hunk ./camp-core/Camp/Utils.hs 4 - readBinaryFile, writeBinaryFile, copyTree, inDir, + readBinaryFile, writeBinaryFile, copyTree, copyTreeToDirectory, inDir, hunk ./camp-core/Camp/Utils.hs 69 - xs <- getDirectoryContents from - mapM_ copyEntry xs + copyTreeToDirectory from to + +copyTreeToDirectory :: FilePath -> FilePath -> IO () +copyTreeToDirectory from to = do xs <- getDirectoryContents from + mapM_ copyEntry xs hunk ./camp-repository/Camp/Repository.hs 279 -copyPristineToWorking r = copyTree (pristineDir r) (workingDir r) +copyPristineToWorking r = copyTreeToDirectory (pristineDir r) (workingDir r) hunk ./camp-repository/Camp/Diff.hs 15 +import System.FilePath hunk ./camp-repository/Camp/Diff.hs 24 - return $ mkDiff newPath 0 0 commonLines oldLines newLines + -- XXX We should actually normalise consistently everywhere, + -- and think about what normalisation actually means + newPath' = normalise newPath + return $ mkDiff newPath' 0 0 commonLines oldLines newLines adddir ./camp-view adddir ./camp-view/Camp adddir ./camp-view/Camp/View adddir ./camp-view/Camp/View/Draw addfile ./camp-view/Camp/View.hs hunk ./camp-view/Camp/View.hs 1 + +{- +XXX This program is just a quick hack. It can probably be written much +more efficiently, and much more nicely +-} + +module Main (main) where + +import Camp.Patch.Name +import Camp.Repository + +import Camp.View.Deps +import qualified Camp.View.Draw.Text as Text +import qualified Camp.View.Draw.PIL as PIL +import Camp.View.Types + +import System.Environment + +main :: IO () +main = do args <- getArgs + case args of + [] -> doit PIL.draw + ["--pil"] -> doit PIL.draw + ["--text"] -> doit Text.draw + _ -> error "XXX Bad args" + +doit :: (Mapping Name DepInfo -> IO ()) -> IO () +doit drawFun + = do repo <- getRepo + inventory <- readInventory repo + patches <- readMegaPatches repo inventory + drawFun $ findMarkedDeps patches + addfile ./camp-view/Camp/View/Deps.hs hunk ./camp-view/Camp/View/Deps.hs 1 + +{- +XXX This program is just a quick hack. It can probably be written much +more efficiently, and much more nicely +-} + +module Camp.View.Deps (findMarkedDeps) where + +import Camp.Patch.Commute +import Camp.Patch.MegaPatch +import Camp.Patch.Name +import Camp.Patch.Sequence +import Camp.Patch.RevSequence + +import Camp.View.Types + +import Data.List +import qualified Data.Set as Set + +-- XXX Lots of reverse... +findMarkedDeps :: Seq MegaPatch from to -> Mapping Name DepInfo +findMarkedDeps = reverse . markLastDeps . reverse . 
findAllDeps + +findAllDeps :: Seq MegaPatch from to -> Mapping Name [Name] +findAllDeps = f NilRevSeq + where f :: RevSeq MegaPatch from mid -> Seq MegaPatch mid to + -> Mapping Name [Name] + f _ Nil = [] + f past (p `Cons` ps) = (name p, findDeps past p) + : f (past `Snoc` p) ps + +findDeps :: RevSeq MegaPatch from mid -> MegaPatch mid to -> [Name] +findDeps NilRevSeq _ = [] +findDeps (ps `Snoc` p) me = case commute (p `Then` me) of + Just (me' `Then` _) -> findDeps ps me' + Nothing -> + case commuteOut ps p of + HiddenFrom ps' -> + name p : findDeps ps' me + +-- XXX This should be in core +data HiddenFrom p to + where HiddenFrom :: p from to -> HiddenFrom p to + +commuteOut :: RevSeq MegaPatch from mid -> MegaPatch mid to + -> HiddenFrom (RevSeq MegaPatch) to +commuteOut NilRevSeq _ = HiddenFrom NilRevSeq +commuteOut (ps `Snoc` p) me = case commute (p `Then` me) of + Just (me' `Then` p') -> + case commuteOut ps me' of + HiddenFrom ps' -> + HiddenFrom (ps' `Snoc` p') + Nothing -> + case commuteOut ps p of + HiddenFrom ps' -> + commuteOut ps' me + +markLastDeps :: Mapping Name [Name] + -> Mapping Name DepInfo +markLastDeps = f Set.empty + where f _ [] = [] + f seenNames ((patchName, depNames) : xs) + = let allNames = patchName : depNames + noRevDeps = patchName `Set.notMember` seenNames + depNames' = [ (n, n `Set.notMember` seenNames) + | n <- depNames ] + seenNames' = foldl' (flip Set.insert) seenNames allNames + in (patchName, (noRevDeps, depNames')) : f seenNames' xs + addfile ./camp-view/Camp/View/Draw/PIL.hs hunk ./camp-view/Camp/View/Draw/PIL.hs 1 + +{- +XXX This program is just a quick hack. It can probably be written much +more efficiently, and much more nicely +-} + +module Camp.View.Draw.PIL (draw) where + +import Camp.Patch.Name +import Camp.Patch.Pretty + +import Camp.View.Types + +import Control.Monad +import System.Cmd +import System.Exit +import System.IO + +draw :: Mapping Name DepInfo -> IO () +draw m = do h <- openFile "camp-view.py" WriteMode + hPutStrLn h "#!/usr/bin/python" + hPutStrLn h "" + hPutStrLn h "import Image, ImageFont, ImageDraw" + hPutStrLn h "" + hPutStrLn h "image = Image.new('RGB', (200, 200), '#FFFFFF')" + hPutStrLn h "draw = ImageDraw.Draw(image)" + hPutStrLn h "" + draw' h 0 [] m + hPutStrLn h "" + hPutStrLn h "image.save('camp-view.png', 'PNG')" + hClose h + rPython <- rawSystem "python" ["camp-view.py"] + case rPython of + ExitSuccess -> + do rQiv <- rawSystem "qiv" ["camp-view.png"] + case rQiv of + ExitSuccess -> + return () + f -> + error ("Failed to run qiv: " ++ show f) + f -> + error ("Failed to run python: " ++ show f) + +draw' :: Handle -> Int -> [(Name, Int)] -> Mapping Name DepInfo -> IO () +draw' _ _ [] [] = return () +draw' _ _ _ [] = error "Can't happen: draw': Dangling lines" +draw' h row cols0 ((patchName, (noRevDeps, deps)) : ms) + = do let cols1 = dropDeadCols cols0 deps + cols2 = addColumn patchName cols1 + cols3 = if noRevDeps then cols1 else cols2 + patchCol = case lookup patchName cols2 of + Just col -> col + Nothing -> error "Can't happen: draw': No patchName" + depCols = depColumns deps cols0 + horizBarStart = minimum (patchCol : depCols) + horizBarStop = maximum (patchCol : depCols) + printDeps h row cols0 deps + printHorizBar h row horizBarStart horizBarStop + printPatch h row cols2 patchName + draw' h (row + 1) cols3 ms + +-- XXX Rewrite with filter +dropDeadCols :: [(Name, Int)] -> [(Name, Bool)] -> [(Name, Int)] +dropDeadCols [] _ = [] +dropDeadCols (col@(n, _) : cols) deps = case lookup n deps of + Just True -> cols' + _ 
-> col : cols' + where cols' = dropDeadCols cols deps + +depColumns :: [(Name, Bool)] -> [(Name, Int)] -> [Int] +depColumns deps = f + where depNames = map fst deps + f [] = [] + f ((n, i) : xs) + = if n `elem` depNames then i : f xs + else f xs + +printDeps :: Handle -> Int -> [(Name, Int)] -> [(Name, Bool)] -> IO () +printDeps h row cols deps + = f cols + where depNames = map fst deps + f [] = return () + f ((n, i) : xs) + = do let x = indent + cellWidth * i + (cellWidth `div` 2) + yTop = row * rowHeight + yMiddle = yTop + (interCellHeight `div` 2) + yBottom = case lookup n deps of + Just True -> yTop + (interCellHeight `div` 2) + _ -> yTop + interCellHeight + radius = 2 + ellipseBB = [x - radius, yMiddle - radius, + x + radius, yMiddle + radius] + hPutStrLn h ("draw.line([" ++ + show (x, yTop) ++ ", " ++ + show (x, yBottom) ++ + "], fill='#000000', width=1)") + when (n `elem` depNames) $ + hPutStrLn h ("draw.ellipse(" ++ + show ellipseBB ++ + ", fill='#000000')") + f xs + +printHorizBar :: Handle -> Int -> Int -> Int -> IO () +printHorizBar h row horizBarStart horizBarStop + = do let xLeft = indent + cellWidth * horizBarStart + (cellWidth `div` 2) + xRight = indent + cellWidth * horizBarStop + (cellWidth `div` 2) + y = row * rowHeight + (interCellHeight `div` 2) + hPutStrLn h ("draw.line([" ++ + show (xLeft, y) ++ ", " ++ + show (xRight, y) ++ + "], fill='#000000', width=1)") + +printPatch :: Handle -> Int -> [(Name, Int)] -> Name -> IO () +printPatch h row cols patchName + = do let x = 5 + hPutStrLn h ("draw.text(" ++ show (x, yTop) ++ ", \"" ++ + pprint patchName ++ "\", fill='#000000')") + f cols + where yVeryTop = yTop - (interCellHeight `div` 2) + yTop = row * rowHeight + interCellHeight + yBottom = yTop + cellHeight + f [] = return () + f ((n, i) : xs) + = do let xMiddle = indent + cellWidth * i + (cellWidth `div` 2) + -- height, not width, as we want a circle + radius = cellHeight `div` 2 + xLeft = xMiddle - radius + xRight = xMiddle + radius + if n == patchName + then do hPutStrLn h ("draw.line([" ++ + show (xMiddle, yVeryTop) ++ ", " ++ + show (xMiddle, yTop) ++ + "], fill='#000000', width=1)") + hPutStrLn h ("draw.ellipse(" ++ + show [xLeft, yTop, + xRight, yBottom] ++ + ", outline='#000000')") + else hPutStrLn h ("draw.line([" ++ + show (xMiddle, yTop) ++ ", " ++ + show (xMiddle, yBottom) ++ + "], fill='#000000', width=1)") + f xs + +addColumn :: Name -> [(Name, Int)] -> [(Name, Int)] +addColumn patchName = f 0 + where f i [] = [(patchName, i)] + f i xs@(x@(_, j) : xs') + | i == j = x : f (i + 1) xs' + | otherwise = (patchName, i) : xs + +rowHeight :: Int +rowHeight = cellHeight + interCellHeight + +interCellHeight :: Int +interCellHeight = 15 + +cellHeight :: Int +cellHeight = 10 + +cellWidth :: Int +cellWidth = 30 + +indent :: Int +indent = 120 + addfile ./camp-view/Camp/View/Draw/Text.hs hunk ./camp-view/Camp/View/Draw/Text.hs 1 + +{- +XXX This program is just a quick hack. It can probably be written much +more efficiently, and much more nicely +-} + +module Camp.View.Draw.Text (draw) where + +import Camp.Patch.Name +import Camp.Patch.Pretty + +import Camp.View.Types + +draw :: Mapping Name DepInfo -> IO () +draw m = do let maxNameLength = maximum $ map (length . pprint . 
fst) m + draw' (maxNameLength + 2) [] m + +colWidth :: Int +colWidth = 4 + +draw' :: Int -> [(Name, Int)] -> Mapping Name DepInfo -> IO () +draw' _ [] [] = return () +draw' _ _ [] = error "Can't happen: draw': Dangling lines" +draw' indent cols0 ((patchName, (noRevDeps, deps)) : ms) + = do let cols1 = dropDeadCols cols0 deps + cols2 = addColumn patchName cols1 + cols3 = if noRevDeps then cols1 else cols2 + patchCol = case lookup patchName cols2 of + Just col -> col + Nothing -> error "Can't happen: draw': No patchName" + depCols = depColumns deps cols0 + horizBarStart = minimum (patchCol : depCols) + horizBarStop = maximum (patchCol : depCols) + printDeps indent cols0 horizBarStart horizBarStop patchCol deps + printPatch indent cols2 patchName + draw' indent cols3 ms + +-- XXX Rewrite with filter +dropDeadCols :: [(Name, Int)] -> [(Name, Bool)] -> [(Name, Int)] +dropDeadCols [] _ = [] +dropDeadCols (col@(n, _) : cols) deps = case lookup n deps of + Just True -> cols' + _ -> col : cols' + where cols' = dropDeadCols cols deps + +depColumns :: [(Name, Bool)] -> [(Name, Int)] -> [Int] +depColumns deps = f + where depNames = map fst deps + f [] = [] + f ((n, i) : xs) + = if n `elem` depNames then i : f xs + else f xs + +printDeps :: Int -> [(Name, Int)] -> Int -> Int -> Int -> [(Name, Bool)] + -> IO () +printDeps indent cols hStart hStop patchCol deps + = do putStr (replicate indent ' ') + f 0 cols + where depNames = map fst deps + f offset [] = if offset < colWidth * hStop + then let shift = colWidth * hStop - offset + in do putStr $ replicate shift '-' + putChar '+' + putStrLn "" + else putStrLn "" + f offset ((n, i) : xs) + | (offset < colWidth * hStart) && (hStart < i) + = do let shift = colWidth * hStart - offset + putStr $ replicate shift ' ' + f (colWidth * hStart) ((n, i) : xs) + | (offset < colWidth * hStop) && (hStop < i) + = do let shift = colWidth * hStop - offset + putStr $ replicate shift '-' + f (colWidth * hStop) ((n, i) : xs) + | otherwise + = do let shift = colWidth * i - offset + shiftChar = if (colWidth * hStart <= offset) && + (colWidth * hStop >= offset) + then '-' + else ' ' + putStr $ replicate shift shiftChar + let myChar = case (n `elem` depNames, i == patchCol) of + (True, True) -> '|' + (True, False) -> '*' + (False, True) -> '+' + (False, False) -> '|' + putChar myChar + f (offset + shift + 1) xs + +printPatch :: Int -> [(Name, Int)] -> Name -> IO () +printPatch indent cols patchName = do putStr $ pad indent $ pprint patchName + f 0 cols + where f _ [] = putStrLn "" + f offset ((n, i) : xs) = do let shift = colWidth * i - offset + putStr $ replicate shift ' ' + if n == patchName + then putChar 'O' + else putChar '|' + f (offset + shift + 1) xs + +-- XXX Should be in a Utils module +pad :: Int -> String -> String +pad i str = str ++ replicate (i - length str) ' ' + +addColumn :: Name -> [(Name, Int)] -> [(Name, Int)] +addColumn patchName = f 0 + where f i [] = [(patchName, i)] + f i xs@(x@(_, j) : xs') + | i == j = x : f (i + 1) xs' + | otherwise = (patchName, i) : xs + addfile ./camp-view/Camp/View/Types.hs hunk ./camp-view/Camp/View/Types.hs 1 + +module Camp.View.Types (Mapping, DepInfo) where + +import Camp.Patch.Name + +type Mapping from to = [(from, to)] + +type DepInfo = (Bool, -- Does no-one dep on me? + [(Name, + Bool)]) -- Am I the last to dep on this? + addfile ./camp-view/LICENSE hunk ./camp-view/LICENSE 1 +Copyright (c) Ian Lynagh, 2008. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the Authors nor the names of any contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + addfile ./camp-view/Setup.hs hunk ./camp-view/Setup.hs 1 + +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain + addfile ./camp-view/camp-view.cabal hunk ./camp-view/camp-view.cabal 1 +Name: camp-view +Version: 0.1 +License: BSD3 +License-File: LICENSE +Copyright: 2008 Ian Lynagh +Author: Ian Lynagh +Maintainer: Ian Lynagh +Synopsis: Camp View +Description: + Camp (Commute And Merge Patches) repo viewer. + XXX This program is just a quick hack. It can probably be + written much more efficiently, and much more nicely. 
+Category: Development +Build-Type: Simple +Cabal-Version: >=1.2 + +Executable camp-view + Main-Is: Camp/View.hs + Other-Modules: Camp.View.Deps + Camp.View.Draw.PIL + Camp.View.Draw.Text + Camp.View.Types + + Ghc-Options: -Wall -fwarn-tabs -Werror + + Build-Depends: base, bytestring, camp-core, camp-repository, + containers, directory, filepath, pretty, process + + Extensions: CPP, ForeignFunctionInterface, ScopedTypeVariables, + EmptyDataDecls, GADTs + + if impl(ghc < 6.9) + Extensions: PatternSignatures + hunk ./camp-bin/Camp/Command/Record.hs 8 +import Camp.Patch.MegaPatch hunk ./camp-bin/Camp/Command/Record.hs 15 +import Data.ByteString.Lazy (ByteString) hunk ./camp-bin/Camp/Command/Record.hs 20 +import System.IO +import System.Time hunk ./camp-bin/Camp/Command/Record.hs 43 - recordMegaPatch r n primitives' + maybeAuthor <- getAuthor r + author <- case maybeAuthor of + Just x -> return x + Nothing -> + error "XXX No author found in _camp/repo/prefs/author" + (short, long) <- getDescription + time <- getClockTime + let mi = MetaInfo short long author time + recordMegaPatch r n mi primitives' + +getDescription :: IO (ByteString, ByteString) +getDescription = do putStr "Enter short description: " + hFlush stdout + line <- getLine + -- XXX long is always empty for now + return (BSC.pack line, BS.empty) hunk ./camp-bin/camp.cabal 33 - containers, directory, filepath + containers, directory, filepath, old-time hunk ./camp-core/Camp/Patch/InputOutput.hs 10 +import System.Time hunk ./camp-core/Camp/Patch/InputOutput.hs 104 +instance InputOutput ClockTime where + input bs0 = case input bs0 of + (i, bs1) -> + case input bs1 of + (j, bs2) -> + (TOD i j, bs2) + valid bs0 _ = case valid bs0 (undefined :: Integer) of + Left err -> Left err + Right bs1 -> + valid bs1 (undefined :: Integer) + output (TOD i j) = output i `BS.append` output j + hunk ./camp-core/Camp/Patch/MegaPatch.hs 2 -module Camp.Patch.MegaPatch (MegaPatch(..), commuteToPrefix) where +-- XXX Work around GHC warning bugs: +{-# OPTIONS_GHC -fno-warn-unused-imports #-} + +module Camp.Patch.MegaPatch ( + MetaInfo(..), MegaPatch(..), commuteToPrefix + ) + where hunk ./camp-core/Camp/Patch/MegaPatch.hs 20 +import Data.ByteString.Lazy (ByteString) hunk ./camp-core/Camp/Patch/MegaPatch.hs 22 +import qualified Data.ByteString.Lazy.Char8 as BSC +import System.Time hunk ./camp-core/Camp/Patch/MegaPatch.hs 27 - MegaPatch :: Name -> Seq Catch from to -> MegaPatch from to + MegaPatch :: Name -> MetaInfo -> Seq Catch from to -> MegaPatch from to hunk ./camp-core/Camp/Patch/MegaPatch.hs 30 - input bs = case input bs of - (n, bs') -> - case input bs' of - (cs, bs'') -> - (MegaPatch n cs, bs'') - valid bs _ = case valid bs (undefined :: Name) of - Left err -> Left err - Right bs' -> valid bs' (undefined :: Seq Catch from to) - output (MegaPatch n cs) = output n `BS.append` output cs + input bs0 = case input bs0 of + (n, bs1) -> + case input bs1 of + (mi, bs2) -> + case input bs2 of + (cs, bs3) -> + (MegaPatch n mi cs, bs3) + valid bs0 _ = case valid bs0 (undefined :: Name) of + Left err -> Left err + Right bs1 -> + case valid bs1 (undefined :: MetaInfo) of + Left err -> Left err + Right bs2 -> valid bs2 (undefined :: Seq Catch from to) + output (MegaPatch n mi cs) = output n `BS.append` + output mi `BS.append` + output cs hunk ./camp-core/Camp/Patch/MegaPatch.hs 53 - ppr (MegaPatch n p) = text "MegaPatch" <+> pprAtomic n - $$ nest 4 (pprAtomic p) + ppr (MegaPatch n mi p) = text "MegaPatch" <+> pprAtomic n + $$ nest 4 (ppr mi) + $$ nest 4 (pprAtomic 
p) hunk ./camp-core/Camp/Patch/MegaPatch.hs 58 - name (MegaPatch n _) = n + name (MegaPatch n _ _) = n hunk ./camp-core/Camp/Patch/MegaPatch.hs 65 - commute (MegaPatch np p `Then` MegaPatch nq q) + commute (MegaPatch np mip p `Then` MegaPatch nq miq q) hunk ./camp-core/Camp/Patch/MegaPatch.hs 67 - return (MegaPatch nq q' `Then` MegaPatch np p') + return (MegaPatch nq miq q' `Then` MegaPatch np mip p') hunk ./camp-core/Camp/Patch/MegaPatch.hs 70 - invert (MegaPatch n p) = MegaPatch (inverseName n) (invert p) + invert (MegaPatch n mi p) = MegaPatch (inverseName n) mi (invert p) hunk ./camp-core/Camp/Patch/MegaPatch.hs 73 - apply (MegaPatch _ p) = apply p + apply (MegaPatch _ _ p) = apply p + + + +data MetaInfo = MetaInfo ByteString -- Short description + ByteString -- Long description + ByteString -- Author + ClockTime -- Date of recording. XXX from old-time + +instance Ppr MetaInfo where + ppr (MetaInfo short _long author date) + = (text $ show date) <> text " " <> (text $ show $ BSC.unpack author) + $$ text " * " <> (text $ show $ BSC.unpack short) + +instance InputOutput MetaInfo where + input bs0 = case input bs0 of + (short, bs1) -> + case input bs1 of + (long, bs2) -> + case input bs2 of + (author, bs3) -> + case input bs3 of + (date, bs4) -> + (MetaInfo short long author date, bs4) + valid bs0 _ = case valid bs0 (undefined :: ByteString) of + Left err -> Left err + Right bs1 -> + case valid bs1 (undefined :: ByteString) of + Left err -> Left err + Right bs2 -> + case valid bs2 (undefined :: ByteString) of + Left err -> Left err + Right bs3 -> valid bs3 (undefined :: ClockTime) + output (MetaInfo short long author date) + = output short `BS.append` output long `BS.append` + output author `BS.append` output date hunk ./camp-core/Camp/Patch/Merge.hs 103 - merge (Fork (MegaPatch _ ps) (MegaPatch nq qs)) + merge (Fork (MegaPatch _ _ ps) (MegaPatch nq miq qs)) hunk ./camp-core/Camp/Patch/Merge.hs 106 - Anonymous1 (MegaPatch nq qs') + Anonymous1 (MegaPatch nq miq qs') hunk ./camp-core/camp-core.cabal 40 - Build-Depends: base, bytestring, containers, directory, filepath, pretty + Build-Depends: base, bytestring, containers, directory, filepath, + old-time, pretty hunk ./camp-repository/Camp/Record.hs 12 -recordMegaPatch :: Repository -> Name -> Seq Primitive from to -> IO () -recordMegaPatch r n primitives = do +recordMegaPatch :: Repository -> Name -> MetaInfo -> Seq Primitive from to + -> IO () +recordMegaPatch r n mi primitives = do hunk ./camp-repository/Camp/Record.hs 17 - megaPatch = MegaPatch n catches + megaPatch = MegaPatch n mi catches hunk ./camp-repository/Camp/Repository.hs 28 + getAuthor, hunk ./camp-repository/Camp/Repository.hs 79 +prefsDir :: Repository -> FilePath +prefsDir r = repoRoot r "prefs" + +authorFile :: Repository -> FilePath +authorFile r = prefsDir r "author" + hunk ./camp-repository/Camp/Repository.hs 264 -writeMegaPatch r m@(MegaPatch n _) +writeMegaPatch r m@(MegaPatch n _ _) hunk ./camp-repository/Camp/Repository.hs 340 +getAuthor :: Repository -> IO (Maybe ByteString) +getAuthor r = do m1 <- maybeReadFile $ authorFile r + case m1 of + Nothing -> + do globalCampDir <- getAppUserDataDirectory "camp" + maybeReadFile (globalCampDir "author") + j -> return j + +-- XXX This should be in a Utils module +-- XXX Should really do this by catching exceptions +maybeReadFile :: FilePath -> IO (Maybe ByteString) +maybeReadFile fp = do exists <- doesFileExist fp + if exists then do bs <- BS.readFile fp + return (Just bs) + else return Nothing + hunk 
./camp-bin/Camp/Command/Interactive.hs 33 -interactive :: Seq Primitive from to - -> IO (Then (Seq Primitive) (Seq Primitive) from to) -interactive ps = do +interactive :: Commute p p + => (forall x y . p x y -> String) -> Seq p from to + -> IO (Then (Seq p) (Seq p) from to) +interactive printer ps = do hunk ./camp-bin/Camp/Command/Interactive.hs 50 - select NilRevSeq ps Nil + select printer NilRevSeq NilRevSeq ps hunk ./camp-bin/Camp/Command/Interactive.hs 61 -select :: RevSeq Primitive from x -- chosen, in reverse order - -> Seq Primitive x y -- undecided - -> Seq Primitive y to -- rejected - -> IO (Then (Seq Primitive) (Seq Primitive) from to) -select revChosen Nil rejected = return (toSeq revChosen `Then` rejected) -select revChosen (p `Cons` ps) rejected - = do putStrLn (pprint p) - putStr "Want this patch? " - c <- getChar - case c of - 'y' -> select (revChosen `Snoc` p) ps rejected - 'n' -> case commute (p `Then` ps) of - Just (ps' `Then` p') -> - select revChosen ps' (p' `Cons` rejected) - Nothing -> - error "XXX Interactive commute fail" - 'q' -> exitWith ExitSuccess - _ -> select revChosen (p `Cons` ps) rejected +select :: Commute p p + => (forall c1 c2 . p c1 c2 -> String) + -> RevSeq p from x -- chosen, in reverse order + -> RevSeq p x y -- rejected + -> Seq p y to -- undecided + -> IO (Then (Seq p) (Seq p) from to) +select _ chosen rejected Nil + = return (toSeq chosen `Then` toSeq rejected) +select printer chosen rejected (p `Cons` ps) + = do putStrLn $ printer p + case commute (rejected `Then` p) of + Nothing -> + do putStrLn "Skipping due to dependencies" + select printer chosen (rejected `Snoc` p) ps + Just (p' `Then` rejected') -> + do putStr "Want this patch? " + c <- getChar + putStrLn "" + case c of + 'y' -> select printer (chosen `Snoc` p') rejected' ps + 'n' -> select printer chosen (rejected `Snoc` p) ps + 'q' -> exitWith ExitSuccess + _ -> select printer chosen rejected (p `Cons` ps) hunk ./camp-bin/Camp/Command/Pull.hs 4 +import Camp.Command.Interactive hunk ./camp-bin/Camp/Command/Pull.hs 11 +import Camp.Patch.Pretty hunk ./camp-bin/Camp/Command/Pull.hs 47 - doPull localRepo localInventory localPatches remotePatches + do res <- interactive describeMegaPatch remotePatches + case res of + remotePatches' `Then` _ -> + doPull localRepo localInventory localPatches remotePatches' hunk ./camp-bin/Camp/Command/Pull.hs 63 +describeMegaPatch :: MegaPatch from to -> String +describeMegaPatch (MegaPatch _ mi _) = pprint mi + hunk ./camp-bin/Camp/Command/Record.hs 10 +import Camp.Patch.Pretty hunk ./camp-bin/Camp/Command/Record.hs 40 - then do wanted `Then` _ <- interactive primitives + then do wanted `Then` _ <- interactive pprint primitives hunk ./camp-bin/camp.cabal 38 - EmptyDataDecls + EmptyDataDecls, Rank2Types, FlexibleContexts hunk ./camp-core/Camp/Patch/RevSequence.hs 4 +import Camp.Patch.Commute hunk ./camp-core/Camp/Patch/RevSequence.hs 20 +commutePastRevSequence :: Commute p q + => Then p (RevSeq q) from to + -> Maybe (Then (RevSeq q) p from to) +commutePastRevSequence (p `Then` NilRevSeq) = Just (NilRevSeq `Then` p) +commutePastRevSequence (p `Then` Snoc qs q) + = do (qs' `Then` p') <- commutePastRevSequence (p `Then` qs) + (q' `Then` p'') <- commute (p' `Then` q) + return (Snoc qs' q' `Then` p'') + +commuteRevSequencePast :: Commute p q + => Then (RevSeq p) q from to + -> Maybe (Then q (RevSeq p) from to) +commuteRevSequencePast (NilRevSeq `Then` q) = Just (q `Then` NilRevSeq) +commuteRevSequencePast (Snoc ps p `Then` q) + = do (q' `Then` p') <- commute (p 
`Then` q) + (q'' `Then` ps') <- commuteRevSequencePast (ps `Then` q') + return (q'' `Then` Snoc ps' p') + +instance Commute p q => Commute p (RevSeq q) where + commute = commutePastRevSequence + +instance Commute p q => Commute (RevSeq p) q where + commute = commuteRevSequencePast + +-- We need to provide this redundant instance or GHC won't know which +-- of the previous two instances to use for +-- Commute (RevSeq Patch) (RevSeq Patch) +instance Commute p q => Commute (RevSeq p) (RevSeq q) where + -- This can use either commutePastRevSequence or commuteRevSequencePast + commute = commuteRevSequencePast + hunk ./camp-bin/Camp/Command/Record.hs 27 - recordName r n True + recordName r n Nothing True hunk ./camp-bin/Camp/Command/Record.hs 29 - recordName r n False -record _ r [n] = recordName r (Name Positive (BSC.pack n)) True -record _ r ["-a", n] = recordName r (Name Positive (BSC.pack n)) False + recordName r n Nothing False +record _ r ["-a", "-m", short] = do n <- genName r + recordName r n (Just short) False +record _ r [n] = recordName r (Name Positive (BSC.pack n)) Nothing True +record _ r ["-a", n] = recordName r (Name Positive (BSC.pack n)) Nothing False hunk ./camp-bin/Camp/Command/Record.hs 36 -recordName :: Repository -> Name -> Bool -> IO () -recordName r n chooseInteractively = do +recordName :: Repository -> Name -> Maybe String -> Bool -> IO () +recordName r n m chooseInteractively = do hunk ./camp-bin/Camp/Command/Record.hs 51 - (short, long) <- getDescription + (short, long) <- case m of + Just x -> return (BSC.pack x, BS.empty) + Nothing -> getDescription hunk ./camp-core/Camp/Patch/MegaPatch.hs 84 - = (text $ show date) <> text " " <> (text $ show $ BSC.unpack author) - $$ text " * " <> (text $ show $ BSC.unpack short) + = (text $ show date) <> text " " <> (text $ BSC.unpack author) + $$ text " * " <> (text $ BSC.unpack short) hunk ./camp-repository/Camp/Repository.hs 341 -getAuthor r = do m1 <- maybeReadFile $ authorFile r +getAuthor r = do m1 <- maybeReadLine $ authorFile r hunk ./camp-repository/Camp/Repository.hs 345 - maybeReadFile (globalCampDir "author") + maybeReadLine (globalCampDir "author") hunk ./camp-repository/Camp/Repository.hs 350 -maybeReadFile :: FilePath -> IO (Maybe ByteString) -maybeReadFile fp = do exists <- doesFileExist fp - if exists then do bs <- BS.readFile fp - return (Just bs) +maybeReadLine :: FilePath -> IO (Maybe ByteString) +maybeReadLine fp = do exists <- doesFileExist fp + if exists then do h <- openFile fp ReadMode + s <- hGetLine h + hClose h + return $ Just $ BSC.pack s hunk ./camp-bin/Camp/Command/Interactive.hs 73 - do putStrLn "Skipping due to dependencies" + do putStrLn $ yellow "Skipping due to dependencies" hunk ./camp-bin/Camp/Command/Interactive.hs 76 - do putStr "Want this patch? " + do putStr $ green "Want this patch? 
" + putStr "[ynq] " hunk ./camp-bin/Camp/Command/Interactive.hs 86 +-- XXX For now we just assume that ANSI escape sequences will work +yellow :: String -> String +yellow xs = "\ESC[0;33m" ++ xs ++ "\ESC[0m" + +green :: String -> String +green xs = "\ESC[0;32m" ++ xs ++ "\ESC[0m" + hunk ./camp-view/Camp/View/Draw/PIL.hs 137 - ", outline='#000000')") + ", fill='#0000FF')") hunk ./camp-core/Camp/Patch/Primitive.hs 276 - apply (AddDir fp) = do fileExists <- doesFileExist fp - directoryExists <- doesDirectoryExist fp - if fileExists || directoryExists - then error ("Already exists: " ++ show fp) - else createDirectory fp - apply (RmDir fp) = do -- XXX Should check for emptiness - removeDirectory fp - apply (MvDir from to) - = do fromDirectoryExists <- doesDirectoryExist from - toFileExists <- doesFileExist to - toDirectoryExists <- doesDirectoryExist to - if fromDirectoryExists - then if toFileExists || toDirectoryExists - then error ("Already exists: " ++ show to) - else renameDirectory from to - else error ("Not a directory: " ++ show from) - apply (AddFile fp) = do fileExists <- doesFileExist fp - directoryExists <- doesDirectoryExist fp - if fileExists || directoryExists - then error ("Already exists: " ++ show fp) - else withBinaryFile fp WriteMode $ \_ -> - return () - apply (RmFile fp) = do size <- withBinaryFile fp ReadMode hFileSize - if size /= 0 - then error ("Not empty: " ++ show fp) - else removeFile fp - apply (MvFile from to) - = do fromFileExists <- doesFileExist from - toFileExists <- doesFileExist to - toDirectoryExists <- doesDirectoryExist to - if fromFileExists - then if toFileExists || toDirectoryExists - then error ("Already exists: " ++ show to) - else renameFile from to - else error ("Not a file: " ++ show from) - apply (Hunk fp skipBytes _ oldBytes _ newBytes _) - -- XXX This should check that the file is big enough etc - = do content <- BS.readFile fp - case BS.splitAt skipBytes content of - -- XXX Should check length of skipped - (skipped, rest) -> - case BS.splitAt (BS.length oldBytes) rest of - (_old, rest') -> - -- XXX sanity check oldBytes == old - do let content' = skipped `BS.append` newBytes - `BS.append` rest' - evaluate $ BS.length content -- XXX - BS.writeFile fp content' - -- Nothing -> error "Old patch content is wrong" - -- Nothing -> error "Not enough lines to skip" - apply (Binary fp oldBytes newBytes) - = do content <- BS.readFile fp - if oldBytes == content - then BS.writeFile fp newBytes - else error "XXX Old content is wrong" + apply prim = case prim of + AddDir fp -> do fileExists <- doesFileExist fp + directoryExists <- doesDirectoryExist fp + if fileExists || directoryExists + then error ("Already exists: " ++ show fp) + else createDirectory fp + RmDir fp -> do -- XXX Should check for emptiness + removeDirectory fp + MvDir from to -> + do fromDirectoryExists <- doesDirectoryExist from + toFileExists <- doesFileExist to + toDirectoryExists <- doesDirectoryExist to + if fromDirectoryExists + then if toFileExists || toDirectoryExists + then error ("Already exists: " ++ show to) + else renameDirectory from to + else error ("Not a directory: " ++ show from) + AddFile fp -> do fileExists <- doesFileExist fp + directoryExists <- doesDirectoryExist fp + if fileExists || directoryExists + then error ("Already exists: " ++ show fp) + else withBinaryFile fp WriteMode $ \_ -> + return () + RmFile fp -> do size <- withBinaryFile fp ReadMode hFileSize + if size /= 0 + then error ("Not empty: " ++ show fp) + else removeFile fp + MvFile from to -> + do 
fromFileExists <- doesFileExist from + toFileExists <- doesFileExist to + toDirectoryExists <- doesDirectoryExist to + if fromFileExists + then if toFileExists || toDirectoryExists + then error ("Already exists: " ++ show to) + else renameFile from to + else error ("Not a file: " ++ show from) + Hunk fp skipBytes _ oldBytes _ newBytes _ -> + -- XXX This should check that the file is big enough etc + do content <- BS.readFile fp + case BS.splitAt skipBytes content of + -- XXX Should check length of skipped + (skipped, rest) -> + case BS.splitAt (BS.length oldBytes) rest of + (_old, rest') -> + -- XXX sanity check oldBytes == old + do let content' = skipped `BS.append` newBytes + `BS.append` rest' + evaluate $ BS.length content -- XXX + BS.writeFile fp content' + -- Nothing -> error "Old patch content is wrong" + -- Nothing -> error "Not enough lines to skip" + Binary fp oldBytes newBytes -> + do content <- BS.readFile fp + if oldBytes == content + then BS.writeFile fp newBytes + else error "XXX Old content is wrong" hunk ./camp-core/Camp/Patch/Apply.hs 2 -module Camp.Patch.Apply (Apply(..)) where +module Camp.Patch.Apply (Apply(..), flush, applyFully) where + +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS +import Camp.Types + +-- While we're applying things, we don't write to +-- disk eagerly as the next thing may want to change the +-- same file +type ApplyState = Maybe (FilePath, -- Filename we are dealing with + Bytes, -- Number of bytes in the first ByteString + ByteString, -- These ByteStrings concatenated are + ByteString) -- the current required file content hunk ./camp-core/Camp/Patch/Apply.hs 17 - apply :: p from to -> IO () + apply :: ApplyState -> p from to -> IO ApplyState + +flush :: ApplyState -> IO () +flush Nothing = return () +flush (Just (fp, _, start, finish)) + = BS.writeFile fp (start `BS.append` finish) + +-- XXX Should we rename this to apply, and apply to someting else? 
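+-- applyFully is the wrapper for callers that apply a single value and are
+-- done with it: it threads an empty ApplyState through apply and then
+-- flushes whatever buffered file content is left over, e.g.
+--     applyFully (patches :: Seq MegaPatch from to)
+-- Callers applying several values in succession can instead thread the
+-- state through each apply themselves and call flush once at the end.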
+applyFully :: Apply p => p from to -> IO () +applyFully p = do m <- apply Nothing p + flush m + hunk ./camp-core/Camp/Patch/Catch.hs 260 - apply (Patch p) = apply p - apply (Conflictor effect _ _) = apply effect + apply m (Patch p) = apply m p + apply m (Conflictor effect _ _) = apply m effect hunk ./camp-core/Camp/Patch/MegaPatch.hs 73 - apply (MegaPatch _ _ p) = apply p + apply m (MegaPatch _ _ p) = apply m p hunk ./camp-core/Camp/Patch/Patch.hs 57 - apply (Primitive _ p) = apply p + apply m (Primitive _ p) = apply m p hunk ./camp-core/Camp/Patch/Primitive.hs 13 +import Control.Monad hunk ./camp-core/Camp/Patch/Primitive.hs 277 - apply prim = case prim of + apply m prim = case prim of hunk ./camp-core/Camp/Patch/Primitive.hs 283 + return m hunk ./camp-core/Camp/Patch/Primitive.hs 286 + return m + -- XXX We could be clever here, and just update m if necessary hunk ./camp-core/Camp/Patch/Primitive.hs 289 - do fromDirectoryExists <- doesDirectoryExist from + do flush m + fromDirectoryExists <- doesDirectoryExist from hunk ./camp-core/Camp/Patch/Primitive.hs 298 + return Nothing hunk ./camp-core/Camp/Patch/Primitive.hs 305 - RmFile fp -> do size <- withBinaryFile fp ReadMode hFileSize - if size /= 0 - then error ("Not empty: " ++ show fp) - else removeFile fp + return m + RmFile fp -> + case m of + Just (fp', _, start, finish) + | fp == fp' -> + if BS.null start && BS.null finish + then do -- The file might not exist, as we may be applying + -- AddFile f; RmFile f + -- But if we get to this point then we know that + -- the directory doesn't exist, because either + -- AddFile checked it doesn't, or we read the + -- file when doing e.g. a Hunk. + fileExists <- doesFileExist fp + when fileExists $ removeFile fp + return Nothing + else do flush m + error ("Not empty: " ++ show fp) + _ -> + do size <- withBinaryFile fp ReadMode hFileSize + if size /= 0 + then error ("Not empty: " ++ show fp) + else removeFile fp + return m + -- XXX We could be clever here, and just update m if necessary hunk ./camp-core/Camp/Patch/Primitive.hs 330 - do fromFileExists <- doesFileExist from + do flush m + fromFileExists <- doesFileExist from hunk ./camp-core/Camp/Patch/Primitive.hs 339 + return Nothing + -- XXX Handle m hunk ./camp-core/Camp/Patch/Primitive.hs 342 + case m of + Just (fp', startBytes, start, finish) + | fp == fp' -> hunk ./camp-core/Camp/Patch/Primitive.hs 346 - do content <- BS.readFile fp - case BS.splitAt skipBytes content of - -- XXX Should check length of skipped - (skipped, rest) -> - case BS.splitAt (BS.length oldBytes) rest of - (_old, rest') -> - -- XXX sanity check oldBytes == old - do let content' = skipped `BS.append` newBytes - `BS.append` rest' - evaluate $ BS.length content -- XXX - BS.writeFile fp content' - -- Nothing -> error "Old patch content is wrong" - -- Nothing -> error "Not enough lines to skip" + if skipBytes < startBytes + then do let finish' = start `BS.append` finish + apply (Just (fp, 0, BS.empty, finish')) prim + else case BS.splitAt (skipBytes - startBytes) finish of + -- XXX Should check length of skipped + (skipped, finish') -> + case BS.splitAt (BS.length oldBytes) finish' of + (_old, finish'') -> + -- XXX sanity check oldBytes == old + do let start' = skipped `BS.append` + newBytes + startBytes' = skipBytes + + BS.length newBytes + return (Just (fp, + startBytes', + start', + finish'')) + -- Nothing -> error "Old patch content is wrong" + -- Nothing -> error "Not enough lines to skip" + _ -> + do flush m + content <- BS.readFile fp + -- XXX Currently we force the 
length, so that we can + -- safely overwrite. Perhaps we should mv to somewhere + -- under _camp instead? + evaluate $ BS.length content + apply (Just (fp, 0, BS.empty, content)) prim + -- XXX Handle m hunk ./camp-core/Camp/Patch/Primitive.hs 375 - do content <- BS.readFile fp + do flush m + content <- BS.readFile fp hunk ./camp-core/Camp/Patch/Primitive.hs 380 + return Nothing hunk ./camp-core/Camp/Patch/Sequence.hs 135 - apply Nil = return () - apply (Cons p ps) = do apply p - apply ps + apply m Nil = return m + apply m (Cons p ps) = do m' <- apply m p + apply m' ps hunk ./camp-repository/Camp/Repository.hs 279 -applyToPristine r ps = inDir (pristineDir r) $ apply ps +applyToPristine r ps = inDir (pristineDir r) $ applyFully ps hunk ./camp-repository/Camp/Repository.hs 282 -applyToWorking r ps = inDir (workingDir r) $ apply ps +applyToWorking r ps = inDir (workingDir r) $ applyFully ps hunk ./tests/hunk_commute/run_test.sh 40 -"$CAMP" record -a base +"$CAMP" record -a -m base hunk ./tests/hunk_commute/run_test.sh 42 -"$CAMP" record -a x +"$CAMP" record -a -m x hunk ./tests/hunk_commute/run_test.sh 44 -"$CAMP" record -a y +"$CAMP" record -a -m y hunk ./tests/hunk_commute/run_test.sh 50 -"$CAMP" pull ../a P-base -"$CAMP" pull ../a P-y -"$CAMP" pull ../a P-x +"$CAMP" pull ../a base +"$CAMP" pull ../a y +"$CAMP" pull ../a x hunk ./tests/hunk_commute/run_test.sh 58 -"$CAMP" pull ../b P-base -"$CAMP" pull ../b P-x -"$CAMP" pull ../b P-y +"$CAMP" pull ../b base +"$CAMP" pull ../b x +"$CAMP" pull ../b y hunk ./camp-bin/Camp/Command/Pull.hs 10 -import Camp.Patch.Name hunk ./camp-bin/Camp/Command/Pull.hs 14 +import Data.ByteString.Lazy.Char8 (ByteString) +import qualified Data.ByteString.Lazy.Char8 as BSC +import Data.Set (Set) hunk ./camp-bin/Camp/Command/Pull.hs 54 - case mapM parseName wantedPatches of - Just wantedNames -> - let wantedNameSet = Set.fromList wantedNames - in case tryCommuteToPrefix wantedNameSet remotePatches of - Just (remotePatches' `Then` _) -> - doPull localRepo localInventory localPatches remotePatches' - Nothing -> - error "Can't pull those patches due to dependencies" - Nothing -> error "Can't parse the patch names you want" + let wantedShortDescs = map BSC.pack wantedPatches + wantedNameSet = Set.fromList wantedShortDescs + in case tryCommuteToPrefix wantedNameSet remotePatches of + Just (remotePatches' `Then` _) -> + doPull localRepo localInventory localPatches remotePatches' + Nothing -> + error "Can't pull those patches due to dependencies" hunk ./camp-bin/Camp/Command/Pull.hs 81 +tryCommuteToPrefix :: Set ByteString -> Seq MegaPatch from to + -> Maybe (Then (Seq MegaPatch) (Seq MegaPatch) from to) +tryCommuteToPrefix _ Nil = Just (Nil `Then` Nil) +tryCommuteToPrefix ns (p@(MegaPatch _ (MetaInfo shortDesc _ _ _) _) `Cons` ps) + = case tryCommuteToPrefix ns ps of + Just (qs `Then` rs) + | shortDesc `Set.member` ns -> + Just (Cons p qs `Then` rs) + | otherwise -> + case commute (p `Then` qs) of + Just (qs' `Then` p') -> + Just (qs' `Then` Cons p' rs) + Nothing -> Nothing + Nothing -> Nothing + hunk ./camp-core/Camp/Patch/Sequence.hs 3 - Seq(..), names, commuteToPrefix, tryCommuteToPrefix, appendSeq + Seq(..), names, commuteToPrefix, appendSeq hunk ./camp-core/Camp/Patch/Sequence.hs 112 +-- XXX The other use of this, in the pull command, got inlined and speciaised +-- as it now uses the short description rather than the patch name hunk ./camp-bin/Camp/Command/Pull.hs 53 + ["-a"] -> + do doPull localRepo localInventory localPatches remotePatches hunk 
./tests/simple_merge/run_test.sh 40 -"$CAMP" record -a +"$CAMP" record -a -m base hunk ./tests/simple_merge/run_test.sh 47 -"$CAMP" pull ../a +"$CAMP" pull ../a -a hunk ./tests/simple_merge/run_test.sh 49 -"$CAMP" record -a +"$CAMP" record -a -m change23 hunk ./tests/simple_merge/run_test.sh 55 -"$CAMP" record -a +"$CAMP" record -a -m change12 hunk ./tests/simple_merge/run_test.sh 60 -"$CAMP" pull ../b +"$CAMP" pull ../b -a hunk ./tests/simple_merge/run_test.sh 63 -"$CAMP" pull ../a +"$CAMP" pull ../a -a hunk ./tests/file_end_commutes/run_test.sh 47 -"$CAMP" record -a o +"$CAMP" record -a -m o hunk ./tests/file_end_commutes/run_test.sh 49 -"$CAMP" record -a p1 +"$CAMP" record -a -m p1 hunk ./tests/file_end_commutes/run_test.sh 51 -"$CAMP" record -a p2 +"$CAMP" record -a -m p2 hunk ./tests/file_end_commutes/run_test.sh 53 -"$CAMP" record -a p3 +"$CAMP" record -a -m p3 hunk ./tests/file_end_commutes/run_test.sh 61 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base o +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p3 hunk ./tests/file_end_commutes/run_test.sh 72 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base o +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p2 hunk ./tests/file_end_commutes/run_test.sh 83 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base o +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p3 hunk ./tests/file_end_commutes/run_test.sh 94 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base o +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p1 hunk ./tests/file_end_commutes/run_test.sh 105 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base o +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p2 hunk ./tests/file_end_commutes/run_test.sh 116 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base o +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p1 hunk ./tests/file_end_commutes2/run_test.sh 48 -"$CAMP" record -a o +"$CAMP" record -a -m o hunk ./tests/file_end_commutes2/run_test.sh 50 -"$CAMP" record -a p1 +"$CAMP" record -a -m p1 hunk ./tests/file_end_commutes2/run_test.sh 52 -"$CAMP" record -a p2 +"$CAMP" record -a -m p2 hunk ./tests/file_end_commutes2/run_test.sh 54 -"$CAMP" record -a p3 +"$CAMP" record -a -m p3 hunk ./tests/file_end_commutes2/run_test.sh 62 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base o +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p3 hunk ./tests/file_end_commutes2/run_test.sh 73 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base o +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p2 hunk ./tests/file_end_commutes2/run_test.sh 84 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p3 +"$CAMP" pull ../base o +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p3 hunk 
./tests/file_end_commutes2/run_test.sh 95 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base o +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p1 hunk ./tests/file_end_commutes2/run_test.sh 106 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p1 -"$CAMP" pull ../base P-p2 +"$CAMP" pull ../base o +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p1 +"$CAMP" pull ../base p2 hunk ./tests/file_end_commutes2/run_test.sh 117 -"$CAMP" pull ../base P-o -"$CAMP" pull ../base P-p3 -"$CAMP" pull ../base P-p2 -"$CAMP" pull ../base P-p1 +"$CAMP" pull ../base o +"$CAMP" pull ../base p3 +"$CAMP" pull ../base p2 +"$CAMP" pull ../base p1 hunk ./camp-core/Camp/Patch/Primitive.hs 355 - do let start' = skipped `BS.append` + do let start' = start `BS.append` + skipped `BS.append` hunk ./tests/darcs_issue1043_b/run_test.sh 36 -"$CAMP" record -a +"$CAMP" record -a -m base hunk ./tests/darcs_issue1043_b/run_test.sh 43 -"$CAMP" pull ../a +"$CAMP" pull ../a -a hunk ./tests/darcs_issue1043_b/run_test.sh 49 -"$CAMP" record -a +"$CAMP" record -a -m brocolli hunk ./tests/darcs_issue1043_b/run_test.sh 51 -"$CAMP" record -a +"$CAMP" record -a -m artichoke hunk ./tests/darcs_issue1043_b/run_test.sh 57 -"$CAMP" record -a +"$CAMP" record -a -m cougar hunk ./tests/darcs_issue1043_b/run_test.sh 62 -"$CAMP" pull ../a +"$CAMP" pull ../a -a hunk ./tests/darcs_issue1043_b/run_test.sh 64 -"$CAMP" record -a +"$CAMP" record -a -m resolved hunk ./tests/darcs_issue1043_b/run_test.sh 70 -"$CAMP" record -a +"$CAMP" record -a -m apple hunk ./tests/darcs_issue1043_b/run_test.sh 75 -"$CAMP" pull ../a +"$CAMP" pull ../a -a hunk ./camp-bin/Camp/Command/Pull.hs 78 - do is <- writeMegaPatches localRepo newLocalPatches + do -- XXX We're leaking space here + is <- writeMegaPatches localRepo newLocalPatches hunk ./camp-bin/Camp/Command/Pull.hs 81 - applyToRepo localRepo newLocalPatches + applyToPristine localRepo newLocalPatches + applyToWorking localRepo newLocalPatches hunk ./camp-repository/Camp/Repository.hs 23 - applyToRepo, -- XXX space leak hunk ./camp-repository/Camp/Repository.hs 272 -applyToRepo :: Apply p => Repository -> p from to -> IO () -applyToRepo r ps = do applyToPristine r ps - -- XXX This is wrong if there are local changes - applyToWorking r ps - hunk ./camp-repository/Camp/Repository.hs 275 +-- XXX This is wrong if there are local changes addfile ./camp-repository/Camp/Inventory.hs hunk ./camp-repository/Camp/Inventory.hs 1 + +module Camp.Inventory ( + InventoryItem(..), + inventoryToCompactRelativeInventory, + relativeInventoryToInventory, + Content(..), + inventoryToContents + ) where + +import Camp.Patch.InputOutput +import Camp.Patch.Name + +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as BS + +-- XXX Pull may benefit from a contexted sequence of these +-- XXX Curently this is start/end. Should it be start/length? 
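The apply changes at the start of this patch thread an optional state through every primitive: Camp.Patch.Apply's ApplyState remembers which file is currently being edited together with the bytes before and after the edit point, successive Hunks on that file are spliced in memory, and flush (called from applyFully, and whenever a primitive cannot be handled against the buffer) writes the buffer back out. Below is a minimal standalone sketch of that buffering idea; Buffer, flushBuf, applyHunk and applyAllHunks are hypothetical simplified names, not Camp's ApplyState and apply, and like the real code it does not yet sanity-check that the replaced bytes match.

import Control.Exception (evaluate)
import Control.Monad (foldM)
import Data.Int (Int64)
import qualified Data.ByteString.Lazy.Char8 as BS

-- Hypothetical simplified buffer: the file being edited, the bytes before the
-- edit point and the bytes after it.  (Camp's ApplyState also records how many
-- bytes the prefix covers.)
type Buffer = Maybe (FilePath, BS.ByteString, BS.ByteString)

-- Write any pending edits back to disk.
flushBuf :: Buffer -> IO ()
flushBuf Nothing                    = return ()
flushBuf (Just (fp, before, after)) = BS.writeFile fp (before `BS.append` after)

-- Apply one simplified hunk (skip some bytes, replace old with new), reusing
-- the buffer when it already holds the right file.
applyHunk :: Buffer -> FilePath -> Int64 -> BS.ByteString -> BS.ByteString
          -> IO Buffer
applyHunk m fp skip old new =
    case m of
      Just (fp', before, after)
        | fp == fp', skip >= BS.length before ->
            -- Stay in memory: splice the new bytes into the buffered suffix
            -- (no check that the removed bytes really equal old).
            let (kept, rest) = BS.splitAt (skip - BS.length before) after
                before'      = before `BS.append` kept `BS.append` new
            in return (Just (fp, before', BS.drop (BS.length old) rest))
      _ -> do flushBuf m                        -- different file: write it out
              content <- BS.readFile fp
              _ <- evaluate (BS.length content) -- force the read before overwriting
              applyHunk (Just (fp, BS.empty, content)) fp skip old new

-- Apply a batch of hunks and flush whatever is still buffered at the end,
-- playing the role that applyFully plays above.
applyAllHunks :: [(FilePath, Int64, BS.ByteString, BS.ByteString)] -> IO ()
applyAllHunks hs = do m <- foldM (\b (fp, s, o, n) -> applyHunk b fp s o n) Nothing hs
                      flushBuf m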
+data InventoryItem = InventoryItem Name ByteString{-FilePath/Filename-} Integer Integer + +data RelativeInventoryItem = RelativeInventoryItem Name Integer Integer + +instance InputOutput InventoryItem where + input bs0 = case input bs0 of + (n, bs1) -> + case input bs1 of + (fp, bs2) -> + case input bs2 of + (from, bs3) -> + case input bs3 of + (to, bs4) -> + (InventoryItem n fp from to, bs4) + valid bs0 _ = case valid bs0 (undefined :: Name) of + Left err -> Left err + Right bs1 -> + case valid bs1 (undefined :: ByteString) of + Left err -> Left err + Right bs2 -> + case valid bs2 (undefined :: Integer) of + Left err -> Left err + Right bs3 -> + valid bs3 (undefined :: Integer) + output (InventoryItem n fp from to) + = output n `BS.append` + output fp `BS.append` + output from `BS.append` + output to + +-- XXX Both Integer's should be Bytes +data Content = Content ByteString{- FilePath -} Integer Integer + +inventoryToContents :: [InventoryItem] -> [Content] +inventoryToContents [] = [] +inventoryToContents (InventoryItem _ fp from to : is) + = let f cur [] = [Content fp from cur] + f cur is'@(InventoryItem _ fp' from' to' : is'') + | (fp == fp') && (cur + 1 == from') = f to' is'' + | otherwise = Content fp from cur : inventoryToContents is' + in f to is + +inventoryToCompactRelativeInventory :: [InventoryItem] + -> [RelativeInventoryItem] +inventoryToCompactRelativeInventory = inventoryToCompactRelativeInventoryFrom 0 + +-- XXX Make this local to the above +inventoryToCompactRelativeInventoryFrom :: Integer{- XXX Bytes -} + -> [InventoryItem] + -> [RelativeInventoryItem] +inventoryToCompactRelativeInventoryFrom _ [] = [] +inventoryToCompactRelativeInventoryFrom cur (InventoryItem n _ from to : is) + = let to' = cur + (to - from) + in RelativeInventoryItem n cur to' : + inventoryToCompactRelativeInventoryFrom (to' + 1) is + +relativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Integer{- XXX Bytes -} + -> [RelativeInventoryItem] + -> [InventoryItem] +relativeInventoryToInventory _ _ [] = [] +relativeInventoryToInventory fp offset (RelativeInventoryItem n from to : is) + = InventoryItem n fp (from + offset) (to + offset) + : relativeInventoryToInventory fp offset is + hunk ./camp-repository/Camp/Repository.hs 36 +import Camp.Inventory hunk ./camp-repository/Camp/Repository.hs 91 --- XXX Pull may benefit from a contexted sequence of these --- XXX Curently this is start/end. Should it be start/length? 
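The new Camp.Inventory module gathers the byte-range bookkeeping that previously lived in Camp.Repository: each InventoryItem says which file holds a MegaPatch's bytes and where, inventoryToContents coalesces entries that sit back to back in the same file so they can be fetched as one contiguous read, and the relative-inventory functions rebase those ranges when remote patches are copied into a single local patch file (which is how the reworked get command further down uses them). Below is a minimal sketch of the coalescing step; Entry and Region are hypothetical simplified types using offset/length ranges (the representation the inventory moves to near the end of this patch) rather than the start/end pairs used at this point.

-- Hypothetical simplified inventory entry: a patch name, the file holding its
-- bytes, and an (offset, length) range within that file.
data Entry = Entry { entryName :: String
                   , entryFile :: FilePath
                   , entryOff  :: Integer
                   , entryLen  :: Integer
                   } deriving Show

-- A contiguous region of one file that can be fetched with a single read.
data Region = Region FilePath Integer Integer -- file, offset, length
    deriving Show

-- Coalesce runs of entries that sit back to back in the same file, in the
-- spirit of inventoryToContents above.
coalesce :: [Entry] -> [Region]
coalesce []                        = []
coalesce (Entry _ fp off len : es) = go (off + len) es
  where
    go end (Entry _ fp' off' len' : rest)
      | fp' == fp && off' == end = go (off' + len') rest
    go end rest = Region fp off (end - off) : coalesce rest

-- For example:
--   coalesce [Entry "a" "patches" 0 10, Entry "b" "patches" 10 5,
--             Entry "c" "patches" 20 3]
--     == [Region "patches" 0 15, Region "patches" 20 3]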
-data InventoryItem = InventoryItem Name ByteString{-FilePath/Filename-} Integer Integer - -data RelativeInventoryItem = RelativeInventoryItem Name Integer Integer - -instance InputOutput InventoryItem where - input bs0 = case input bs0 of - (n, bs1) -> - case input bs1 of - (fp, bs2) -> - case input bs2 of - (from, bs3) -> - case input bs3 of - (to, bs4) -> - (InventoryItem n fp from to, bs4) - valid bs0 _ = case valid bs0 (undefined :: Name) of - Left err -> Left err - Right bs1 -> - case valid bs1 (undefined :: ByteString) of - Left err -> Left err - Right bs2 -> - case valid bs2 (undefined :: Integer) of - Left err -> Left err - Right bs3 -> - valid bs3 (undefined :: Integer) - output (InventoryItem n fp from to) - = output n `BS.append` - output fp `BS.append` - output from `BS.append` - output to - hunk ./camp-repository/Camp/Repository.hs 127 --- XXX Both Integer's should be Bytes -data Content = Content ByteString{- FilePath -} Integer Integer - -inventoryToContents :: [InventoryItem] -> [Content] -inventoryToContents [] = [] -inventoryToContents (InventoryItem _ fp from to : is) - = let f cur [] = [Content fp from cur] - f cur is'@(InventoryItem _ fp' from' to' : is'') - | (fp == fp') && (cur + 1 == from') = f to' is'' - | otherwise = Content fp from cur : inventoryToContents is' - in f to is - -inventoryToCompactRelativeInventory :: [InventoryItem] - -> [RelativeInventoryItem] -inventoryToCompactRelativeInventory = inventoryToCompactRelativeInventoryFrom 0 - --- XXX Make this local to the above -inventoryToCompactRelativeInventoryFrom :: Integer{- XXX Bytes -} - -> [InventoryItem] - -> [RelativeInventoryItem] -inventoryToCompactRelativeInventoryFrom _ [] = [] -inventoryToCompactRelativeInventoryFrom cur (InventoryItem n _ from to : is) - = let to' = cur + (to - from) - in RelativeInventoryItem n cur to' : - inventoryToCompactRelativeInventoryFrom (to' + 1) is - -relativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Integer{- XXX Bytes -} - -> [RelativeInventoryItem] - -> [InventoryItem] -relativeInventoryToInventory _ _ [] = [] -relativeInventoryToInventory fp offset (RelativeInventoryItem n from to : is) - = InventoryItem n fp (from + offset) (to + offset) - : relativeInventoryToInventory fp offset is - hunk ./camp-repository/camp-repository.cabal 21 + Camp.Inventory hunk ./camp-bin/Camp/Command/Record.hs 6 +import Camp.InRepoFileName as InRepoFileName hunk ./camp-bin/Camp/Command/Record.hs 83 - let addFile = AddFile newPath + let fn = InRepoFileName.fromString newPath + addFile = AddFile fn hunk ./camp-bin/Camp/Command/Record.hs 89 - then Hunk newPath 0 0 - BS.empty 0 - new (BSC.count '\n' new) - else Hunk newPath 0 0 - BS.empty 1 - new (BSC.count '\n' new + 1) + then Hunk fn 0 0 + BS.empty 0 + new (BSC.count '\n' new) + else Hunk fn 0 0 + BS.empty 1 + new (BSC.count '\n' new + 1) hunk ./camp-bin/Camp/Command/Record.hs 100 - let rmFile = RmFile newPath + let fn = InRepoFileName.fromString newPath + rmFile = RmFile fn hunk ./camp-bin/Camp/Command/Record.hs 105 - (BSC.last old == '\n') - then Hunk newPath 0 0 - old (BSC.count '\n' old) - BS.empty 0 - else Hunk newPath 0 0 - old (BSC.count '\n' old + 1) - BS.empty 1 + (BSC.last old == '\n') + then Hunk fn 0 0 + old (BSC.count '\n' old) + BS.empty 0 + else Hunk fn 0 0 + old (BSC.count '\n' old + 1) + BS.empty 1 addfile ./camp-core/Camp/InRepoFileName.hs hunk ./camp-core/Camp/InRepoFileName.hs 1 + +module Camp.InRepoFileName + (InRepoFileName, fromString, fromByteString, toFilePath) + where + +import Camp.Patch.InputOutput 
+import Data.ByteString.Lazy.Char8 (ByteString) +import qualified Data.ByteString.Lazy.Char8 as BSC +import System.FilePath + +newtype InRepoFileName = InRepoFileName FilePath + +instance Eq InRepoFileName where + InRepoFileName fn1 == InRepoFileName fn2 = fn1 == fn2 + +instance Show InRepoFileName where + showsPrec _ (InRepoFileName fn) = showString fn + +-- XXX Need to do the right checks in the right method (input vs valid) +instance InputOutput InRepoFileName where + input bs0 = case input bs0 of + (fnBS, bs1) -> (fromByteString fnBS, bs1) + output fn = output $ BSC.pack $ toFilePath fn + valid bs0 _ = valid bs0 (undefined :: ByteString) + +-- XXX We should check that we haven't passed in "../foo", "/foo" etc, +-- check for case-sensitive filenames, filenames like "NUL", think about +-- files systems that use unicode encodings, etc +fromString :: String -> InRepoFileName +fromString fp = let fp' = normalise fp + in InRepoFileName fp' + +fromByteString :: ByteString -> InRepoFileName +fromByteString = fromString . BSC.unpack + +toFilePath :: InRepoFileName -> FilePath +toFilePath (InRepoFileName fp) = fp + hunk ./camp-core/Camp/Patch/Apply.hs 6 +import Camp.InRepoFileName as InRepoFileName hunk ./camp-core/Camp/Patch/Apply.hs 12 -type ApplyState = Maybe (FilePath, -- Filename we are dealing with +type ApplyState = Maybe (InRepoFileName, -- Filename we are dealing with hunk ./camp-core/Camp/Patch/Apply.hs 22 -flush (Just (fp, _, start, finish)) - = BS.writeFile fp (start `BS.append` finish) +flush (Just (fn, _, start, finish)) + = do let fp = InRepoFileName.toFilePath fn + BS.writeFile fp (start `BS.append` finish) hunk ./camp-core/Camp/Patch/Primitive.hs 4 +import Camp.InRepoFileName as InRepoFileName hunk ./camp-core/Camp/Patch/Primitive.hs 24 - AddDir :: FilePath -> Primitive from to - RmDir :: FilePath -> Primitive from to - MvDir :: FilePath -> FilePath -> Primitive from to - AddFile :: FilePath -> Primitive from to - RmFile :: FilePath -> Primitive from to - MvFile :: FilePath -> FilePath -> Primitive from to - Hunk :: FilePath + AddDir :: InRepoFileName -> Primitive from to + RmDir :: InRepoFileName -> Primitive from to + MvDir :: InRepoFileName -> InRepoFileName -> Primitive from to + AddFile :: InRepoFileName -> Primitive from to + RmFile :: InRepoFileName -> Primitive from to + MvFile :: InRepoFileName -> InRepoFileName -> Primitive from to + Hunk :: InRepoFileName hunk ./camp-core/Camp/Patch/Primitive.hs 38 - Binary :: FilePath + Binary :: InRepoFileName hunk ./camp-core/Camp/Patch/Primitive.hs 47 - (fp, bs1) -> (AddDir (BSC.unpack fp), bs1) + (fn, bs1) -> (AddDir fn, bs1) hunk ./camp-core/Camp/Patch/Primitive.hs 49 - (fp, bs1) -> (RmDir (BSC.unpack fp), bs1) + (fn, bs1) -> (RmDir fn, bs1) hunk ./camp-core/Camp/Patch/Primitive.hs 54 - (MvDir (BSC.unpack from) (BSC.unpack to), bs2) + (MvDir from to, bs2) hunk ./camp-core/Camp/Patch/Primitive.hs 56 - (fp, bs1) -> (AddFile (BSC.unpack fp), bs1) + (fn, bs1) -> (AddFile fn, bs1) hunk ./camp-core/Camp/Patch/Primitive.hs 58 - (fp, bs1) -> (RmFile (BSC.unpack fp), bs1) + (fn, bs1) -> (RmFile fn, bs1) hunk ./camp-core/Camp/Patch/Primitive.hs 63 - (MvFile (BSC.unpack from) (BSC.unpack to), bs2) + (MvFile from to, bs2) hunk ./camp-core/Camp/Patch/Primitive.hs 65 - (fp, bs1) -> + (fn, bs1) -> hunk ./camp-core/Camp/Patch/Primitive.hs 78 - (Hunk (BSC.unpack fp) + (Hunk fn hunk ./camp-core/Camp/Patch/Primitive.hs 87 - (fp, bs1) -> + (fn, bs1) -> hunk ./camp-core/Camp/Patch/Primitive.hs 92 - (Binary (BSC.unpack fp) oldBytes newBytes, bs3) + 
(Binary fn oldBytes newBytes, bs3) hunk ./camp-core/Camp/Patch/Primitive.hs 131 - output (AddDir fp) = 0 `BS.cons` output (BSC.pack fp) - output (RmDir fp) = 1 `BS.cons` output (BSC.pack fp) - output (MvDir from to) = 2 `BS.cons` output (BSC.pack from) - `BS.append` output (BSC.pack to) - output (AddFile fp) = 3 `BS.cons` output (BSC.pack fp) - output (RmFile fp) = 4 `BS.cons` output (BSC.pack fp) - output (MvFile from to) = 5 `BS.cons` output (BSC.pack from) - `BS.append` output (BSC.pack to) - output (Hunk fp skipBytes skipLines oldBytes oldLines newBytes newLines) - = 6 `BS.cons` output (BSC.pack fp) + output (AddDir fn) = 0 `BS.cons` output fn + output (RmDir fn) = 1 `BS.cons` output fn + output (MvDir from to) = 2 `BS.cons` output from + `BS.append` output to + output (AddFile fn) = 3 `BS.cons` output fn + output (RmFile fn) = 4 `BS.cons` output fn + output (MvFile from to) = 5 `BS.cons` output from + `BS.append` output to + output (Hunk fn skipBytes skipLines oldBytes oldLines newBytes newLines) + = 6 `BS.cons` output fn hunk ./camp-core/Camp/Patch/Primitive.hs 147 - output (Binary fp oldBytes newBytes) - = 7 `BS.cons` output (BSC.pack fp) + output (Binary fn oldBytes newBytes) + = 7 `BS.cons` output fn hunk ./camp-core/Camp/Patch/Primitive.hs 153 - ppr (AddDir fp) = text "AddDir" <+> text (show fp) - ppr (RmDir fp) = text "RmDir" <+> text (show fp) + ppr (AddDir fn) = text "AddDir" <+> text (show fn) + ppr (RmDir fn) = text "RmDir" <+> text (show fn) hunk ./camp-core/Camp/Patch/Primitive.hs 157 - ppr (AddFile fp) = text "AddFile" <+> text (show fp) - ppr (RmFile fp) = text "RmFile" <+> text (show fp) + ppr (AddFile fn) = text "AddFile" <+> text (show fn) + ppr (RmFile fn) = text "RmFile" <+> text (show fn) hunk ./camp-core/Camp/Patch/Primitive.hs 161 - ppr (Hunk fp skipBytes skipLines oldBytes oldLines newBytes newLines) - = text "Hunk" <+> text (show fp) + ppr (Hunk fn skipBytes skipLines oldBytes oldLines newBytes newLines) + = text "Hunk" <+> text (show fn) hunk ./camp-core/Camp/Patch/Primitive.hs 264 - invert (AddDir fp) = RmDir fp - invert (RmDir fp) = AddDir fp + invert (AddDir fn) = RmDir fn + invert (RmDir fn) = AddDir fn hunk ./camp-core/Camp/Patch/Primitive.hs 267 - invert (AddFile fp) = RmFile fp - invert (RmFile fp) = AddFile fp + invert (AddFile fn) = RmFile fn + invert (RmFile fn) = AddFile fn hunk ./camp-core/Camp/Patch/Primitive.hs 270 - invert (Hunk fp skipBytes skipLines oldBytes oldLines newBytes newLines) - = Hunk fp skipBytes skipLines newBytes newLines oldBytes oldLines - invert (Binary fp oldBytes newBytes) = Binary fp newBytes oldBytes + invert (Hunk fn skipBytes skipLines oldBytes oldLines newBytes newLines) + = Hunk fn skipBytes skipLines newBytes newLines oldBytes oldLines + invert (Binary fn oldBytes newBytes) = Binary fn newBytes oldBytes hunk ./camp-core/Camp/Patch/Primitive.hs 279 - AddDir fp -> do fileExists <- doesFileExist fp + AddDir fn -> do let fp = InRepoFileName.toFilePath fn + fileExists <- doesFileExist fp hunk ./camp-core/Camp/Patch/Primitive.hs 286 - RmDir fp -> do -- XXX Should check for emptiness + RmDir fn -> do let fp = InRepoFileName.toFilePath fn + -- XXX Should check for emptiness hunk ./camp-core/Camp/Patch/Primitive.hs 291 - MvDir from to -> + MvDir fromFn toFn -> hunk ./camp-core/Camp/Patch/Primitive.hs 293 + let from = InRepoFileName.toFilePath fromFn + to = InRepoFileName.toFilePath toFn hunk ./camp-core/Camp/Patch/Primitive.hs 304 - AddFile fp -> do fileExists <- doesFileExist fp + AddFile fn -> do let fp = 
InRepoFileName.toFilePath fn + fileExists <- doesFileExist fp hunk ./camp-core/Camp/Patch/Primitive.hs 312 - RmFile fp -> + RmFile fn -> hunk ./camp-core/Camp/Patch/Primitive.hs 314 - Just (fp', _, start, finish) - | fp == fp' -> + Just (fn', _, start, finish) + | fn == fn' -> hunk ./camp-core/Camp/Patch/Primitive.hs 317 - then do -- The file might not exist, as we may be applying + then do let fp = InRepoFileName.toFilePath fn + -- The file might not exist, as we may be applying hunk ./camp-core/Camp/Patch/Primitive.hs 328 - error ("Not empty: " ++ show fp) + error ("Not empty: " ++ show fn) hunk ./camp-core/Camp/Patch/Primitive.hs 330 - do size <- withBinaryFile fp ReadMode hFileSize + do let fp = InRepoFileName.toFilePath fn + size <- withBinaryFile fp ReadMode hFileSize hunk ./camp-core/Camp/Patch/Primitive.hs 333 - then error ("Not empty: " ++ show fp) + then error ("Not empty: " ++ show fn) hunk ./camp-core/Camp/Patch/Primitive.hs 337 - MvFile from to -> + MvFile fromFn toFn -> hunk ./camp-core/Camp/Patch/Primitive.hs 339 + let from = InRepoFileName.toFilePath fromFn + to = InRepoFileName.toFilePath toFn hunk ./camp-core/Camp/Patch/Primitive.hs 351 - Hunk fp skipBytes _ oldBytes _ newBytes _ -> + Hunk fn skipBytes _ oldBytes _ newBytes _ -> hunk ./camp-core/Camp/Patch/Primitive.hs 353 - Just (fp', startBytes, start, finish) - | fp == fp' -> + Just (fn', startBytes, start, finish) + | fn == fn' -> hunk ./camp-core/Camp/Patch/Primitive.hs 358 - apply (Just (fp, 0, BS.empty, finish')) prim + apply (Just (fn, 0, BS.empty, finish')) prim hunk ./camp-core/Camp/Patch/Primitive.hs 370 - return (Just (fp, + return (Just (fn, hunk ./camp-core/Camp/Patch/Primitive.hs 378 + let fp = InRepoFileName.toFilePath fn hunk ./camp-core/Camp/Patch/Primitive.hs 384 - apply (Just (fp, 0, BS.empty, content)) prim + apply (Just (fn, 0, BS.empty, content)) prim hunk ./camp-core/Camp/Patch/Primitive.hs 386 - Binary fp oldBytes newBytes -> + Binary fn oldBytes newBytes -> hunk ./camp-core/Camp/Patch/Primitive.hs 388 + let fp = InRepoFileName.toFilePath fn hunk ./camp-core/camp-core.cabal 17 + Camp.InRepoFileName hunk ./camp-repository/Camp/Diff.hs 4 +import Camp.InRepoFileName as InRepoFileName hunk ./camp-repository/Camp/Diff.hs 25 - -- XXX We should actually normalise consistently everywhere, - -- and think about what normalisation actually means - newPath' = normalise newPath - return $ mkDiff newPath' 0 0 commonLines oldLines newLines + fn = InRepoFileName.fromString newPath + return $ mkDiff fn 0 0 commonLines oldLines newLines hunk ./camp-repository/Camp/Diff.hs 29 - FilePath -> Bytes -> Line + InRepoFileName -> Bytes -> Line hunk ./camp-repository/Camp/Diff.hs 33 -mkDiff fp skipBytes skipLines cs os ns +mkDiff fn skipBytes skipLines cs os ns hunk ./camp-repository/Camp/Diff.hs 41 - -> mkDiff fp (skipBytes + BS.length common + 1{- '\n' -}) + -> mkDiff fn (skipBytes + BS.length common + 1{- '\n' -}) hunk ./camp-repository/Camp/Diff.hs 60 - Cons (Hunk fp skipBytes' skipLines + Cons (Hunk fn skipBytes' skipLines hunk ./camp-repository/Camp/Diff.hs 64 - (mkDiff fp skipBytes skipLines cs os' ns) + (mkDiff fn skipBytes skipLines cs os' ns) hunk ./camp-repository/Camp/Diff.hs 70 - in Cons (Hunk fp skipBytes' skipLines + in Cons (Hunk fn skipBytes' skipLines hunk ./camp-repository/Camp/Diff.hs 73 - (mkDiff fp skipBytes'' skipLines' cs os ns') + (mkDiff fn skipBytes'' skipLines' cs os ns') hunk ./camp-repository/Camp/Diff.hs 84 - in Cons (Hunk fp skipBytes skipLines + in Cons (Hunk fn skipBytes skipLines hunk 
./camp-repository/Camp/Diff.hs 87 - (mkDiff fp skipBytes' skipLines' cs os' ns') + (mkDiff fn skipBytes' skipLines' cs os' ns') hunk ./camp-repository/Camp/Record.hs 20 + -- XXX This is linear in the number of patches hunk ./camp-repository/Camp/Repository.hs 109 +-- XXX Currently we assume this is atomic...which probably isn't true hunk ./camp-core/Camp/InRepoFileName.hs 2 +-- XXX Rename to RootedFileName? + hunk ./camp-core/Camp/InRepoFileName.hs 13 +-- XXX Should we use [Word8] instead of FilePath? hunk ./camp-core/Camp/InRepoFileName.hs 31 --- files systems that use unicode encodings, etc +-- files systems that use unicode encodings, etc. See eg: +-- * http://en.wikipedia.org/wiki/Filename +-- * http://en.wikipedia.org/wiki/Comparison_of_file_systems + addfile ./camp-bin/Camp/Options.hs hunk ./camp-bin/Camp/Options.hs 1 + +module Camp.Options where + +import System.Console.GetOpt + +-- XXX Should we put the various options in their own modules? +-- Or in the command modules? + +data Verbosity = Silent + | Normal + | Verbose + +type ChangeGeneralFlags = GeneralFlags -> Either String GeneralFlags +data GeneralFlags = GeneralFlags { + gfVerbosity :: Verbosity, + gfLog :: Bool + } + +generalOpts :: [OptDescr ChangeGeneralFlags] +generalOpts = [ + Option ['v'] ["verbose"] (OptArg setVerbosity "Verbosity") "verbosity", + Option ['l'] ["log"] (NoArg (setLog True)) "logging", + Option [] ["no-log"] (NoArg (setLog False)) "no logging" + ] + +setLog :: Bool -> ChangeGeneralFlags +setLog b gf = Right $ gf { gfLog = b } + +setVerbosity :: Maybe String -> ChangeGeneralFlags +setVerbosity Nothing gf = Right $ gf { gfVerbosity = Verbose } +setVerbosity (Just "0") gf = Right $ gf { gfVerbosity = Silent } +setVerbosity (Just "1") gf = Right $ gf { gfVerbosity = Normal } +setVerbosity (Just "2") gf = Right $ gf { gfVerbosity = Verbose } +setVerbosity (Just s) gf = Left ("Bad verbosity: " ++ show s) + +defaultGeneralFlags :: GeneralFlags +defaultGeneralFlags = GeneralFlags { + gfVerbosity = Normal, + gfLog = False + } + +parseGeneralFlags :: [String] -> Either [String] (GeneralFlags, [String]) +parseGeneralFlags args = case getOpt RequireOrder generalOpts args of + (fs, afterOpts, []) -> + case apply fs defaultGeneralFlags of + Left err -> Left [err] + Right flags -> Right (flags, afterOpts) + (fs, afterOpts, errs) -> + Left errs + where apply [] flags = Right flags + apply (f:fs) flags = case f flags of + Left err -> Left err + Right flags' -> apply fs flags + +------------------------------------------------------------- + hunk ./camp-bin/camp.cabal 28 + Camp.Options hunk ./camp-bin/Camp/Command/Add.hs 4 +import Camp.Options hunk ./camp-bin/Camp/Command/Add.hs 7 -add :: Log -> Repository -> [String] -> IO () -add _ _ [] = error "No arguments to add" -add _ r paths = do current <- readAdds r - writeAdds r (current ++ paths) +add :: GeneralFlags -> [String] -> IO () +add _ [] = error "No arguments to add" +add _ paths = do r <- getRepo + l <- startLog r + current <- readAdds r + writeAdds r (current ++ paths) + endLog r l hunk ./camp-bin/Camp/Command/Get.hs 4 +import Camp.Options hunk ./camp-bin/Camp/Command/Get.hs 8 -get :: Log -> Repository -> [String] -> IO () -get l localRepo [remoteRepoPath] +get :: GeneralFlags -> [String] -> IO () +get _ [remoteRepoPath] hunk ./camp-bin/Camp/Command/Get.hs 11 + + -- XXX By the time we are creating a local repo we should be + -- reasonably sure that there really is a remote repo + localRepo <- createRepo + l <- startLog localRepo + hunk 
./camp-bin/Camp/Command/Get.hs 45 -get _ _ _ = error "XXX Bad arguments to get" + + endLog localRepo l +get _ _ = error "XXX Bad arguments to get" hunk ./camp-bin/Camp/Command/Init.hs 4 +import Camp.Options hunk ./camp-bin/Camp/Command/Init.hs 7 -initialise :: Log -> Repository -> [String] -> IO () -initialise _ r [] = initialiseRepo r -initialise _ _ _ = error "Unknown arguments to initialise" +initialise :: GeneralFlags -> [String] -> IO () +initialise _ [] = do r <- createRepo + l <- startLog r + initialiseRepo r + endLog r l +initialise _ _ = error "Unknown arguments to initialise" hunk ./camp-bin/Camp/Command/Inventory.hs 4 +import Camp.Options hunk ./camp-bin/Camp/Command/Inventory.hs 10 -inventory :: Log -> Repository -> [String] -> IO () -inventory _ r [] = do i <- readInventory r - let ns = [ n | InventoryItem n _ _ _ <- i ] - mapM_ (putStrLn . pprint) ns -inventory _ localRepo [remoteRepoPath] - = do remoteRepo <- mkRepo remoteRepoPath +inventory :: GeneralFlags -> [String] -> IO () +inventory _ [] = do r <- getRepo + l <- startLog r + i <- readInventory r + let ns = [ n | InventoryItem n _ _ _ <- i ] + mapM_ (putStrLn . pprint) ns + endLog r l +inventory _ [remoteRepoPath] + = do localRepo <- getRepo + l <- startLog localRepo + remoteRepo <- mkRepo remoteRepoPath hunk ./camp-bin/Camp/Command/Inventory.hs 34 -inventory _ _ _ = error "Bad arguments to inventory" + endLog localRepo l +inventory _ _ = error "Bad arguments to inventory" hunk ./camp-bin/Camp/Command/Pull.hs 5 +import Camp.Options hunk ./camp-bin/Camp/Command/Pull.hs 20 -pull :: Log -> Repository -> [String] -> IO () -pull l localRepo (remoteRepoPath : wantedPatches) +pull :: GeneralFlags -> [String] -> IO () +pull _ (remoteRepoPath : wantedPatches) hunk ./camp-bin/Camp/Command/Pull.hs 23 + localRepo <- getRepo + l <- startLog localRepo hunk ./camp-bin/Camp/Command/Pull.hs 66 -pull _ _ [] = error "XXX No arguments to pull" + endLog localRepo l +pull _ [] = error "XXX No arguments to pull" hunk ./camp-bin/Camp/Command/Record.hs 7 +import Camp.Options hunk ./camp-bin/Camp/Command/Record.hs 27 -record :: Log -> Repository -> [String] -> IO () -record _ r [] = do n <- genName r - recordName r n Nothing True -record _ r ["-a"] = do n <- genName r - recordName r n Nothing False -record _ r ["-a", "-m", short] = do n <- genName r - recordName r n (Just short) False -record _ r [n] = recordName r (Name Positive (BSC.pack n)) Nothing True -record _ r ["-a", n] = recordName r (Name Positive (BSC.pack n)) Nothing False -record _ _ _ = error "Unknown arguments to record" +record :: GeneralFlags -> [String] -> IO () +record _ [] = do r <- getRepo + l <- startLog r + n <- genName r + recordName r n Nothing True + endLog r l +record _ ["-a"] = do r <- getRepo + l <- startLog r + n <- genName r + recordName r n Nothing False + endLog r l +record _ ["-a", "-m", short] = do r <- getRepo + l <- startLog r + n <- genName r + recordName r n (Just short) False + endLog r l +record _ [n] = do r <- getRepo + l <- startLog r + recordName r (Name Positive (BSC.pack n)) Nothing True + endLog r l +record _ ["-a", n] = do r <- getRepo + l <- startLog r + recordName r (Name Positive (BSC.pack n)) Nothing False + endLog r l +record _ _ = error "Unknown arguments to record" hunk ./camp-bin/Camp/Command/Show.hs 4 +import Camp.Options hunk ./camp-bin/Camp/Command/Show.hs 9 -showC :: Log -> Repository -> [String] -> IO () -showC _ r [wanted] - = do inventory <- readInventory r +showC :: GeneralFlags -> [String] -> IO () +showC _ [wanted] + = do r <- 
getRepo + inventory <- readInventory r hunk ./camp-bin/Camp/Command/Show.hs 28 -showC _ _ _ = error "XXX show" +showC _ _ = error "XXX show" hunk ./camp-bin/Camp/Main.hs 12 -import Camp.Repository +import Camp.Options hunk ./camp-bin/Camp/Main.hs 79 - = do repo <- case args of - "get" : _ -> createRepo - "init" : _ -> createRepo - _ -> getRepo - l <- startLog repo args - doIt l repo args `catches` - [Handler $ \e -> throw (e :: ExitCode), - Handler $ \e -> do - logException l e - hPutStrLn stderr "Got an exception:" - hPutStrLn stderr $ show e] - endLog repo l + = doIt args `catches` + [Handler $ \e -> throw (e :: ExitCode), + Handler $ \e -> do + -- XXX This should be in the withLog function + -- logException l e + hPutStrLn stderr "Got an exception:" + hPutStrLn stderr $ show (e :: SomeException)] hunk ./camp-bin/Camp/Main.hs 87 -doIt :: Log -> Repository -> [String] -> IO () -doIt l r args - = case args of - "add" : args' -> add l r args' - "get" : args' -> get l r args' - "init" : args' -> initialise l r args' - "inventory" : args' -> inventory l r args' - "pull" : args' -> pull l r args' - "record" : args' -> record l r args' - "show" : args' -> showC l r args' - _ -> error "Unrecognised args" +doIt :: [String] -> IO () +doIt args + = case parseGeneralFlags args of + Right (gf, cmd : args') -> + case cmd of + "add" -> add gf args' + "get" -> get gf args' + "init" -> initialise gf args' + "inventory" -> inventory gf args' + "pull" -> pull gf args' + "record" -> record gf args' + "show" -> showC gf args' + _ -> error "Unrecognised command" -- XXX Give better errors + _ -> error "Unrecognised args" -- XXX Give better errors hunk ./camp-bin/Camp/Options.hs 2 -module Camp.Options where +module Camp.Options (GeneralFlags(..), parseGeneralFlags) where hunk ./camp-bin/Camp/Options.hs 34 -setVerbosity (Just s) gf = Left ("Bad verbosity: " ++ show s) +setVerbosity (Just s) _ = Left ("Bad verbosity: " ++ show s) hunk ./camp-bin/Camp/Options.hs 48 - (fs, afterOpts, errs) -> + (_, _, errs) -> hunk ./camp-bin/Camp/Options.hs 53 - Right flags' -> apply fs flags - -------------------------------------------------------------- + Right flags' -> apply fs flags' hunk ./camp-repository/Camp/Repository.hs 56 +import System.Environment hunk ./camp-repository/Camp/Repository.hs 316 -startLog :: Repository -> [String] -> IO Log -startLog r args - = do createDirectoryIfMissing False (logsDir r) +-- XXX Should use a withLog function instead +startLog :: Repository -> IO Log +startLog r + = do args <- getArgs + createDirectoryIfMissing False (logsDir r) move ./camp-bin/Camp/Options.hs ./camp-repository/Camp/Options.hs hunk ./camp-bin/Camp/Command/Add.hs 9 -add _ paths = do r <- getRepo - l <- startLog r - current <- readAdds r - writeAdds r (current ++ paths) - endLog r l +add gf paths = do r <- getRepo + withLog gf r $ \_ -> do + current <- readAdds r + writeAdds r (current ++ paths) hunk ./camp-bin/Camp/Command/Get.hs 9 -get _ [remoteRepoPath] +get gf [remoteRepoPath] hunk ./camp-bin/Camp/Command/Get.hs 15 - l <- startLog localRepo + withLog gf localRepo $ \l -> do + logRepo l "remote" remoteRepo + remoteInventory <- readInventoryLazily remoteRepo + patches <- getMegaPatches remoteRepo remoteInventory + initialiseRepo localRepo + (filename, offset) <- putMegaPatches localRepo patches + -- XXX We need to make a local copy rather than reading from + -- the remote twice + remoteInventory' <- readInventoryLazily remoteRepo + let relativeInventory = inventoryToCompactRelativeInventory + remoteInventory' + 
localInventory = relativeInventoryToInventory filename offset + relativeInventory + writeInventory localRepo localInventory + -- XXX This reading should actually check that the inventory + -- aligns correctly with the patch text, whereas the default + -- readMegaPatches will just parse the whole sequence. + -- Otherwise we should check at an earlier stage that the + -- inventory correctly points at the patch boundaries. hunk ./camp-bin/Camp/Command/Get.hs 35 - logRepo l "remote" remoteRepo - remoteInventory <- readInventoryLazily remoteRepo - patches <- getMegaPatches remoteRepo remoteInventory - initialiseRepo localRepo - (filename, offset) <- putMegaPatches localRepo patches - -- XXX We need to make a local copy rather than reading from - -- the remote twice - remoteInventory' <- readInventoryLazily remoteRepo - let relativeInventory = inventoryToCompactRelativeInventory - remoteInventory' - localInventory = relativeInventoryToInventory filename offset - relativeInventory - writeInventory localRepo localInventory - -- XXX This reading should actually check that the inventory - -- aligns correctly with the patch text, whereas the default - -- readMegaPatches will just parse the whole sequence. - -- Otherwise we should check at an earlier stage that the - -- inventory correctly points at the patch boundaries. + -- We read the inventory and patches twice to avoid a space leak + localInventory' <- readInventoryLazily localRepo + patches' <- readMegaPatches localRepo localInventory' + applyToPristine localRepo patches' hunk ./camp-bin/Camp/Command/Get.hs 40 - -- We read the inventory and patches twice to avoid a space leak - localInventory' <- readInventoryLazily localRepo - patches' <- readMegaPatches localRepo localInventory' - applyToPristine localRepo patches' - - -- Rather than applying all the patches again with - -- applyToWorking, we just copy the result from pristine. - -- This is normally much faster. - copyPristineToWorking localRepo - - endLog localRepo l + -- Rather than applying all the patches again with + -- applyToWorking, we just copy the result from pristine. + -- This is normally much faster. 
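The command modules are being moved from explicit startLog/endLog pairs to a withLog wrapper: when the --log flag is set it brackets the command, so the closing snapshot is taken even if the command throws, and otherwise it hands the command a NoLog value whose logging operations are all no-ops. A minimal sketch of that shape, with hypothetical Flags, startLogging and endLogging standing in for Camp's GeneralFlags and its before/after repository copies:

import Control.Exception (bracket)

-- Hypothetical stand-ins for Camp's GeneralFlags and Log.
newtype Flags = Flags { flagLog :: Bool }

data LogHandle = LogDir FilePath | NoLogging

-- Placeholder start/end actions; the real ones record the command's arguments
-- and copy the repository before and after the command runs.
startLogging :: IO LogHandle
startLogging = do putStrLn "starting log"
                  return (LogDir "logs/1")

endLogging :: LogHandle -> IO ()
endLogging NoLogging   = return ()
endLogging (LogDir fp) = putStrLn ("closing log in " ++ fp)

-- Bracket the command with the log when requested; otherwise run it with a
-- handle whose operations all do nothing.
withLogging :: Flags -> (LogHandle -> IO a) -> IO a
withLogging flags act
  | flagLog flags = bracket startLogging endLogging act
  | otherwise     = act NoLogging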
+ copyPristineToWorking localRepo hunk ./camp-bin/Camp/Command/Init.hs 8 -initialise _ [] = do r <- createRepo - l <- startLog r - initialiseRepo r - endLog r l +initialise gf [] = do r <- createRepo + initialiseRepo r + withLog gf r $ \_ -> return () hunk ./camp-bin/Camp/Command/Inventory.hs 12 - l <- startLog r hunk ./camp-bin/Camp/Command/Inventory.hs 15 - endLog r l hunk ./camp-bin/Camp/Command/Inventory.hs 17 - l <- startLog localRepo hunk ./camp-bin/Camp/Command/Inventory.hs 31 - endLog localRepo l hunk ./camp-bin/Camp/Command/Pull.hs 21 -pull _ (remoteRepoPath : wantedPatches) +pull gf (remoteRepoPath : wantedPatches) hunk ./camp-bin/Camp/Command/Pull.hs 24 - l <- startLog localRepo - logRepo l "remote" remoteRepo - localInventory <- readInventory localRepo - remoteInventory <- readInventory remoteRepo - let localNames = [ n | InventoryItem n _ _ _ <- localInventory ] - remoteNames = [ n | InventoryItem n _ _ _ <- remoteInventory ] - localNameSet = Set.fromList localNames - remoteNameSet = Set.fromList remoteNames - commonNameSet = localNameSet `Set.intersection` remoteNameSet - -- We can skip over the prefix that is common to both repos - isCommon (InventoryItem n _ _ _) = n `Set.member` commonNameSet - localReadInventory = case span isCommon localInventory of - (_, x) -> x - remoteReadInventory = case span isCommon remoteInventory of - (_, x) -> x - -- XXX Should special-case remote = {} + withLog gf localRepo $ \l -> do + logRepo l "remote" remoteRepo + localInventory <- readInventory localRepo + remoteInventory <- readInventory remoteRepo + let localNames = [ n | InventoryItem n _ _ _ <- localInventory ] + remoteNames = [ n | InventoryItem n _ _ _ <- remoteInventory ] + localNameSet = Set.fromList localNames + remoteNameSet = Set.fromList remoteNames + commonNameSet = localNameSet `Set.intersection` remoteNameSet + -- We can skip over the prefix that is common to both repos + isCommon (InventoryItem n _ _ _) = n `Set.member` commonNameSet + localReadInventory = case span isCommon localInventory of + (_, x) -> x + remoteReadInventory = case span isCommon remoteInventory of + (_, x) -> x + -- XXX Should special-case remote = {} hunk ./camp-bin/Camp/Command/Pull.hs 41 - -- We need to commute the remainder so that it it partitioned - -- into the common patches, and those only in one of the repos - localReadPatches <- readMegaPatches localRepo localReadInventory - remoteReadPatches <- readMegaPatches remoteRepo remoteReadInventory - case commuteToPrefix commonNameSet localReadPatches of - -- XXX Should special-case local = {} - _ `Then` localPatches -> - case commuteToPrefix commonNameSet remoteReadPatches of - _ `Then` remotePatches -> - case wantedPatches of - [] -> - do res <- interactive describeMegaPatch remotePatches - case res of - remotePatches' `Then` _ -> + -- We need to commute the remainder so that it it partitioned + -- into the common patches, and those only in one of the repos + localReadPatches <- readMegaPatches localRepo localReadInventory + remoteReadPatches <- readMegaPatches remoteRepo remoteReadInventory + case commuteToPrefix commonNameSet localReadPatches of + -- XXX Should special-case local = {} + _ `Then` localPatches -> + case commuteToPrefix commonNameSet remoteReadPatches of + _ `Then` remotePatches -> + case wantedPatches of + [] -> + do res <- interactive describeMegaPatch remotePatches + case res of + remotePatches' `Then` _ -> + doPull localRepo localInventory localPatches remotePatches' + ["-a"] -> + do doPull localRepo localInventory localPatches 
remotePatches + _ -> + let wantedShortDescs = map BSC.pack wantedPatches + wantedNameSet = Set.fromList wantedShortDescs + in case tryCommuteToPrefix wantedNameSet remotePatches of + Just (remotePatches' `Then` _) -> hunk ./camp-bin/Camp/Command/Pull.hs 64 - ["-a"] -> - do doPull localRepo localInventory localPatches remotePatches - _ -> - let wantedShortDescs = map BSC.pack wantedPatches - wantedNameSet = Set.fromList wantedShortDescs - in case tryCommuteToPrefix wantedNameSet remotePatches of - Just (remotePatches' `Then` _) -> - doPull localRepo localInventory localPatches remotePatches' - Nothing -> - error "Can't pull those patches due to dependencies" - endLog localRepo l + Nothing -> + error "Can't pull those patches due to dependencies" hunk ./camp-bin/Camp/Command/Record.hs 28 -record _ [] = do r <- getRepo - l <- startLog r - n <- genName r - recordName r n Nothing True - endLog r l -record _ ["-a"] = do r <- getRepo - l <- startLog r - n <- genName r - recordName r n Nothing False - endLog r l -record _ ["-a", "-m", short] = do r <- getRepo - l <- startLog r - n <- genName r - recordName r n (Just short) False - endLog r l -record _ [n] = do r <- getRepo - l <- startLog r - recordName r (Name Positive (BSC.pack n)) Nothing True - endLog r l -record _ ["-a", n] = do r <- getRepo - l <- startLog r - recordName r (Name Positive (BSC.pack n)) Nothing False - endLog r l +record gf [] = do r <- getRepo + withLog gf r $ \_ -> do + n <- genName r + recordName r n Nothing True +record gf ["-a"] = do r <- getRepo + withLog gf r $ \_ -> do + n <- genName r + recordName r n Nothing False +record gf ["-a", "-m", short] = do r <- getRepo + withLog gf r $ \_ -> do + n <- genName r + recordName r n (Just short) False +record gf [n] = do r <- getRepo + withLog gf r $ \_ -> do + recordName r (Name Positive (BSC.pack n)) Nothing True +record gf ["-a", n] = do r <- getRepo + withLog gf r $ \_ -> do + recordName r (Name Positive (BSC.pack n)) Nothing False hunk ./camp-bin/camp.cabal 28 - Camp.Options hunk ./camp-repository/Camp/Repository.hs 30 + withLog, + -- XXX Get rid of these hunk ./camp-repository/Camp/Repository.hs 39 +import Camp.Options hunk ./camp-repository/Camp/Repository.hs 245 -initialiseRepo r = do -- We don't create things that startLog has already: - -- createDirectory logsDir - -- createDirectory repoRoot +initialiseRepo r = do createDirectory (logsDir r) + createDirectory (repoRoot r) hunk ./camp-repository/Camp/Repository.hs 304 -newtype Log = Log FilePath +data Log = Log FilePath + | NoLog hunk ./camp-repository/Camp/Repository.hs 312 +createLog NoLog = return () hunk ./camp-repository/Camp/Repository.hs 314 -logFile :: Log -> FilePath -> FilePath -logFile (Log fp) file = fp file +withLogFilePath :: Log -> FilePath -> (FilePath -> IO ()) -> IO () +withLogFilePath (Log fp) file f = f (fp file) +withLogFilePath NoLog _ _ = return () hunk ./camp-repository/Camp/Repository.hs 318 -logDirectory :: Log -> FilePath -> FilePath -logDirectory (Log fp) dir = fp dir +withLogDirectoryPath :: Log -> FilePath -> (FilePath -> IO ()) -> IO () +withLogDirectoryPath (Log fp) dir f = f (fp dir) +withLogDirectoryPath NoLog _ _ = return () + +withLog :: GeneralFlags -> Repository -> (Log -> IO a) -> IO a +withLog gf r f = if gfLog gf then bracket (startLog r) (endLog r) f + else f NoLog hunk ./camp-repository/Camp/Repository.hs 326 --- XXX Should use a withLog function instead hunk ./camp-repository/Camp/Repository.hs 329 - createDirectoryIfMissing False (logsDir r) - -- We create the repoRoot as 
otherwise copyRepo would have to - -- cope with it not existing - createDirectoryIfMissing False (repoRoot r) hunk ./camp-repository/Camp/Repository.hs 340 - writeBinaryFile (logFile l "args") (show args) + withLogFilePath l "args" $ \fp -> writeBinaryFile fp (show args) hunk ./camp-repository/Camp/Repository.hs 348 -logRepo l repoName r = copyRepo r (logDirectory l repoName) +logRepo l repoName r = withLogDirectoryPath l repoName $ \dp -> copyRepo r dp hunk ./camp-repository/Camp/Repository.hs 357 -logException l e = writeBinaryFile (logFile l "exception") (show e) +logException l e = withLogFilePath l "exception" + $ \fp -> writeBinaryFile fp (show e) hunk ./camp-repository/camp-repository.cabal 22 + Camp.Options hunk ./camp-bin/Camp/Command/Interactive.hs 2 -{-# OPTIONS -w #-} - hunk ./camp-bin/Camp/Command/Interactive.hs 4 -import Camp.Patch.Catch hunk ./camp-bin/Camp/Command/Interactive.hs 5 -import Camp.Patch.Equality -import Camp.Patch.MegaPatch -import Camp.Patch.Name -import Camp.Patch.Patch -import Camp.Patch.Pretty -import Camp.Patch.Primitive hunk ./camp-bin/Camp/Command/Interactive.hs 7 -import Camp.Repository -import Camp.Types -import Camp.Utils hunk ./camp-bin/Camp/Command/Interactive.hs 9 -import Data.ByteString.Lazy (ByteString) -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Lazy.Char8 as BSC hunk ./camp-bin/Camp/Command/Interactive.hs 10 -import System.Directory hunk ./camp-bin/Camp/Command/Interactive.hs 11 -import System.FilePath hunk ./camp-bin/Camp/Command/Add.hs 4 +import Camp.Logging hunk ./camp-bin/Camp/Command/Get.hs 4 +import Camp.Logging hunk ./camp-bin/Camp/Command/Init.hs 4 +import Camp.Logging hunk ./camp-bin/Camp/Command/Pull.hs 5 +import Camp.Logging hunk ./camp-bin/Camp/Command/Record.hs 7 +import Camp.Logging addfile ./camp-repository/Camp/Logging.hs hunk ./camp-repository/Camp/Logging.hs 1 + +-- This is for logging (read: debugging) purposes only + +module Camp.Logging ( + Log, + withLog, + -- XXX Get rid of these + startLog, + endLog, + logRepo, + logException, + ) where + +import Camp.Options +import Camp.Repository +import Camp.Utils + +import Control.Exception +import System.Directory +import System.Environment +import System.FilePath + +import Prelude hiding (getContents) -- XXX + +data Log = Log FilePath + | NoLog + +mkLog :: Repository -> Integer -> Log +mkLog r i = Log (logsDir r show i) + +createLog :: Log -> IO () +createLog (Log fp) = createDirectory fp +createLog NoLog = return () + +withLogFilePath :: Log -> FilePath -> (FilePath -> IO ()) -> IO () +withLogFilePath (Log fp) file f = f (fp file) +withLogFilePath NoLog _ _ = return () + +withLogDirectoryPath :: Log -> FilePath -> (FilePath -> IO ()) -> IO () +withLogDirectoryPath (Log fp) dir f = f (fp dir) +withLogDirectoryPath NoLog _ _ = return () + +withLog :: GeneralFlags -> Repository -> (Log -> IO a) -> IO a +withLog gf r f = if gfLog gf then bracket (startLog r) (endLog r) f + else f NoLog + +startLog :: Repository -> IO Log +startLog r + = do args <- getArgs + let nlf = nextLogFile r + exists <- doesFileExist nlf + nextLog <- if exists + then do xs <- readBinaryFile nlf + case maybeRead xs of + Just nextLog -> return nextLog + Nothing -> panic "Couldn't read nextLog" + else return 1 + writeBinaryFile nlf (show (nextLog + 1)) + let l = mkLog r nextLog + createLog l + withLogFilePath l "args" $ \fp -> writeBinaryFile fp (show args) + logRepo l "before" r + return l + +endLog :: Repository -> Log -> IO () +endLog r l = logRepo l "after" r + +logRepo :: 
Log -> FilePath -> Repository -> IO () +logRepo l repoName r = withLogDirectoryPath l repoName $ \dp -> copyRepo r dp + +#if __GLASGOW_HASKELL__ >= 609 +type ExceptionType = SomeException +#else +type ExceptionType = Exception +#endif + +logException :: Log -> ExceptionType -> IO () +logException l e = withLogFilePath l "exception" + $ \fp -> writeBinaryFile fp (show e) + hunk ./camp-repository/Camp/Repository.hs 28 - -- For logging (read: debugging) only: - Log, - withLog, - -- XXX Get rid of these - startLog, - endLog, - logRepo, - logException, + copyRepo, + logsDir, + nextLogFile, hunk ./camp-repository/Camp/Repository.hs 34 -import Camp.Options hunk ./camp-repository/Camp/Repository.hs 42 -import Control.Exception hunk ./camp-repository/Camp/Repository.hs 52 -import System.Environment hunk ./camp-repository/Camp/Repository.hs 284 ---------------------------------------------------------------------- --- This is for logging (read: debugging) purposes only - -copyRepo :: Repository -> FilePath -> IO () -copyRepo r to = copyTree (repoRoot r) to - -logsDir :: Repository -> FilePath -logsDir r = repoBase r "logs" - hunk ./camp-repository/Camp/Repository.hs 287 -data Log = Log FilePath - | NoLog - -mkLog :: Repository -> Integer -> Log -mkLog r i = Log (logsDir r show i) - -createLog :: Log -> IO () -createLog (Log fp) = createDirectory fp -createLog NoLog = return () - -withLogFilePath :: Log -> FilePath -> (FilePath -> IO ()) -> IO () -withLogFilePath (Log fp) file f = f (fp file) -withLogFilePath NoLog _ _ = return () - -withLogDirectoryPath :: Log -> FilePath -> (FilePath -> IO ()) -> IO () -withLogDirectoryPath (Log fp) dir f = f (fp dir) -withLogDirectoryPath NoLog _ _ = return () - -withLog :: GeneralFlags -> Repository -> (Log -> IO a) -> IO a -withLog gf r f = if gfLog gf then bracket (startLog r) (endLog r) f - else f NoLog - -startLog :: Repository -> IO Log -startLog r - = do args <- getArgs - let nlf = nextLogFile r - exists <- doesFileExist nlf - nextLog <- if exists - then do xs <- readBinaryFile nlf - case maybeRead xs of - Just nextLog -> return nextLog - Nothing -> panic "Couldn't read nextLog" - else return 1 - writeBinaryFile nlf (show (nextLog + 1)) - let l = mkLog r nextLog - createLog l - withLogFilePath l "args" $ \fp -> writeBinaryFile fp (show args) - logRepo l "before" r - return l - -endLog :: Repository -> Log -> IO () -endLog r l = logRepo l "after" r - -logRepo :: Log -> FilePath -> Repository -> IO () -logRepo l repoName r = withLogDirectoryPath l repoName $ \dp -> copyRepo r dp - -#if __GLASGOW_HASKELL__ >= 609 -type ExceptionType = SomeException -#else -type ExceptionType = Exception -#endif +logsDir :: Repository -> FilePath +logsDir r = repoBase r "logs" hunk ./camp-repository/Camp/Repository.hs 290 -logException :: Log -> ExceptionType -> IO () -logException l e = withLogFilePath l "exception" - $ \fp -> writeBinaryFile fp (show e) +copyRepo :: Repository -> FilePath -> IO () +copyRepo r to = copyTree (repoRoot r) to hunk ./camp-repository/camp-repository.cabal 22 + Camp.Logging hunk ./camp-repository/Camp/Repository.hs 8 + appendInventoryItem, hunk ./camp-repository/Camp/Repository.hs 88 +-- XXX This ought to be atomic given the current repo format, but it isn't +appendInventoryItem :: Repository -> InventoryItem -> IO () +appendInventoryItem r i + = do h <- openBinaryFile (inventoryFile r) AppendMode + BS.hPutStr h (output i) + hClose h + hunk ./camp-repository/Camp/Record.hs 20 - -- XXX This is linear in the number of patches - is <- readInventory r - 
writeInventory r (is ++ [i]) + appendInventoryItem r i hunk ./camp-core/Camp/Patch/Stream.hs 2 -module Camp.Patch.Stream (Stream(..)) where +module Camp.Patch.Stream (Stream(..), Stream2(..)) where hunk ./camp-core/Camp/Patch/Stream.hs 10 -newtype Stream p from to = Stream (Seq p from to) +newtype Stream a = Stream [a] hunk ./camp-core/Camp/Patch/Stream.hs 12 -instance InputOutput2 p => InputOutput (Stream p from to) where +instance InputOutput a => InputOutput (Stream a) where hunk ./camp-core/Camp/Patch/Stream.hs 15 + then [] + else case input bs0 of + (x, bs1) -> + x : f bs1 + valid bs _ = if BS.null bs + then Right BS.empty + else case valid bs (undefined :: a) of + Right bs'' -> + valid bs'' (undefined :: Stream a) + Left err -> Left err + output (Stream []) = BS.empty + output (Stream (x : xs)) = output x `BS.append` output (Stream xs) + +----- + +newtype Stream2 p from to = Stream2 (Seq p from to) + +instance InputOutput2 p => InputOutput (Stream2 p from to) where + input bs = (Stream2 (f bs), BS.empty) + where f bs0 = if BS.null bs0 hunk ./camp-core/Camp/Patch/Stream.hs 43 - valid bs'' (undefined :: Stream p mid to) + valid bs'' (undefined :: Stream2 p mid to) hunk ./camp-core/Camp/Patch/Stream.hs 45 - output (Stream Nil) = BS.empty - output (Stream (x `Cons` xs)) = output2 x `BS.append` output (Stream xs) + output (Stream2 Nil) = BS.empty + output (Stream2 (x `Cons` xs)) = output2 x `BS.append` output (Stream2 xs) hunk ./camp-repository/Camp/Repository.hs 104 - return (fst $ input content) + case fst $ input content of + Stream is -> return is hunk ./camp-repository/Camp/Repository.hs 112 - return (fst $ input content) + case fst $ input content of + Stream is -> return is hunk ./camp-repository/Camp/Repository.hs 117 -writeInventory r ns = BS.writeFile (inventoryFile r) (output ns) +writeInventory r ns = BS.writeFile (inventoryFile r) (output (Stream ns)) hunk ./camp-repository/Camp/Repository.hs 177 - (Stream s, _{- "" -}) -> return s + (Stream2 s, _{- "" -}) -> return s hunk ./camp-repository/Camp/Repository.hs 196 --- XXX This should use a variant writeMegaPatch that takes the handle --- and start size hunk ./camp-repository/Camp/Repository.hs 198 -writeMegaPatches r (Cons p ps) = do i <- writeMegaPatch r p - is <- writeMegaPatches r ps - return (i : is) +writeMegaPatches r ps + = do let fp = patchFile r + h <- openBinaryFile fp AppendMode + startSize <- hFileSize h + is <- hWriteMegaPatches fp h startSize ps + hClose h + return is + +hWriteMegaPatches :: FilePath -> Handle -> Integer{- XXX Bytes -} -> Seq MegaPatch from to + -> IO [InventoryItem] +hWriteMegaPatches _ _ _ Nil = return [] +hWriteMegaPatches fp h startSize (Cons p ps) + = do (endSize, i) <- hWriteMegaPatch fp h startSize p + is <- hWriteMegaPatches fp h endSize ps + return (i : is) hunk ./camp-repository/Camp/Repository.hs 215 -writeMegaPatch r m@(MegaPatch n _ _) +writeMegaPatch r m hunk ./camp-repository/Camp/Repository.hs 219 - BS.hPut h $ output m + (_, i) <- hWriteMegaPatch fp h startSize m + hClose h + return i + +hWriteMegaPatch :: FilePath -> Handle -> Integer{- XXX Bytes -} + -> MegaPatch from to + -> IO (Integer{- XXX Bytes -}, InventoryItem) +hWriteMegaPatch fp h startSize m@(MegaPatch n _ _) + = do BS.hPut h $ output m hunk ./camp-repository/Camp/Repository.hs 230 - return (InventoryItem n (BSC.pack fp) startSize (endSize - 1)) + return (endSize, + InventoryItem n (BSC.pack fp) startSize (endSize - 1)) hunk ./camp-core/Camp/Utils.hs 3 - myLines, myUnlines, mySplitAt, maybeRead, maybeReads, 
hGetLazily, + myLines, myUnlines, mySplitAt, maybeRead, maybeReads, + hGetBytes, hGetLazily, hFileSizeBytes, hSeekBytes, hunk ./camp-core/Camp/Utils.hs 11 +import Camp.Types + hunk ./camp-core/Camp/Utils.hs 115 --- XXX We have an Int/Int64 problem here -hGetLazily :: Int -> Handle -> Int -> IO ByteString +-- XXX Int can be smaller than bytes... +hGetBytes :: Handle -> Bytes -> IO ByteString +hGetBytes h len = BS.hGet h (fromIntegral len) + +-- XXX Need an audit of checking that sizes really are what we asked for +hGetLazily :: Bytes -> Handle -> Bytes -> IO ByteString hunk ./camp-core/Camp/Utils.hs 125 - = do chunk <- BS.hGet h (min chunkSize n) - let n' = n - fromIntegral (BS.length chunk) + = do chunk <- BS.hGet h (fromIntegral (min chunkSize n)) + let n' = n - BS.length chunk hunk ./camp-core/Camp/Utils.hs 132 +hFileSizeBytes :: Handle -> IO Bytes +hFileSizeBytes h = do size <- hFileSize h + return $ fromIntegral size + +hSeekBytes :: Handle -> SeekMode -> Bytes -> IO () +hSeekBytes h sm to = hSeek h sm (fromIntegral to) + hunk ./camp-repository/Camp/Inventory.hs 12 +import Camp.Types hunk ./camp-repository/Camp/Inventory.hs 19 -data InventoryItem = InventoryItem Name ByteString{-FilePath/Filename-} Integer Integer +data InventoryItem = InventoryItem Name -- MegaPatch name + ByteString -- Filename (relative to + -- working directory) + Bytes -- Offset + Bytes -- Length hunk ./camp-repository/Camp/Inventory.hs 25 -data RelativeInventoryItem = RelativeInventoryItem Name Integer Integer +data RelativeInventoryItem = RelativeInventoryItem Name -- MegaPatch name + Bytes -- Offset + Bytes -- Length hunk ./camp-repository/Camp/Inventory.hs 37 - (to, bs4) -> - (InventoryItem n fp from to, bs4) + (len, bs4) -> + (InventoryItem n fp from len, bs4) hunk ./camp-repository/Camp/Inventory.hs 45 - case valid bs2 (undefined :: Integer) of + case valid bs2 (undefined :: Bytes) of hunk ./camp-repository/Camp/Inventory.hs 48 - valid bs3 (undefined :: Integer) - output (InventoryItem n fp from to) + valid bs3 (undefined :: Bytes) + output (InventoryItem n fp from len) hunk ./camp-repository/Camp/Inventory.hs 53 - output to + output len hunk ./camp-repository/Camp/Inventory.hs 55 --- XXX Both Integer's should be Bytes -data Content = Content ByteString{- FilePath -} Integer Integer +data Content = Content ByteString{- FilePath -} Bytes Bytes hunk ./camp-repository/Camp/Inventory.hs 57 +-- When we have an inventory like +-- name1 myfile 100 3 +-- name2 myfile 103 6 +-- we don't want to download bytes 100-102 and 103-108 separately, so +-- this function will merge them into a single content of 100-108.
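The comment added above describes how adjacent inventory entries are coalesced before downloading. As a rough illustration of that merging, here is a self-contained sketch, not the patch's own code: Item, Range and toRanges are hypothetical, simplified stand-ins for InventoryItem, Content and inventoryToContents.

    -- Hypothetical, simplified model of the byte-range merging described above.
    type Bytes = Integer

    data Item  = Item  FilePath Bytes Bytes deriving Show   -- file, offset, length
    data Range = Range FilePath Bytes Bytes deriving Show   -- file, offset, length

    -- Coalesce runs of items that sit directly after one another in the
    -- same file, so offset 100/length 3 and offset 103/length 6 become a
    -- single range at offset 100 with length 9.
    toRanges :: [Item] -> [Range]
    toRanges [] = []
    toRanges (Item fp from len : is) = go (from + len) is
      where
        go end (Item fp' from' len' : rest)
          | fp == fp' && end == from' = go (end + len') rest
        go end rest = Range fp from (end - from) : toRanges rest

    main :: IO ()
    main = print (toRanges [Item "myfile" 100 3, Item "myfile" 103 6])

Running it prints a single merged range starting at offset 100 with length 9, i.e. bytes 100-108 fetched in one request.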
hunk ./camp-repository/Camp/Inventory.hs 64 -inventoryToContents (InventoryItem _ fp from to : is) +inventoryToContents (InventoryItem _ fp from len : is) hunk ./camp-repository/Camp/Inventory.hs 66 - f cur is'@(InventoryItem _ fp' from' to' : is'') - | (fp == fp') && (cur + 1 == from') = f to' is'' + f cur is'@(InventoryItem _ fp' from' len' : is'') + | (fp == fp') && (cur == from') = f (cur + len') is'' hunk ./camp-repository/Camp/Inventory.hs 69 - in f to is + in f (from + len) is hunk ./camp-repository/Camp/Inventory.hs 76 -inventoryToCompactRelativeInventoryFrom :: Integer{- XXX Bytes -} +inventoryToCompactRelativeInventoryFrom :: Bytes hunk ./camp-repository/Camp/Inventory.hs 80 -inventoryToCompactRelativeInventoryFrom cur (InventoryItem n _ from to : is) - = let to' = cur + (to - from) - in RelativeInventoryItem n cur to' : - inventoryToCompactRelativeInventoryFrom (to' + 1) is +inventoryToCompactRelativeInventoryFrom cur (InventoryItem n _ _ len : is) + = RelativeInventoryItem n cur len : + inventoryToCompactRelativeInventoryFrom (cur + len) is hunk ./camp-repository/Camp/Inventory.hs 84 -relativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Integer{- XXX Bytes -} +relativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Bytes hunk ./camp-repository/Camp/Inventory.hs 88 -relativeInventoryToInventory fp offset (RelativeInventoryItem n from to : is) - = InventoryItem n fp (from + offset) (to + offset) +relativeInventoryToInventory fp offset (RelativeInventoryItem n from len : is) + = InventoryItem n fp (from + offset) len hunk ./camp-repository/Camp/Repository.hs 41 +import Camp.Types hunk ./camp-repository/Camp/Repository.hs 136 - -> IO (ByteString{- FilePath -}, Integer{- XXX Bytes -}) + -> IO (ByteString{- FilePath -}, Bytes) hunk ./camp-repository/Camp/Repository.hs 140 - startSize <- hFileSize h + startSize <- hFileSizeBytes h hunk ./camp-repository/Camp/Repository.hs 157 -getContent (Repository r) (Content fp from to) +getContent (Repository r) (Content fp from len) hunk ./camp-repository/Camp/Repository.hs 160 - hSeek h AbsoluteSeek from - content <- hGetLazily BS.defaultChunkSize + hSeekBytes h AbsoluteSeek from + content <- hGetLazily (fromIntegral BS.defaultChunkSize) hunk ./camp-repository/Camp/Repository.hs 163 - (fromIntegral (1 + to - from)) + len hunk ./camp-repository/Camp/Repository.hs 165 -{- - = do -- putStrLn ("Getting content: XXX") -- XXX Proper logging please - h <- openBinaryFile (r BSC.unpack fp) ReadMode - hSeek h AbsoluteSeek from - content <- BS.hGet h (fromIntegral (1 + to - from)) - hClose h - return content --} hunk ./camp-repository/Camp/Repository.hs 181 -readMegaPatch (Repository r) (InventoryItem _ fp from to) +readMegaPatch (Repository r) (InventoryItem _ fp from len) hunk ./camp-repository/Camp/Repository.hs 183 - hSeek h AbsoluteSeek from - content <- BS.hGet h (fromIntegral (1 + to - from)) + hSeekBytes h AbsoluteSeek from + content <- hGetBytes h len hunk ./camp-repository/Camp/Repository.hs 194 - startSize <- hFileSize h + startSize <- hFileSizeBytes h hunk ./camp-repository/Camp/Repository.hs 199 -hWriteMegaPatches :: FilePath -> Handle -> Integer{- XXX Bytes -} -> Seq MegaPatch from to +hWriteMegaPatches :: FilePath -> Handle -> Bytes -> Seq MegaPatch from to hunk ./camp-repository/Camp/Repository.hs 211 - startSize <- hFileSize h + startSize <- hFileSizeBytes h hunk ./camp-repository/Camp/Repository.hs 216 -hWriteMegaPatch :: FilePath -> Handle -> Integer{- XXX Bytes -} - -> MegaPatch from to - -> IO (Integer{- XXX Bytes 
-}, InventoryItem) +hWriteMegaPatch :: FilePath -> Handle -> Bytes -> MegaPatch from to + -> IO (Bytes, InventoryItem) hunk ./camp-repository/Camp/Repository.hs 220 - endSize <- hFileSize h + endSize <- hFileSizeBytes h + let len = 1 + endSize - startSize hunk ./camp-repository/Camp/Repository.hs 223 - return (endSize, - InventoryItem n (BSC.pack fp) startSize (endSize - 1)) + return (endSize, InventoryItem n (BSC.pack fp) startSize len) hunk ./camp-bin/Camp/Command/Get.hs 8 +import Control.Exception as Exception +import System.Directory +import System.FilePath +import System.IO.Error + hunk ./camp-bin/Camp/Command/Get.hs 20 + d <- newDirectory $ takeFileName + $ dropTrailingPathSeparator remoteRepoPath + setCurrentDirectory d hunk ./camp-bin/Camp/Command/Get.hs 55 +-- XXX Should sanity check the filepath, e.g. not "", "/foo" or "foo/bar" +-- XXX Should we also check for non-printable-ascii chars or something? +newDirectory :: FilePath -> IO FilePath +newDirectory fp = do createDirectory fp + return fp + `whenAlreadyExists` f 0 + where f i = do let fp' = fp ++ "_" ++ show i + createDirectory fp' + return fp' + `whenAlreadyExists` + f (i + 1) + +whenAlreadyExists :: IO a -> IO a -> IO a +whenAlreadyExists f g = f `Exception.catch` \e -> if isAlreadyExistsError e + then g + else throw e + hunk ./camp-repository/Camp/Repository.hs 44 -import Control.Monad hunk ./camp-repository/Camp/Repository.hs 247 - let r = Repository d - exists <- doesDirectoryExist (repoBase r) - unless exists $ do - -- XXX make a proper logging function - hPutStrLn stderr "You aren't in a repo!" - exitFailure - return r + f d + where f d = do let r = Repository d + exists <- doesDirectoryExist (repoBase r) + if exists + then return r + else do let d' = takeDirectory d + if d' == d + then -- XXX make a proper failure function + do hPutStrLn stderr "You aren't in a repo!" 
+ exitFailure + else f d' hunk ./camp-repository/Camp/Options.hs 2 -module Camp.Options (GeneralFlags(..), parseGeneralFlags) where +module Camp.Options ( + GeneralFlags(..), parseGeneralFlags, + RecordFlags(..), parseRecordFlags, + ) where hunk ./camp-repository/Camp/Options.hs 9 +type ChangeFlags a = (GeneralFlags, a) -> Either String (GeneralFlags, a) + hunk ./camp-repository/Camp/Options.hs 18 -type ChangeGeneralFlags = GeneralFlags -> Either String GeneralFlags hunk ./camp-repository/Camp/Options.hs 23 -generalOpts :: [OptDescr ChangeGeneralFlags] +generalOpts :: [OptDescr (ChangeFlags a)] hunk ./camp-repository/Camp/Options.hs 25 - Option ['v'] ["verbose"] (OptArg setVerbosity "Verbosity") "verbosity", - Option ['l'] ["log"] (NoArg (setLog True)) "logging", - Option [] ["no-log"] (NoArg (setLog False)) "no logging" + Option ['v'] ["verbose"] (OptArg setGfVerbosity "Verbosity") "verbosity", + Option ['l'] ["log"] (NoArg (setGfLog True)) "logging", + Option [] ["no-log"] (NoArg (setGfLog False)) "no logging" hunk ./camp-repository/Camp/Options.hs 30 -setLog :: Bool -> ChangeGeneralFlags -setLog b gf = Right $ gf { gfLog = b } +setGfLog :: Bool -> ChangeFlags a +setGfLog b (gf, x) = Right (gf { gfLog = b }, x) hunk ./camp-repository/Camp/Options.hs 33 -setVerbosity :: Maybe String -> ChangeGeneralFlags -setVerbosity Nothing gf = Right $ gf { gfVerbosity = Verbose } -setVerbosity (Just "0") gf = Right $ gf { gfVerbosity = Silent } -setVerbosity (Just "1") gf = Right $ gf { gfVerbosity = Normal } -setVerbosity (Just "2") gf = Right $ gf { gfVerbosity = Verbose } -setVerbosity (Just s) _ = Left ("Bad verbosity: " ++ show s) +setGfVerbosity :: Maybe String -> ChangeFlags a +setGfVerbosity Nothing (gf, x) = Right (gf { gfVerbosity = Verbose }, x) +setGfVerbosity (Just "0") (gf, x) = Right (gf { gfVerbosity = Silent }, x) +setGfVerbosity (Just "1") (gf, x) = Right (gf { gfVerbosity = Normal }, x) +setGfVerbosity (Just "2") (gf, x) = Right (gf { gfVerbosity = Verbose }, x) +setGfVerbosity (Just s) _ = Left ("Bad verbosity: " ++ show s) hunk ./camp-repository/Camp/Options.hs 49 - case apply fs defaultGeneralFlags of + case apply fs (defaultGeneralFlags, ()) of hunk ./camp-repository/Camp/Options.hs 51 - Right flags -> Right (flags, afterOpts) + Right (flags, ()) -> Right (flags, afterOpts) hunk ./camp-repository/Camp/Options.hs 59 +---------------------------------------------------------------------- + +type ChangeRecordFlags = ChangeFlags RecordFlags +data RecordFlags = RecordFlags { + rfAll :: Bool, + rfMessage :: Maybe String + } + +recordOpts :: [OptDescr ChangeRecordFlags] +recordOpts = [ + Option ['a'] ["all"] (NoArg (setRfAll True)) "all", + Option ['m'] ["message"] (ReqArg setRfMessage "Message") "message" + ] + +setRfAll :: Bool -> ChangeRecordFlags +setRfAll b (gf, rf) = Right (gf, rf { rfAll = b }) + +setRfMessage :: String -> ChangeRecordFlags +setRfMessage msg (gf, rf) = Right (gf, rf { rfMessage = Just msg }) + +defaultRecordFlags :: RecordFlags +defaultRecordFlags = RecordFlags { + rfAll = False, + rfMessage = Nothing + } + +parseRecordFlags :: [String] -> GeneralFlags + -> Either [String] (GeneralFlags, RecordFlags, [String]) +parseRecordFlags args gf = case getOpt RequireOrder (recordOpts ++ generalOpts) args of + (fs, afterOpts, []) -> + case apply fs (gf, defaultRecordFlags) of + Left err -> Left [err] + Right (gf', rf) -> Right (gf', rf, afterOpts) + (_, _, errs) -> + Left errs + where apply [] flags = Right flags + apply (f:fs) flags = case f flags of + Left err -> Left 
err + Right flags' -> apply fs flags' + hunk ./camp-bin/Camp/Command/Record.hs 12 -import Camp.Patch.Name hunk ./camp-bin/Camp/Command/Record.hs 22 +import System.Exit hunk ./camp-bin/Camp/Command/Record.hs 29 -record gf [] = do r <- getRepo - withLog gf r $ \_ -> do - n <- genName r - recordName r n Nothing True -record gf ["-a"] = do r <- getRepo - withLog gf r $ \_ -> do - n <- genName r - recordName r n Nothing False -record gf ["-a", "-m", short] = do r <- getRepo - withLog gf r $ \_ -> do - n <- genName r - recordName r n (Just short) False -record gf [n] = do r <- getRepo - withLog gf r $ \_ -> do - recordName r (Name Positive (BSC.pack n)) Nothing True -record gf ["-a", n] = do r <- getRepo - withLog gf r $ \_ -> do - recordName r (Name Positive (BSC.pack n)) Nothing False -record _ _ = error "Unknown arguments to record" +record gf args = case parseRecordFlags args gf of + Right (gf', rf, []) -> + record' gf' rf + Right (_, _, xs) -> + do mapM_ (hPutStrLn stderr . ("Bad arg: " ++ ) . show) xs + exitFailure + Left errs -> + do mapM_ (hPutStrLn stderr) errs + exitFailure hunk ./camp-bin/Camp/Command/Record.hs 39 -recordName :: Repository -> Name -> Maybe String -> Bool -> IO () -recordName r n m chooseInteractively = do +record' :: GeneralFlags -> RecordFlags -> IO () +record' gf rf = do + r <- getRepo + withLog gf r $ \_ -> do + n <- genName r hunk ./camp-bin/Camp/Command/Record.hs 47 - primitives' <- if chooseInteractively - then do wanted `Then` _ <- interactive pprint primitives + primitives' <- if rfAll rf + then return primitives + else do wanted `Then` _ <- interactive pprint primitives hunk ./camp-bin/Camp/Command/Record.hs 51 - else return primitives hunk ./camp-bin/Camp/Command/Record.hs 57 - (short, long) <- case m of - Just x -> return (BSC.pack x, BS.empty) + (short, long) <- case rfMessage rf of + -- XXX Should we allow '\n' in rfMessage, and unlines + -- it to get the long message? + Just msg -> return (BSC.pack msg, BS.empty) hunk ./camp-repository/Camp/Options.hs 76 +-- XXX Check for \n's? Or unlines it and use them for the long message? hunk ./camp-bin/Camp/Command/Interactive.hs 12 -#ifdef WINDOWS -import System.Posix.Internals -#endif hunk ./camp-bin/Camp/Command/Interactive.hs 19 -#ifdef WINDOWS - -- XXX Hack for Windows - stdinEcho <- getEcho 0 -#endif hunk ./camp-bin/Camp/Command/Interactive.hs 21 -#ifdef WINDOWS - -- XXX Hack for Windows - setEcho 0 False - setCooked 0 False -#endif hunk ./camp-bin/Camp/Command/Interactive.hs 24 -#ifdef WINDOWS - -- XXX Hack for Windows - setEcho 0 stdinEcho - -- XXX We can't find the cooked state before, so hope - -- it's the same as the edit state? - setCooked 0 stdinEcho -#endif hunk ./camp-bin/Camp/Command/Get.hs 35 - localInventory = relativeInventoryToInventory filename offset - relativeInventory + localInventory = compactRelativeInventoryToInventory + filename offset relativeInventory hunk ./camp-bin/Camp/Command/Get.hs 38 - -- XXX This reading should actually check that the inventory - -- aligns correctly with the patch text, whereas the default - -- readMegaPatches will just parse the whole sequence. - -- Otherwise we should check at an earlier stage that the - -- inventory correctly points at the patch boundaries. 
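The Camp.Options hunks above thread a pair of general and command-specific flags through every option, so that record can reuse the general options while adding its own. A minimal sketch of that pattern under assumed, simplified types: General, RecordF, Change, opts and parse are hypothetical stand-ins for GeneralFlags, RecordFlags, ChangeFlags, recordOpts and parseRecordFlags.

    import System.Console.GetOpt

    data General = General { verbose :: Bool } deriving Show
    data RecordF = RecordF { allChanges :: Bool, message :: Maybe String } deriving Show

    -- Every option updates the (general, command-specific) pair and may fail.
    type Change a = (General, a) -> Either String (General, a)

    opts :: [OptDescr (Change RecordF)]
    opts =
      [ Option ['v'] ["verbose"] (NoArg  (\(g, r)   -> Right (g { verbose = True }, r)))         "verbosity"
      , Option ['a'] ["all"]     (NoArg  (\(g, r)   -> Right (g, r { allChanges = True })))      "record everything"
      , Option ['m'] ["message"] (ReqArg (\m (g, r) -> Right (g, r { message = Just m })) "MSG") "patch name"
      ]

    parse :: [String] -> Either [String] (General, RecordF, [String])
    parse args =
        case getOpt RequireOrder opts args of
            (fs, rest, []) ->
                case foldl (>>=) (Right (General False, RecordF False Nothing)) fs of
                    Left err     -> Left [err]
                    Right (g, r) -> Right (g, r, rest)
            (_, _, errs) -> Left errs

    main :: IO ()
    main = print (parse ["-a", "-m", "fix typo", "foo.hs"])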
hunk ./camp-bin/Camp/Command/Get.hs 39 - -- We read the inventory and patches twice to avoid a space leak - localInventory' <- readInventoryLazily localRepo - patches' <- readMegaPatches localRepo localInventory' - applyToPristine localRepo patches' + -- The remote repo may not be correctly formed, so we need + -- to check that each element of the inventory actually + -- points to a syntactically valid patch. Just reading the + -- patches when we apply them won't do it, as we read the + -- whole contents and then read it as a list of patches, so + -- the boundary between patches may be in the wrong place. + checkInventory localRepo + + localInventoryApply <- readInventoryLazily localRepo + patchesApply <- readMegaPatches localRepo localInventoryApply + applyToPristine localRepo patchesApply hunk ./camp-repository/Camp/Inventory.hs 4 + CompactRelativeInventoryItem(..), hunk ./camp-repository/Camp/Inventory.hs 6 - relativeInventoryToInventory, + compactRelativeInventoryToInventory, hunk ./camp-repository/Camp/Inventory.hs 26 -data RelativeInventoryItem = RelativeInventoryItem Name -- MegaPatch name - Bytes -- Offset - Bytes -- Length +data CompactRelativeInventoryItem + = CompactRelativeInventoryItem Name -- MegaPatch name + Bytes -- Length hunk ./camp-repository/Camp/Inventory.hs 73 - -> [RelativeInventoryItem] -inventoryToCompactRelativeInventory = inventoryToCompactRelativeInventoryFrom 0 + -> [CompactRelativeInventoryItem] +inventoryToCompactRelativeInventory [] = [] +inventoryToCompactRelativeInventory (InventoryItem n _ _ len : is) + = CompactRelativeInventoryItem n len : + inventoryToCompactRelativeInventory is hunk ./camp-repository/Camp/Inventory.hs 79 --- XXX Make this local to the above -inventoryToCompactRelativeInventoryFrom :: Bytes - -> [InventoryItem] - -> [RelativeInventoryItem] -inventoryToCompactRelativeInventoryFrom _ [] = [] -inventoryToCompactRelativeInventoryFrom cur (InventoryItem n _ _ len : is) - = RelativeInventoryItem n cur len : - inventoryToCompactRelativeInventoryFrom (cur + len) is - -relativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Bytes - -> [RelativeInventoryItem] - -> [InventoryItem] -relativeInventoryToInventory _ _ [] = [] -relativeInventoryToInventory fp offset (RelativeInventoryItem n from len : is) - = InventoryItem n fp (from + offset) len - : relativeInventoryToInventory fp offset is +compactRelativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Bytes + -> [CompactRelativeInventoryItem] + -> [InventoryItem] +compactRelativeInventoryToInventory _ _ [] = [] +compactRelativeInventoryToInventory fp offset + (CompactRelativeInventoryItem n len : is) + = InventoryItem n fp offset len + : compactRelativeInventoryToInventory fp (offset + len) is hunk ./camp-repository/Camp/Repository.hs 12 + checkInventory, hunk ./camp-repository/Camp/Repository.hs 15 - relativeInventoryToInventory, + compactRelativeInventoryToInventory, hunk ./camp-repository/Camp/Repository.hs 45 +import Control.Monad hunk ./camp-repository/Camp/Repository.hs 121 +-- Check that each element of the inventory actually +-- points to a syntactically valid patch. Just reading the +-- patches and validating them won't do it, as it doesn't check +-- the boundary between patches are in the right place. 
+checkInventory :: Repository -> IO () +checkInventory r = do -- Read inventory twice to avoid a spaceleak + i1 <- readInventoryLazily r + i2 <- readInventoryLazily r + let ri = inventoryToCompactRelativeInventory i1 + bs <- getMegaPatches r i2 + check ri bs + where check [] bs = unless (BS.null bs) $ error "XXX too many bytes" + check (CompactRelativeInventoryItem _ len : is) bs + -- XXX This should be a splitAt that complains if there aren't + -- enough bytes + = case BS.splitAt len bs of + (this, rest) -> + case valid this (undefined :: MegaPatch from to) of + Left (err, _) -> error ("XXX checkInventory " ++ err) + Right bs' -> + do unless (BS.null bs') $ error "XXX too many bytes" + check is rest + hunk ./camp-bin/Camp/Command/Get.hs 4 +import Camp.Inventory hunk ./camp-bin/Camp/Command/Inventory.hs 4 +import Camp.Inventory hunk ./camp-bin/Camp/Command/Pull.hs 5 +import Camp.Inventory hunk ./camp-bin/Camp/Command/Show.hs 4 +import Camp.Inventory hunk ./camp-repository/Camp/Repository.hs 13 - InventoryItem(..), - inventoryToCompactRelativeInventory, - compactRelativeInventoryToInventory, hunk ./camp-bin/Camp/Command/Get.hs 28 - patches <- getMegaPatches remoteRepo remoteInventory + patches <- getMegaPatches gf remoteRepo remoteInventory hunk ./camp-bin/Camp/Command/Get.hs 46 - checkInventory localRepo + checkInventory gf localRepo hunk ./camp-bin/Camp/Command/Get.hs 49 - patchesApply <- readMegaPatches localRepo localInventoryApply + patchesApply <- readMegaPatches gf localRepo localInventoryApply hunk ./camp-bin/Camp/Command/Pull.hs 45 - localReadPatches <- readMegaPatches localRepo localReadInventory - remoteReadPatches <- readMegaPatches remoteRepo remoteReadInventory + localReadPatches <- readMegaPatches gf localRepo localReadInventory + remoteReadPatches <- readMegaPatches gf remoteRepo remoteReadInventory addfile ./camp-repository/Camp/Messages.hs hunk ./camp-repository/Camp/Messages.hs 1 + +module Camp.Messages ( + msg + ) where + +import Camp.Options + +import Control.Monad + +msg :: GeneralFlags -> Verbosity -> String -> IO () +msg gf v str = when (v >= gfVerbosity gf) $ putStrLn str + hunk ./camp-repository/Camp/Options.hs 5 + Verbosity(..) -- XXX Should this be in another module? 
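The new Camp.Messages module above gates output on the configured verbosity. A small, self-contained sketch of the same idea, assuming the Verbosity constructors are declared in increasing order; Flags and say are hypothetical stand-ins for GeneralFlags and msg, and the comparison direction used here is the corrected one that a later hunk applies to msg.

    import Control.Monad (when)

    data Verbosity = Silent | Normal | Verbose deriving (Eq, Ord, Show)

    newtype Flags = Flags { verbosity :: Verbosity }

    -- A message tagged with level v is printed only when the configured
    -- verbosity is at least v.
    say :: Flags -> Verbosity -> String -> IO ()
    say flags v str = when (verbosity flags >= v) (putStrLn str)

    main :: IO ()
    main = do
        let flags = Flags Normal
        say flags Normal  "printed: Normal-level message"
        say flags Verbose "suppressed: Verbose-level message"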
hunk ./camp-repository/Camp/Options.hs 18 + deriving (Eq, Ord) hunk ./camp-repository/Camp/Repository.hs 33 +import Camp.Messages +import Camp.Options hunk ./camp-repository/Camp/Repository.hs 124 -checkInventory :: Repository -> IO () -checkInventory r = do -- Read inventory twice to avoid a spaceleak - i1 <- readInventoryLazily r - i2 <- readInventoryLazily r - let ri = inventoryToCompactRelativeInventory i1 - bs <- getMegaPatches r i2 - check ri bs +checkInventory :: GeneralFlags -> Repository -> IO () +checkInventory gf r + = do -- Read inventory twice to avoid a spaceleak + i1 <- readInventoryLazily r + i2 <- readInventoryLazily r + let ri = inventoryToCompactRelativeInventory i1 + bs <- getMegaPatches gf r i2 + check ri bs hunk ./camp-repository/Camp/Repository.hs 169 -getMegaPatches :: Repository -> [InventoryItem] -> IO ByteString -getMegaPatches r is = getContents r (inventoryToContents is) +getMegaPatches :: GeneralFlags -> Repository -> [InventoryItem] + -> IO ByteString +getMegaPatches gf r is = getContents gf r (inventoryToContents is) hunk ./camp-repository/Camp/Repository.hs 173 -getContents :: Repository -> [Content] -> IO ByteString -getContents _ [] = return BS.empty -getContents r (c : cs) = do x <- getContent r c - xs <- unsafeInterleaveIO $ getContents r cs - return (x `BS.append` xs) +getContents :: GeneralFlags -> Repository -> [Content] -> IO ByteString +getContents _ _ [] = return BS.empty +getContents gf r (c : cs) = do x <- getContent gf r c + xs <- unsafeInterleaveIO $ getContents gf r cs + return (x `BS.append` xs) hunk ./camp-repository/Camp/Repository.hs 181 -getContent :: Repository -> Content -> IO ByteString -getContent (Repository r) (Content fp from len) - = do -- putStrLn ("Getting content: XXX") -- XXX Proper logging please +getContent :: GeneralFlags -> Repository -> Content -> IO ByteString +getContent gf (Repository r) (Content fp from len) + = do msg gf Verbose "Getting content: XXX" hunk ./camp-repository/Camp/Repository.hs 191 -readMegaPatches :: forall from to . - Repository -> [InventoryItem] -> IO (Seq MegaPatch from to) -readMegaPatches r ns = do content <- getMegaPatches r ns - case input content of - (Stream2 s, _{- "" -}) -> return s +readMegaPatches :: GeneralFlags -> Repository -> [InventoryItem] + -> IO (Seq MegaPatch from to) +readMegaPatches gf r ns = do content <- getMegaPatches gf r ns + case input content of + (Stream2 s, _{- "" -}) -> return s hunk ./camp-repository/camp-repository.cabal 23 + Camp.Messages hunk ./camp-bin/Camp/Main.hs 82 - -- XXX This should be in the withLog function - -- logException l e hunk ./camp-repository/Camp/Logging.hs 11 - logException, hunk ./camp-repository/Camp/Logging.hs 17 -import Control.Exception +import Control.Exception as Exception hunk ./camp-repository/Camp/Logging.hs 43 -withLog gf r f = if gfLog gf then bracket (startLog r) (endLog r) f - else f NoLog +withLog gf r f + = if gfLog gf + then bracket (startLog r) (endLog r) $ + \l -> f l `Exception.catch` \e -> do logException l e + throw e + else f NoLog hunk ./camp-repository/Camp/Logging.hs 7 - -- XXX Get rid of these - startLog, - endLog, hunk ./camp-repository/Camp/Logging.hs 19 -import Prelude hiding (getContents) -- XXX - hunk ./camp-repository/Camp/Inventory.hs 19 --- XXX Curently this is start/end. Should it be start/length? hunk ./camp-bin/Camp/Main.hs 31 - "XXX This is out of date", - "", hunk ./camp-bin/Camp/Main.hs 33 - "This is not intended to be a useful application. 
Rather, it is a", - "prototype of a new patch theory, similar to that used in darcs.", - "", - "This is not designed to be a robust application. If you do things", - "like ask camp to add a file that doesn't exist to your repo,", - "then it'll probably rudely crash. Directories are not supported,", - "only files. None of the commands are interactive at all.", - "", - "You certainly wouldn't want to use this for a real project.", - "", - "However, if missing features etc are getting in the way of working", - "on and testing the patch theory, let me know.", - "", - "The following commands are supported:", - "", - " init Initialise a repository", + "WARNING:", + "This is not yet suitable for use yet! Do not trust your data to it!", hunk ./camp-bin/Camp/Main.hs 36 - " add file_1 ... file_n Mark file_1 ... file_n as to be added", - " to the repository on the next 'record'", + "Not all the below exist yet. Some extra commands may exist.", hunk ./camp-bin/Camp/Main.hs 38 - " record Record all pending adds, hunks for any", - " changed files already in the repo, and", - " remove any files that are no longer in", - " the working directory", + "Help", + " help Use \"help cmd\" for help on the cmd command", hunk ./camp-bin/Camp/Main.hs 41 - " inventory List the patch names in this repository", + "Repository admin:", + " initialise optimise check repair getpref setpref", hunk ./camp-bin/Camp/Main.hs 44 - " inventory /repo/path/ Show the list of patch names in this", - " repo and not in /repo/path/, and", - " vice-versa", + "Inter-repo commands:", + " get put pull push send", hunk ./camp-bin/Camp/Main.hs 47 - " pull /repo/path/ Pull all the patches from /repo/path/", + "Commands that affect pending changes:", + " add remove move revert unrevert", hunk ./camp-bin/Camp/Main.hs 50 - " pull /repo/path/ name_1 ... name_n", - " Pull only the patches called name_1 ...", - " name_n from /repo/path/", + "Commands that affect recorded patches:", + " record unrecord amend-record tag obliterate rollback", + "", + " mark-conflicts", hunk ./camp-bin/Camp/Main.hs 55 - " Note: If you have local changes then pulling may go wrong.", - " It is not meant to handle them.", + "Viewing the repo state:", + " whatsnew annotate inventory show", hunk ./camp-bin/Camp/Main.hs 48 - " add remove move revert unrevert", + " add remove move revert unrevert mark-conflicts", hunk ./camp-bin/Camp/Main.hs 53 - " mark-conflicts", - "", addfile ./camp-bin/Camp/Die.hs hunk ./camp-bin/Camp/Die.hs 1 + +module Camp.Die (die) where + +import System.Environment +import System.Exit +import System.IO + +die :: String -> IO a +die err = do progName <- getProgName + hPutStrLn stderr (progName ++ ": " ++ err) + exitFailure + hunk ./camp-bin/Camp/Main.hs 12 +import Camp.Die hunk ./camp-bin/Camp/Main.hs 23 -main = do args <- getArgs - if null args - then usageInfo - else -- XXX For now we always initialise curl - withGlobalCurl $ doSomething args +main = doIt `catches` + [Handler $ \e -> throw (e :: ExitCode), + Handler $ \e -> do + die ("Panic! 
Got an unhandled exception!\n" + ++ "Exception details:\n" + ++ show (e :: SomeException))] hunk ./camp-bin/Camp/Main.hs 30 +doIt :: IO () +doIt = do args <- getArgs + case args of + [] -> usageInfo + _ -> case parseGeneralFlags args of + Right (gf, cmd : args') -> + withGlobalCurl $ + case cmd of + "add" -> add gf args' + "get" -> get gf args' + "init" -> initialise gf args' + "inventory" -> inventory gf args' + "pull" -> pull gf args' + "record" -> record gf args' + "show" -> showC gf args' + _ -> die ("Unrecognised command: " ++ show cmd) + _ -> + do progName <- getProgName + die ("Couldn't find a command. Run\n" + ++ " " ++ progName ++ " help\n" + ++ "for usage information") + +-- XXX When we have a help command, this will be part of it hunk ./camp-bin/Camp/Main.hs 83 -doSomething :: [String] -> IO () -doSomething args - = doIt args `catches` - [Handler $ \e -> throw (e :: ExitCode), - Handler $ \e -> do - hPutStrLn stderr "Got an exception:" - hPutStrLn stderr $ show (e :: SomeException)] - -doIt :: [String] -> IO () -doIt args - = case parseGeneralFlags args of - Right (gf, cmd : args') -> - case cmd of - "add" -> add gf args' - "get" -> get gf args' - "init" -> initialise gf args' - "inventory" -> inventory gf args' - "pull" -> pull gf args' - "record" -> record gf args' - "show" -> showC gf args' - _ -> error "Unrecognised command" -- XXX Give better errors - _ -> error "Unrecognised args" -- XXX Give better errors - hunk ./camp-bin/camp.cabal 28 + Camp.Die hunk ./camp-bin/Camp/Curl.hsc 16 +import Camp.Die + hunk ./camp-bin/Camp/Curl.hsc 50 - -- XXX Make a nicer exception - error "Curl init failed" + die "Curl init failed" hunk ./camp-bin/Camp/Curl.hsc 107 - -- XXX Should make nicer exceptions - let str' = "Curl code " ++ show cc ++ ": " ++ str - error str' + die ("Curl failed with code " ++ show cc ++ ": " ++ str) hunk ./camp-bin/Camp/Command/Get.hs 14 --- XXX Should make a directory hunk ./camp-repository/Camp/Messages.hs 11 -msg gf v str = when (v >= gfVerbosity gf) $ putStrLn str +msg gf v str = when (gfVerbosity gf >= v) $ putStrLn str hunk ./camp-repository/Camp/Inventory.hs 62 +-- XXX Should getContents be the one doing the joining? 
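The rewritten Camp/Main.hs entry point above wraps the command dispatch in Control.Exception.catches, re-throwing ExitCode so that normal exits still work while anything else is reported as a panic. A minimal sketch of that pattern; panic and run are hypothetical stand-ins for Camp.Die.die and the real dispatch.

    import Control.Exception
    import System.Exit
    import System.IO

    -- Hypothetical stand-in for Camp.Die.die.
    panic :: String -> IO a
    panic err = do hPutStrLn stderr err
                   exitFailure

    main :: IO ()
    main = run `catches`
        [ Handler $ \e -> throwIO (e :: ExitCode)   -- let exitFailure/exitWith through untouched
        , Handler $ \e -> panic ("Panic! Got an unhandled exception:\n"
                                 ++ show (e :: SomeException))
        ]
      where
        run = error "something went wrong"          -- stand-in for the real command dispatch

The order of the handlers matters: ExitCode must be tried first, otherwise the SomeException handler would also swallow ordinary exits.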
hunk ./camp-repository/Camp/Repository.hs 183 - = do msg gf Verbose "Getting content: XXX" + = do msg gf Verbose ("Getting content: file " ++ show fp + ++ ", offset " ++ show from + ++ ", length " ++ show len) hunk ./camp-repository/Camp/Repository.hs 188 - content <- hGetLazily (fromIntegral BS.defaultChunkSize) - h - len + content <- hGetLazily (fromIntegral BS.defaultChunkSize) h len hunk ./camp-repository/Camp/Repository.hs 246 - let len = 1 + endSize - startSize + let len = endSize - startSize hunk ./camp-core/Camp/Patch/InputOutput.hs 2 -module Camp.Patch.InputOutput (InputOutput(..), InputOutput2(..)) where +module Camp.Patch.InputOutput ( + InputOutput(..), InputOutput2(..), thenValid + ) where hunk ./camp-core/Camp/Patch/InputOutput.hs 35 +thenValid :: InputOutput a => Either (String, ByteString) ByteString -> a + -> Either (String, ByteString) ByteString +Left err `thenValid` _ = Left err +Right bs `thenValid` x = valid bs x + hunk ./camp-core/Camp/Patch/Primitive.hs 102 - valid bs1 (undefined :: ByteString) + valid bs1 (undefined :: InRepoFileName) hunk ./camp-core/Camp/Patch/Primitive.hs 105 - valid bs1 (undefined :: ByteString) + valid bs1 (undefined :: InRepoFileName) hunk ./camp-core/Camp/Patch/Primitive.hs 108 - case valid bs1 (undefined :: ByteString) of + case valid bs1 (undefined :: InRepoFileName) of hunk ./camp-core/Camp/Patch/Primitive.hs 111 - valid bs2 (undefined :: ByteString) + valid bs2 (undefined :: InRepoFileName) hunk ./camp-core/Camp/Patch/Primitive.hs 114 - valid bs1 (undefined :: ByteString) + valid bs1 (undefined :: InRepoFileName) hunk ./camp-core/Camp/Patch/Primitive.hs 117 - valid bs1 (undefined :: ByteString) + valid bs1 (undefined :: InRepoFileName) hunk ./camp-core/Camp/Patch/Primitive.hs 120 - case valid bs1 (undefined :: ByteString) of + case valid bs1 (undefined :: InRepoFileName) of hunk ./camp-core/Camp/Patch/Primitive.hs 123 - valid bs2 (undefined :: ByteString) + valid bs2 (undefined :: InRepoFileName) hunk ./camp-core/Camp/Patch/Primitive.hs 125 - Just (6, _) -> - error "XXX valid/Hunk not written yet" + Just (6, bs') -> + valid bs' (undefined :: InRepoFileName) + `thenValid` (undefined :: Bytes) + `thenValid` (undefined :: Line) + `thenValid` (undefined :: ByteString) + `thenValid` (undefined :: Line) + `thenValid` (undefined :: ByteString) + `thenValid` (undefined :: Line) hunk ./camp-core/Camp/Patch/Primitive.hs 134 - Just (7, _) -> - error "XXX valid/Binary not written yet" + Just (7, bs') -> + valid bs' (undefined :: InRepoFileName) + `thenValid` (undefined :: ByteString) + `thenValid` (undefined :: ByteString) hunk ./camp-core/Camp/Patch/MegaPatch.hs 19 +import Camp.Utils hunk ./camp-core/Camp/Patch/MegaPatch.hs 31 - input bs0 = case input bs0 of - (n, bs1) -> + input bs0 = case stripPrefixBS megaPatchHeader bs0 of + Nothing -> error "XXX Can't happen: No MegaPatch header" + Just bs1 -> hunk ./camp-core/Camp/Patch/MegaPatch.hs 35 - (mi, bs2) -> + (n, bs2) -> hunk ./camp-core/Camp/Patch/MegaPatch.hs 37 - (cs, bs3) -> - (MegaPatch n mi cs, bs3) - valid bs0 _ = case valid bs0 (undefined :: Name) of - Left err -> Left err - Right bs1 -> - case valid bs1 (undefined :: MetaInfo) of - Left err -> Left err - Right bs2 -> valid bs2 (undefined :: Seq Catch from to) - output (MegaPatch n mi cs) = output n `BS.append` + (mi, bs3) -> + case input bs3 of + (cs, bs4) -> + (MegaPatch n mi cs, bs4) + valid bs _ = case stripPrefixBS megaPatchHeader bs of + Nothing -> Left ("No MegaPatch header", bs) + Just bs' -> + valid bs' (undefined :: Name) + 
`thenValid` (undefined :: MetaInfo) + `thenValid` (undefined :: Seq Catch from to) + output (MegaPatch n mi cs) = megaPatchHeader `BS.append` + output n `BS.append` hunk ./camp-core/Camp/Patch/MegaPatch.hs 52 +megaPatchHeader :: ByteString +megaPatchHeader = BSC.pack "\nMegaPatch\n" + hunk ./camp-core/Camp/Utils.hs 4 + stripPrefixBS, hunk ./camp-core/Camp/Utils.hs 60 +stripPrefixBS :: ByteString -> ByteString -> Maybe ByteString +stripPrefixBS = f + where f prefix bs = case BS.uncons prefix of + Nothing -> Just bs + Just (p, prefix') -> + case BS.uncons bs of + Just (b, bs') + | p == b -> + f prefix' bs' + _ -> Nothing + hunk ./camp-repository/Camp/Inventory.hs 2 +-- XXX Work around GHC warning bugs: +{-# OPTIONS_GHC -fno-warn-unused-imports #-} + hunk ./camp-repository/Camp/Inventory.hs 17 +import Camp.Utils hunk ./camp-repository/Camp/Inventory.hs 21 +import qualified Data.ByteString.Lazy.Char8 as BSC hunk ./camp-repository/Camp/Inventory.hs 35 - input bs0 = case input bs0 of - (n, bs1) -> + input bs0 = case stripPrefixBS inventoryItemHeader bs0 of + Nothing -> error "XXX Can't happen: No InventoryItem header" + Just bs1 -> hunk ./camp-repository/Camp/Inventory.hs 39 - (fp, bs2) -> + (n, bs2) -> hunk ./camp-repository/Camp/Inventory.hs 41 - (from, bs3) -> + (fp, bs3) -> hunk ./camp-repository/Camp/Inventory.hs 43 - (len, bs4) -> - (InventoryItem n fp from len, bs4) - valid bs0 _ = case valid bs0 (undefined :: Name) of - Left err -> Left err - Right bs1 -> - case valid bs1 (undefined :: ByteString) of - Left err -> Left err - Right bs2 -> - case valid bs2 (undefined :: Bytes) of - Left err -> Left err - Right bs3 -> - valid bs3 (undefined :: Bytes) + (from, bs4) -> + case input bs4 of + (len, bs5) -> + (InventoryItem n fp from len, bs5) + valid bs _ = case stripPrefixBS inventoryItemHeader bs of + Nothing -> Left ("No InventoryItem header", bs) + Just bs' -> + valid bs' (undefined :: Name) + `thenValid` (undefined :: ByteString) + `thenValid` (undefined :: Bytes) + `thenValid` (undefined :: Bytes) hunk ./camp-repository/Camp/Inventory.hs 55 - = output n `BS.append` + = inventoryItemHeader `BS.append` + output n `BS.append` hunk ./camp-repository/Camp/Inventory.hs 61 +inventoryItemHeader :: ByteString +inventoryItemHeader = BSC.pack "\nInventoryItem\n" + hunk ./camp-repository/Camp/Inventory.hs 75 - = let f cur [] = [Content fp from cur] + = let f cur [] = [Content fp from (cur - from)] hunk ./camp-repository/Camp/Inventory.hs 78 - | otherwise = Content fp from cur : inventoryToContents is' + | otherwise = Content fp from (cur - from) : inventoryToContents is' hunk ./camp-repository/Camp/Inventory.hs 14 +import Camp.InRepoFileName hunk ./camp-repository/Camp/Inventory.hs 23 +import System.FilePath hunk ./camp-repository/Camp/Inventory.hs 26 -data InventoryItem = InventoryItem Name -- MegaPatch name - ByteString -- Filename (relative to - -- working directory) - Bytes -- Offset - Bytes -- Length +data InventoryItem = InventoryItem Name -- MegaPatch name + InRepoFileName -- Filename (relative to + -- working directory) + Bytes -- Offset + Bytes -- Length hunk ./camp-repository/Camp/Inventory.hs 53 - `thenValid` (undefined :: ByteString) + `thenValid` (undefined :: InRepoFileName) hunk ./camp-repository/Camp/Inventory.hs 66 -data Content = Content ByteString{- FilePath -} Bytes Bytes +data Content = Content FilePath Bytes Bytes hunk ./camp-repository/Camp/Inventory.hs 74 -inventoryToContents :: [InventoryItem] -> [Content] -inventoryToContents [] = [] -inventoryToContents (InventoryItem _ fp 
from len : is) - = let f cur [] = [Content fp from (cur - from)] - f cur is'@(InventoryItem _ fp' from' len' : is'') - | (fp == fp') && (cur == from') = f (cur + len') is'' - | otherwise = Content fp from (cur - from) : inventoryToContents is' +inventoryToContents :: FilePath -> [InventoryItem] -> [Content] +inventoryToContents _ [] = [] +inventoryToContents root (InventoryItem _ fn from len : is) + = let f cur [] = [Content (root </> toFilePath fn) from (cur - from)] + f cur is'@(InventoryItem _ fn' from' len' : is'') + | (fn == fn') && (cur == from') = f (cur + len') is'' + | otherwise = Content (root </> toFilePath fn) from (cur - from) + : inventoryToContents root is' hunk ./camp-repository/Camp/Inventory.hs 91 -compactRelativeInventoryToInventory :: ByteString{- XXX FilePath -} -> Bytes +compactRelativeInventoryToInventory :: InRepoFileName -> Bytes hunk ./camp-repository/Camp/Repository.hs 32 +import Camp.InRepoFileName hunk ./camp-repository/Camp/Repository.hs 154 -patchesDir :: Repository -> FilePath -patchesDir r = repoRoot r </> "patches" +-- XXX This should be composed from other InRepoFileName's +patchesDir :: InRepoFileName +patchesDir = fromString "_camp/repo/patches" hunk ./camp-repository/Camp/Repository.hs 158 -patchFile :: Repository -> FilePath -patchFile r = patchesDir r </> "patchFile" +patchFile :: InRepoFileName +patchFile = fromString (toFilePath patchesDir </> "patchFile") + +inRepo :: Repository -> InRepoFileName -> FilePath +inRepo (Repository r) fn = r </> toFilePath fn hunk ./camp-repository/Camp/Repository.hs 165 - -> IO (ByteString{- FilePath -}, Bytes) + -> IO (InRepoFileName, Bytes) hunk ./camp-repository/Camp/Repository.hs 167 - = do let fp = patchFile r + = do let fp = inRepo r patchFile hunk ./camp-repository/Camp/Repository.hs 172 - return (BSC.pack fp, startSize) + return (patchFile, startSize) hunk ./camp-repository/Camp/Repository.hs 176 -getMegaPatches gf r is = getContents gf r (inventoryToContents is) +getMegaPatches gf (Repository r) is = getContents gf (inventoryToContents r is) hunk ./camp-repository/Camp/Repository.hs 178 -getContents :: GeneralFlags -> Repository -> [Content] -> IO ByteString -getContents _ _ [] = return BS.empty -getContents gf r (c : cs) = do x <- getContent gf r c - xs <- unsafeInterleaveIO $ getContents gf r cs - return (x `BS.append` xs) +getContents :: GeneralFlags -> [Content] -> IO ByteString +getContents _ [] = return BS.empty +getContents gf (c : cs) = do x <- getContent gf c + xs <- unsafeInterleaveIO $ getContents gf cs + return (x `BS.append` xs) hunk ./camp-repository/Camp/Repository.hs 186 -getContent :: GeneralFlags -> Repository -> Content -> IO ByteString -getContent gf (Repository r) (Content fp from len) +getContent :: GeneralFlags -> Content -> IO ByteString +getContent gf (Content fp from len) hunk ./camp-repository/Camp/Repository.hs 191 - h <- openBinaryFile (r </> BSC.unpack fp) ReadMode + h <- openBinaryFile fp ReadMode hunk ./camp-repository/Camp/Repository.hs 211 -readMegaPatch (Repository r) (InventoryItem _ fp from len) - = do h <- openBinaryFile (r </> BSC.unpack fp) ReadMode +readMegaPatch r (InventoryItem _ fn from len) + = do h <- openBinaryFile (inRepo r fn) ReadMode hunk ./camp-repository/Camp/Repository.hs 222 - = do let fp = patchFile r + = do let fp = inRepo r patchFile hunk ./camp-repository/Camp/Repository.hs 225 - is <- hWriteMegaPatches fp h startSize ps + is <- hWriteMegaPatches patchFile h startSize ps hunk ./camp-repository/Camp/Repository.hs 229 -hWriteMegaPatches :: FilePath -> Handle -> Bytes -> Seq MegaPatch
from to +hWriteMegaPatches :: InRepoFileName -> Handle -> Bytes -> Seq MegaPatch from to hunk ./camp-repository/Camp/Repository.hs 232 -hWriteMegaPatches fp h startSize (Cons p ps) - = do (endSize, i) <- hWriteMegaPatch fp h startSize p - is <- hWriteMegaPatches fp h endSize ps +hWriteMegaPatches pf h startSize (Cons p ps) + = do (endSize, i) <- hWriteMegaPatch pf h startSize p + is <- hWriteMegaPatches pf h endSize ps hunk ./camp-repository/Camp/Repository.hs 239 - = do let fp = patchFile r + = do let fp = inRepo r patchFile hunk ./camp-repository/Camp/Repository.hs 242 - (_, i) <- hWriteMegaPatch fp h startSize m + (_, i) <- hWriteMegaPatch patchFile h startSize m hunk ./camp-repository/Camp/Repository.hs 246 -hWriteMegaPatch :: FilePath -> Handle -> Bytes -> MegaPatch from to +hWriteMegaPatch :: InRepoFileName -> Handle -> Bytes -> MegaPatch from to hunk ./camp-repository/Camp/Repository.hs 248 -hWriteMegaPatch fp h startSize m@(MegaPatch n _ _) +hWriteMegaPatch pf h startSize m@(MegaPatch n _ _) hunk ./camp-repository/Camp/Repository.hs 253 - return (endSize, InventoryItem n (BSC.pack fp) startSize len) + return (endSize, InventoryItem n pf startSize len) hunk ./camp-repository/Camp/Repository.hs 294 - createDirectory (patchesDir r) + createDirectory (inRepo r patchesDir) hunk ./camp-bin/Camp/Command/Pull.hs 7 +import Camp.Messages hunk ./camp-bin/Camp/Command/Pull.hs 27 + msg gf Verbose "Opening log" hunk ./camp-bin/Camp/Command/Pull.hs 29 + msg gf Verbose "Logging remote repo" hunk ./camp-bin/Camp/Command/Pull.hs 31 + msg gf Verbose "Reading local inventory" hunk ./camp-bin/Camp/Command/Pull.hs 33 + msg gf Verbose "Reading remote inventory" hunk ./camp-bin/Camp/Command/Pull.hs 62 - doPull localRepo localInventory localPatches remotePatches' + doPull gf localRepo localInventory localPatches remotePatches' hunk ./camp-bin/Camp/Command/Pull.hs 64 - do doPull localRepo localInventory localPatches remotePatches + do doPull gf localRepo localInventory localPatches remotePatches hunk ./camp-bin/Camp/Command/Pull.hs 70 - doPull localRepo localInventory localPatches remotePatches' + doPull gf localRepo localInventory localPatches remotePatches' hunk ./camp-bin/Camp/Command/Pull.hs 80 -doPull :: Repository -> [InventoryItem] +doPull :: GeneralFlags -> Repository -> [InventoryItem] hunk ./camp-bin/Camp/Command/Pull.hs 83 -doPull localRepo localInventory localPatches remotePatches = +doPull gf localRepo localInventory localPatches remotePatches = hunk ./camp-bin/Camp/Command/Pull.hs 89 + msg gf Verbose "Writing patches" hunk ./camp-bin/Camp/Command/Pull.hs 92 + msg gf Verbose "Applying patches to pristine" hunk ./camp-bin/Camp/Command/Pull.hs 94 + msg gf Verbose "Applying patches to working" hunk ./camp-bin/Camp/Command/Pull.hs 96 + msg gf Verbose "Writing inventory" hunk ./camp-repository/Camp/Repository.hs 252 - hClose h hunk ./camp-bin/Camp/Command/Get.hs 6 +import Camp.Messages hunk ./camp-bin/Camp/Command/Get.hs 23 + msg gf Normal ("Creating repository in " ++ show d) hunk ./camp-bin/Camp/Command/Add.hs 10 -add gf paths = do r <- getRepo +add gf paths = withLockedRepoSearch $ \r -> hunk ./camp-bin/Camp/Command/Get.hs 17 - = do remoteRepo <- mkRepo remoteRepoPath + = withUnlockedRepo remoteRepoPath $ \remoteRepo -> do + -- XXX By the time we are creating a local repo we should be + -- reasonably sure that there really is a remote repo + d <- newDirectory $ takeFileName + $ dropTrailingPathSeparator remoteRepoPath + msg gf Normal ("Creating repository in " ++ show d) + setCurrentDirectory d + 
withLockedRepoCreate $ \localRepo -> + withLog gf localRepo $ \l -> do + logRepo l "remote" remoteRepo + remoteInventory <- readInventoryLazily remoteRepo + patches <- getMegaPatches gf remoteRepo remoteInventory + initialiseRepo localRepo + (filename, offset) <- putMegaPatches localRepo patches + -- XXX We need to make a local copy rather than reading from + -- the remote twice + remoteInventory' <- readInventoryLazily remoteRepo + let relativeInventory = inventoryToCompactRelativeInventory + remoteInventory' + localInventory = compactRelativeInventoryToInventory + filename offset relativeInventory + writeInventory localRepo localInventory hunk ./camp-bin/Camp/Command/Get.hs 40 - -- XXX By the time we are creating a local repo we should be - -- reasonably sure that there really is a remote repo - d <- newDirectory $ takeFileName - $ dropTrailingPathSeparator remoteRepoPath - msg gf Normal ("Creating repository in " ++ show d) - setCurrentDirectory d - localRepo <- createRepo - withLog gf localRepo $ \l -> do - logRepo l "remote" remoteRepo - remoteInventory <- readInventoryLazily remoteRepo - patches <- getMegaPatches gf remoteRepo remoteInventory - initialiseRepo localRepo - (filename, offset) <- putMegaPatches localRepo patches - -- XXX We need to make a local copy rather than reading from - -- the remote twice - remoteInventory' <- readInventoryLazily remoteRepo - let relativeInventory = inventoryToCompactRelativeInventory - remoteInventory' - localInventory = compactRelativeInventoryToInventory - filename offset relativeInventory - writeInventory localRepo localInventory + -- The remote repo may not be correctly formed, so we need + -- to check that each element of the inventory actually + -- points to a syntactically valid patch. Just reading the + -- patches when we apply them won't do it, as we read the + -- whole contents and then read it as a list of patches, so + -- the boundary between patches may be in the wrong place. + checkInventory gf localRepo hunk ./camp-bin/Camp/Command/Get.hs 48 - -- The remote repo may not be correctly formed, so we need - -- to check that each element of the inventory actually - -- points to a syntactically valid patch. Just reading the - -- patches when we apply them won't do it, as we read the - -- whole contents and then read it as a list of patches, so - -- the boundary between patches may be in the wrong place. - checkInventory gf localRepo + localInventoryApply <- readInventoryLazily localRepo + patchesApply <- readMegaPatches gf localRepo localInventoryApply + applyToPristine localRepo patchesApply hunk ./camp-bin/Camp/Command/Get.hs 52 - localInventoryApply <- readInventoryLazily localRepo - patchesApply <- readMegaPatches gf localRepo localInventoryApply - applyToPristine localRepo patchesApply - - -- Rather than applying all the patches again with - -- applyToWorking, we just copy the result from pristine. - -- This is normally much faster. - copyPristineToWorking localRepo + -- Rather than applying all the patches again with + -- applyToWorking, we just copy the result from pristine. + -- This is normally much faster. 
+ copyPristineToWorking localRepo hunk ./camp-bin/Camp/Command/Init.hs 9 -initialise gf [] = do r <- createRepo - initialiseRepo r - withLog gf r $ \_ -> return () +initialise gf [] = withLockedRepoCreate $ \r -> do + initialiseRepo r + withLog gf r $ \_ -> return () hunk ./camp-bin/Camp/Command/Inventory.hs 12 -inventory _ [] = do r <- getRepo - i <- readInventory r - let ns = [ n | InventoryItem n _ _ _ <- i ] - mapM_ (putStrLn . pprint) ns +inventory _ [] = withLockedRepoSearch $ \r -> do + i <- readInventory r + let ns = [ n | InventoryItem n _ _ _ <- i ] + mapM_ (putStrLn . pprint) ns hunk ./camp-bin/Camp/Command/Inventory.hs 17 - = do localRepo <- getRepo - remoteRepo <- mkRepo remoteRepoPath - localInventory <- readInventory localRepo - remoteInventory <- readInventory remoteRepo - let localNames = [ n | InventoryItem n _ _ _ <- localInventory ] - remoteNames = [ n | InventoryItem n _ _ _ <- remoteInventory ] - localNameSet = Set.fromList localNames - remoteNameSet = Set.fromList remoteNames - localOnly = localNameSet `Set.difference` remoteNameSet - remoteOnly = remoteNameSet `Set.difference` localNameSet - putStrLn "Here only:" - mapM_ (putStrLn . pprint) $ Set.toList localOnly - putStrLn "" - putStrLn "There only:" - mapM_ (putStrLn . pprint) $ Set.toList remoteOnly + = withUnlockedRepoSearch $ \localRepo -> + withUnlockedRepo remoteRepoPath $ \remoteRepo -> do + localInventory <- readInventory localRepo + remoteInventory <- readInventory remoteRepo + let localNames = [ n | InventoryItem n _ _ _ <- localInventory ] + remoteNames = [ n | InventoryItem n _ _ _ <- remoteInventory ] + localNameSet = Set.fromList localNames + remoteNameSet = Set.fromList remoteNames + localOnly = localNameSet `Set.difference` remoteNameSet + remoteOnly = remoteNameSet `Set.difference` localNameSet + putStrLn "Here only:" + mapM_ (putStrLn . pprint) $ Set.toList localOnly + putStrLn "" + putStrLn "There only:" + mapM_ (putStrLn . pprint) $ Set.toList remoteOnly hunk ./camp-bin/Camp/Command/Pull.hs 25 - = do remoteRepo <- mkRepo remoteRepoPath - localRepo <- getRepo + = withLockedRepoSearch $ \localRepo -> + withUnlockedRepo remoteRepoPath $ \remoteRepo -> do hunk ./camp-bin/Camp/Command/Pull.hs 80 -doPull :: GeneralFlags -> Repository -> [InventoryItem] +doPull :: GeneralFlags -> LockedRepository -> [InventoryItem] hunk ./camp-bin/Camp/Command/Record.hs 6 -import Camp.InRepoFileName as InRepoFileName +import qualified Camp.InRepoFileName as InRepoFileName hunk ./camp-bin/Camp/Command/Record.hs 40 -record' gf rf = do - r <- getRepo +record' gf rf = + withLockedRepoSearch $ \r -> hunk ./camp-bin/Camp/Command/Record.hs 44 - directory <- recordDirectory (pristineDir r) "." + directory <- recordDirectory (inRepo r pristineDir) "." 
hunk ./camp-bin/Camp/Command/Record.hs 73 -recordAdds :: Repository -> IO (Seq Primitive from to) +recordAdds :: LockedRepository -> IO (Seq Primitive from to) hunk ./camp-bin/Camp/Command/Show.hs 12 - = do r <- getRepo + = withUnlockedRepoSearch $ \r -> do hunk ./camp-core/Camp/InRepoFileName.hs 5 - (InRepoFileName, fromString, fromByteString, toFilePath) + (InRepoFileName, fromString, fromByteString, toFilePath, ()) hunk ./camp-core/Camp/InRepoFileName.hs 11 -import System.FilePath +import qualified System.FilePath as FP hunk ./camp-core/Camp/InRepoFileName.hs 36 -fromString fp = let fp' = normalise fp +fromString fp = let fp' = FP.normalise fp hunk ./camp-core/Camp/InRepoFileName.hs 45 +() :: InRepoFileName -> InRepoFileName -> InRepoFileName +InRepoFileName x InRepoFileName y = InRepoFileName (x FP. y) + hunk ./camp-repository/Camp/Inventory.hs 14 -import Camp.InRepoFileName +import Camp.InRepoFileName (InRepoFileName) +import qualified Camp.InRepoFileName as InRepoFileName hunk ./camp-repository/Camp/Inventory.hs 78 - = let f cur [] = [Content (root toFilePath fn) from (cur - from)] + = let f cur [] = [Content (root InRepoFileName.toFilePath fn) from (cur - from)] hunk ./camp-repository/Camp/Inventory.hs 81 - | otherwise = Content (root toFilePath fn) from (cur - from) + | otherwise = Content (root InRepoFileName.toFilePath fn) from (cur - from) hunk ./camp-repository/Camp/Logging.hs 22 -mkLog :: Repository -> Integer -> Log -mkLog r i = Log (logsDir r show i) +mkLog :: LockedRepository -> Integer -> Log +mkLog r i = Log (inRepo r logsDir show i) hunk ./camp-repository/Camp/Logging.hs 37 -withLog :: GeneralFlags -> Repository -> (Log -> IO a) -> IO a +withLog :: GeneralFlags -> LockedRepository -> (Log -> IO a) -> IO a hunk ./camp-repository/Camp/Logging.hs 45 -startLog :: Repository -> IO Log +startLog :: LockedRepository -> IO Log hunk ./camp-repository/Camp/Logging.hs 48 - let nlf = nextLogFile r + let nlf = inRepo r nextLogFile hunk ./camp-repository/Camp/Logging.hs 63 -endLog :: Repository -> Log -> IO () +endLog :: LockedRepository -> Log -> IO () hunk ./camp-repository/Camp/Logging.hs 66 -logRepo :: Log -> FilePath -> Repository -> IO () +logRepo :: Repository r => Log -> FilePath -> r -> IO () hunk ./camp-repository/Camp/Record.hs 12 -recordMegaPatch :: Repository -> Name -> MetaInfo -> Seq Primitive from to +recordMegaPatch :: LockedRepository -> Name -> MetaInfo + -> Seq Primitive from to hunk ./camp-repository/Camp/Repository.hs 3 + LockedRepository, + UnlockedRepository, hunk ./camp-repository/Camp/Repository.hs 6 - createRepo, - mkRepo, - getRepo, + withLockedRepo, + withLockedRepoCreate, + withLockedRepoSearch, + withUnlockedRepo, + withUnlockedRepoSearch, hunk ./camp-repository/Camp/Repository.hs 34 + inRepo, hunk ./camp-repository/Camp/Repository.hs 50 +import Control.Exception hunk ./camp-repository/Camp/Repository.hs 59 +import Data.IORef hunk ./camp-repository/Camp/Repository.hs 63 -import System.FilePath +import qualified System.FilePath as FilePath hunk ./camp-repository/Camp/Repository.hs 70 -newtype Repository = Repository String +data LockedRepository = LockedRepository + String -- The repository location + +data UnlockedRepository = UnlockedRepository + String -- The repository location + (IORef (Maybe FilePath)) -- The inventory + (IORef (Maybe FilePath)) -- The patches + +class Repository r where + repoLocation :: r -> String + +instance Repository LockedRepository where + repoLocation (LockedRepository loc) = loc + +instance Repository UnlockedRepository 
where + repoLocation (UnlockedRepository loc _ _) = loc hunk ./camp-repository/Camp/Repository.hs 88 -repoBase :: Repository -> FilePath -repoBase (Repository r) = r "_camp" +repoBase :: InRepoFileName +repoBase = fromString "_camp" hunk ./camp-repository/Camp/Repository.hs 91 -repoRoot :: Repository -> FilePath -repoRoot r = repoBase r "repo" +repoRoot :: InRepoFileName +repoRoot = repoBase fromString "repo" hunk ./camp-repository/Camp/Repository.hs 94 -inventoryFile :: Repository -> FilePath -inventoryFile r = repoRoot r "inventory" +inventoryFile :: InRepoFileName +inventoryFile = repoRoot fromString "inventory" hunk ./camp-repository/Camp/Repository.hs 97 -addsFile :: Repository -> FilePath -addsFile r = repoRoot r "adds" +addsFile :: InRepoFileName +addsFile = repoRoot fromString "adds" hunk ./camp-repository/Camp/Repository.hs 100 -prefsDir :: Repository -> FilePath -prefsDir r = repoRoot r "prefs" +prefsDir :: InRepoFileName +prefsDir = repoRoot fromString "prefs" hunk ./camp-repository/Camp/Repository.hs 103 -authorFile :: Repository -> FilePath -authorFile r = prefsDir r "author" +authorFile :: InRepoFileName +authorFile = prefsDir fromString "author" hunk ./camp-repository/Camp/Repository.hs 106 -pristineDir :: Repository -> FilePath -pristineDir r = repoRoot r "pristine" +pristineDir :: InRepoFileName +pristineDir = repoRoot fromString "pristine" hunk ./camp-repository/Camp/Repository.hs 109 -workingDir :: Repository -> FilePath -workingDir (Repository r) = r +workingDir :: InRepoFileName +workingDir = fromString "" hunk ./camp-repository/Camp/Repository.hs 113 -appendInventoryItem :: Repository -> InventoryItem -> IO () +appendInventoryItem :: LockedRepository -> InventoryItem -> IO () hunk ./camp-repository/Camp/Repository.hs 115 - = do h <- openBinaryFile (inventoryFile r) AppendMode + = do h <- openBinaryFile (inRepo r inventoryFile) AppendMode hunk ./camp-repository/Camp/Repository.hs 121 -readInventory :: Repository -> IO [InventoryItem] +readInventory :: Repository r => r -> IO [InventoryItem] hunk ./camp-repository/Camp/Repository.hs 123 - = do h <- openBinaryFile (inventoryFile r) ReadMode + = do h <- openBinaryFile (inRepo r inventoryFile) ReadMode hunk ./camp-repository/Camp/Repository.hs 132 -readInventoryLazily :: Repository -> IO [InventoryItem] +readInventoryLazily :: Repository r => r -> IO [InventoryItem] hunk ./camp-repository/Camp/Repository.hs 134 - = do content <- BS.readFile (inventoryFile r) + = do content <- BS.readFile (inRepo r inventoryFile) hunk ./camp-repository/Camp/Repository.hs 140 -writeInventory :: Repository -> [InventoryItem] -> IO () -writeInventory r ns = BS.writeFile (inventoryFile r) (output (Stream ns)) +writeInventory :: LockedRepository -> [InventoryItem] -> IO () +writeInventory r ns = BS.writeFile (inRepo r inventoryFile) + (output (Stream ns)) hunk ./camp-repository/Camp/Repository.hs 148 -checkInventory :: GeneralFlags -> Repository -> IO () +checkInventory :: Repository r => GeneralFlags -> r -> IO () hunk ./camp-repository/Camp/Repository.hs 168 -readAdds :: Repository -> IO [FilePath] -readAdds r = do content <- readBinaryFile (addsFile r) +readAdds :: Repository r => r -> IO [FilePath] +readAdds r = do content <- readBinaryFile (inRepo r addsFile) hunk ./camp-repository/Camp/Repository.hs 174 -writeAdds :: Repository -> [FilePath] -> IO () -writeAdds r ns = writeBinaryFile (addsFile r) (show ns) +writeAdds :: LockedRepository -> [FilePath] -> IO () +writeAdds r ns = writeBinaryFile (inRepo r addsFile) (show ns) hunk 
./camp-repository/Camp/Repository.hs 179 -patchesDir = fromString "_camp/repo/patches" +patchesDir = repoRoot fromString "patches" hunk ./camp-repository/Camp/Repository.hs 182 -patchFile = fromString (toFilePath patchesDir "patchFile") +patchFile = patchesDir fromString "patchFile" hunk ./camp-repository/Camp/Repository.hs 184 -inRepo :: Repository -> InRepoFileName -> FilePath -inRepo (Repository r) fn = r toFilePath fn +inRepo :: Repository r => r -> InRepoFileName -> FilePath +inRepo r fn = repoLocation r FilePath. toFilePath fn hunk ./camp-repository/Camp/Repository.hs 187 -putMegaPatches :: Repository -> ByteString +putMegaPatches :: LockedRepository -> ByteString hunk ./camp-repository/Camp/Repository.hs 197 -getMegaPatches :: GeneralFlags -> Repository -> [InventoryItem] - -> IO ByteString -getMegaPatches gf (Repository r) is = getContents gf (inventoryToContents r is) +getMegaPatches :: Repository r + => GeneralFlags -> r -> [InventoryItem] -> IO ByteString +getMegaPatches gf r is + = getContents gf (inventoryToContents (repoLocation r) is) hunk ./camp-repository/Camp/Repository.hs 220 -readMegaPatches :: GeneralFlags -> Repository -> [InventoryItem] +readMegaPatches :: Repository r => GeneralFlags -> r -> [InventoryItem] hunk ./camp-repository/Camp/Repository.hs 228 -readMegaPatch :: Repository -> InventoryItem -> IO (MegaPatch from to) +readMegaPatch :: Repository r => r -> InventoryItem -> IO (MegaPatch from to) hunk ./camp-repository/Camp/Repository.hs 243 -writeMegaPatches :: Repository -> Seq MegaPatch from to -> IO [InventoryItem] +writeMegaPatches :: LockedRepository -> Seq MegaPatch from to + -> IO [InventoryItem] hunk ./camp-repository/Camp/Repository.hs 262 -writeMegaPatch :: Repository -> MegaPatch from to -> IO InventoryItem +writeMegaPatch :: LockedRepository -> MegaPatch from to -> IO InventoryItem hunk ./camp-repository/Camp/Repository.hs 279 -applyToPristine :: Apply p => Repository -> p from to -> IO () -applyToPristine r ps = inDir (pristineDir r) $ applyFully ps +applyToPristine :: Apply p => LockedRepository -> p from to -> IO () +applyToPristine r ps = inDir (inRepo r pristineDir) $ applyFully ps hunk ./camp-repository/Camp/Repository.hs 283 -applyToWorking :: Apply p => Repository -> p from to -> IO () -applyToWorking r ps = inDir (workingDir r) $ applyFully ps +applyToWorking :: Apply p => LockedRepository -> p from to -> IO () +applyToWorking r ps = inDir (inRepo r workingDir) $ applyFully ps hunk ./camp-repository/Camp/Repository.hs 287 -copyPristineToWorking :: Repository -> IO () -copyPristineToWorking r = copyTreeToDirectory (pristineDir r) (workingDir r) +copyPristineToWorking :: LockedRepository -> IO () +copyPristineToWorking r = copyTreeToDirectory (inRepo r pristineDir) + (inRepo r workingDir) + +-- XXX This should check that it really is a repo +-- XXX This should also lock it +withLockedRepo :: FilePath -> (LockedRepository -> IO a) -> IO a +withLockedRepo fp f = f (LockedRepository fp) + +withLockedRepoCreate :: (LockedRepository -> IO a) -> IO a +withLockedRepoCreate f = do -- XXX catch failure + createDirectory (toFilePath repoBase) + withLockedRepo "." 
f + +withLockedRepoSearch :: (LockedRepository -> IO a) -> IO a +withLockedRepoSearch f = do fp <- searchForRepo + withLockedRepo fp f hunk ./camp-repository/Camp/Repository.hs 306 -mkRepo :: FilePath -> IO Repository -mkRepo p = return (Repository p) +withUnlockedRepo :: FilePath -> (UnlockedRepository -> IO a) -> IO a +withUnlockedRepo fp f + = do inventoryRef <- newIORef Nothing + patchesRef <- newIORef Nothing + f (UnlockedRepository fp inventoryRef patchesRef) + `finally` do mInventoryFile <- readIORef inventoryRef + case mInventoryFile of + Nothing -> return () + Just file -> removeFile file -- XXX catch error + mPatchesFile <- readIORef patchesRef + case mPatchesFile of + Nothing -> return () + Just file -> removeFile file -- XXX catch error hunk ./camp-repository/Camp/Repository.hs 320 -createRepo :: IO Repository -createRepo = do d <- getCurrentDirectory - let r = Repository d - createDirectory (repoBase r) -- XXX catch failure - return r +withUnlockedRepoSearch :: (UnlockedRepository -> IO a) -> IO a +withUnlockedRepoSearch f = do fp <- searchForRepo + withUnlockedRepo fp f hunk ./camp-repository/Camp/Repository.hs 324 -getRepo :: IO Repository -getRepo = do d <- getCurrentDirectory - f d - where f d = do let r = Repository d - exists <- doesDirectoryExist (repoBase r) +searchForRepo :: IO FilePath +searchForRepo = do d <- getCurrentDirectory + f d + where f d = do let fp = d FilePath. toFilePath repoBase + exists <- doesDirectoryExist fp hunk ./camp-repository/Camp/Repository.hs 330 - then return r - else do let d' = takeDirectory d + then return d + else do let d' = FilePath.takeDirectory d hunk ./camp-repository/Camp/Repository.hs 339 -initialiseRepo :: Repository -> IO () -initialiseRepo r = do createDirectory (logsDir r) - createDirectory (repoRoot r) +initialiseRepo :: LockedRepository -> IO () +initialiseRepo r = do createDirectory (inRepo r logsDir) + createDirectory (inRepo r repoRoot) hunk ./camp-repository/Camp/Repository.hs 343 - createDirectory (pristineDir r) + createDirectory (inRepo r pristineDir) hunk ./camp-repository/Camp/Repository.hs 348 -genName :: Repository -> IO Name +genName :: LockedRepository -> IO Name hunk ./camp-repository/Camp/Repository.hs 369 -getAuthor :: Repository -> IO (Maybe ByteString) -getAuthor r = do m1 <- maybeReadLine $ authorFile r +getAuthor :: LockedRepository -> IO (Maybe ByteString) +getAuthor r = do m1 <- maybeReadLine $ inRepo r authorFile hunk ./camp-repository/Camp/Repository.hs 374 - maybeReadLine (globalCampDir "author") + maybeReadLine (globalCampDir FilePath. 
"author") hunk ./camp-repository/Camp/Repository.hs 387 -nextLogFile :: Repository -> FilePath -nextLogFile r = repoBase r "nextLog" +nextLogFile :: InRepoFileName +nextLogFile = repoBase fromString "nextLog" hunk ./camp-repository/Camp/Repository.hs 390 -logsDir :: Repository -> FilePath -logsDir r = repoBase r "logs" +logsDir :: InRepoFileName +logsDir = repoBase fromString "logs" hunk ./camp-repository/Camp/Repository.hs 393 -copyRepo :: Repository -> FilePath -> IO () -copyRepo r to = copyTree (repoRoot r) to +copyRepo :: Repository r => r -> FilePath -> IO () +copyRepo r to = copyTree (inRepo r repoRoot) to hunk ./camp-bin/Camp/Curl.hsc 1 - --- We don't know what type CURLcode (for example) is going to be, so we --- don't know whether or not we need Data.Int and Data.Word: -{-# OPTIONS_GHC -fno-warn-unused-imports #-} - -module Camp.Curl ( - withGlobalCurl, withCurl, perform, - setURL, setRange, unsetRange, - setSSHPrivateKey, setSSHPublicKey, setVerbosity, - setWriteFunction, makeWriteFunction, - setDebugFunction, makeDebugFunction - ) where - -#include - -import Camp.Die - -import Control.Exception -import Control.Monad -import Data.Int -import Data.Word -import Foreign -import Foreign.C - -type URL = String - -type CURLcode = #type CURLcode -type CURLoption = #type CURLoption --- type CurlOff = #type curl_off_t -type CurlInfo = #type curl_infotype - -data CurlHandle -newtype Curl = Curl (Ptr CurlHandle) - --- We always use CURL_GLOBAL_ALL rather than trying to predict what --- we'll want to use -withGlobalCurl :: IO a -> IO a -withGlobalCurl = bracket_ initialise cleanup - where initialise = do cc <- curl_global_init initialiseAll - checkForCurlException cc - cleanup = curl_global_cleanup - -initialiseAll :: CLong -initialiseAll = #const CURL_GLOBAL_ALL - -withCurl :: (Curl -> IO a) -> IO a -withCurl = bracket initialise cleanup - where initialise = do c@(Curl p) <- curl_easy_init - when (p == nullPtr) $ - die "Curl init failed" - return c - cleanup = curl_easy_cleanup - -perform :: Curl -> IO () -perform c = do cc <- curl_easy_perform c - checkForCurlException cc - -setVerbosity :: Curl -> Bool -> IO () -setVerbosity c verbose = do cc <- curl_easy_setopt_long c opt v - checkForCurlException cc - where opt = #const CURLOPT_VERBOSE - v = if verbose then 1 else 0 - -setURL :: Curl -> URL -> IO () -setURL c url = do cc <- withCString url $ curl_easy_setopt_ptr c opt - checkForCurlException cc - where opt = #const CURLOPT_URL - -setRange :: Curl -> Integer -> Integer -> IO () -setRange c from to = do cc <- withCString (show from ++ "-" ++ show to) $ - curl_easy_setopt_ptr c opt - checkForCurlException cc - where opt = #const CURLOPT_RANGE - -unsetRange :: Curl -> IO () -unsetRange c = do cc <- curl_easy_setopt_ptr c opt nullPtr - checkForCurlException cc - where opt = #const CURLOPT_RANGE - -setSSHPrivateKey :: Curl -> FilePath -> IO () -setSSHPrivateKey c fp = do cc <- withCString fp $ curl_easy_setopt_ptr c opt - checkForCurlException cc - where opt = #const CURLOPT_SSH_PRIVATE_KEYFILE - -setSSHPublicKey :: Curl -> FilePath -> IO () -setSSHPublicKey c fp = do cc <- withCString fp $ curl_easy_setopt_ptr c opt - checkForCurlException cc - where opt = #const CURLOPT_SSH_PUBLIC_KEYFILE - -setWriteFunction :: Curl -> FunPtr WriteFunction -> IO () -setWriteFunction c wf = - do cc <- curl_easy_setopt_funptr c opt wf - checkForCurlException cc - where opt = #const CURLOPT_WRITEFUNCTION - -setDebugFunction :: Curl -> FunPtr DebugFunction -> IO () -setDebugFunction c df = - do cc <- 
curl_easy_setopt_funptr c opt df - checkForCurlException cc - where opt = #const CURLOPT_DEBUGFUNCTION - -checkForCurlException :: CURLcode -> IO () -checkForCurlException cc - = when (cc /= 0) $ - do err <- curl_easy_strerror cc - str <- peekCString err - die ("Curl failed with code " ++ show cc ++ ": " ++ str) - --- CURLcode curl_global_init(long flags ); -foreign import ccall unsafe "curl_global_init" - curl_global_init :: CLong -> IO CURLcode - --- void curl_global_cleanup(void); -foreign import ccall unsafe "curl_global_cleanup" - curl_global_cleanup :: IO () - --- CURL *curl_easy_init( ); -foreign import ccall unsafe "curl_easy_init" - curl_easy_init :: IO Curl - --- void curl_easy_cleanup(CURL * handle ); -foreign import ccall unsafe "curl_easy_cleanup" - curl_easy_cleanup :: Curl -> IO () - --- CURLcode curl_easy_perform(CURL * handle ); -foreign import ccall safe "curl_easy_perform" - curl_easy_perform :: Curl -> IO CURLcode - --- CURLcode curl_easy_setopt(CURL *handle, CURLoption option, parameter); --- parameter can be: --- a long --- a function pointer --- an object pointer --- a curl_off_t -foreign import ccall unsafe "curl_easy_setopt_long" - curl_easy_setopt_long :: Curl -> CURLoption -> CLong -> IO CURLcode -foreign import ccall unsafe "curl_easy_setopt_funptr" - curl_easy_setopt_funptr :: Curl -> CURLoption -> FunPtr a -> IO CURLcode -foreign import ccall unsafe "curl_easy_setopt_ptr" - curl_easy_setopt_ptr :: Curl -> CURLoption -> Ptr a -> IO CURLcode -{- -foreign import ccall unsafe "curl_easy_setopt_off" - curl_easy_setopt_off :: Curl -> CURLoption -> CurlOff -> IO CURLcode --} - --- CURLOPT_WRITEFUNCTION --- size_t function( void *ptr, size_t size, size_t nmemb, void *stream) -type WriteFunction = CString -> CSize -> CSize -> Ptr () -> IO CSize - -foreign import ccall "wrapper" - makeWriteFunction :: WriteFunction -> IO (FunPtr WriteFunction) - --- CURLOPT_DEBUGFUNCTION --- int curl_debug_callback (CURL *, curl_infotype, char *, size_t, void *); -type DebugFunction = Curl -> CurlInfo -> CString -> CSize -> Ptr () -> IO CInt - -foreign import ccall "wrapper" - makeDebugFunction :: DebugFunction -> IO (FunPtr DebugFunction) - --- const char *curl_easy_strerror(CURLcode errornum ); -foreign import ccall unsafe "curl_easy_strerror" - curl_easy_strerror :: CURLcode -> IO CString - rmfile ./camp-bin/Camp/Curl.hsc hunk ./camp-bin/Camp/Main.hs 11 -import Camp.Curl hunk ./camp-bin/Camp/Main.hs 12 +import Camp.Network hunk ./camp-bin/camp.cabal 27 - Camp.Curl hunk ./camp-bin/camp.cabal 28 - C-Sources: cbits/curl.c hunk ./camp-bin/camp.cabal 31 - Build-Depends: base, bytestring, camp-core, camp-repository, + Build-Depends: base, bytestring, camp-core, camp-network, camp-repository, hunk ./camp-bin/camp.cabal 34 - Pkgconfig-Depends: libcurl - hunk ./camp-bin/cbits/curl.c 1 - -#include - -CURLcode -curl_easy_setopt_long(CURL *handle, CURLoption option, long parameter) { - curl_easy_setopt(handle, option, parameter); -} - -CURLcode -curl_easy_setopt_ptr(CURL *handle, CURLoption option, void *parameter) { - curl_easy_setopt(handle, option, parameter); -} - -CURLcode -curl_easy_setopt_funptr(CURL *handle, CURLoption option, void *parameter) { - curl_easy_setopt(handle, option, parameter); -} - -CURLcode -curl_easy_setopt_off(CURL *handle, CURLoption option, curl_off_t parameter) { - curl_easy_setopt(handle, option, parameter); -} rmfile ./camp-bin/cbits/curl.c rmdir ./camp-bin/cbits adddir ./camp-network adddir ./camp-network/Camp addfile ./camp-network/Camp/Curl.hsc hunk 
./camp-network/Camp/Curl.hsc 1 + +-- We don't know what type CURLcode (for example) is going to be, so we +-- don't know whether or not we need Data.Int and Data.Word: +{-# OPTIONS_GHC -fno-warn-unused-imports #-} + +module Camp.Curl ( + withGlobalCurl, withCurl, perform, + setURL, setRange, unsetRange, + setSSHPrivateKey, setSSHPublicKey, setVerbosity, + setWriteFunction, makeWriteFunction, + setDebugFunction, makeDebugFunction + ) where + +#include + +import Control.Exception +import Control.Monad +import Data.Int +import Data.Word +import Foreign +import Foreign.C + +type URL = String + +type CURLcode = #type CURLcode +type CURLoption = #type CURLoption +-- type CurlOff = #type curl_off_t +type CurlInfo = #type curl_infotype + +data CurlHandle +newtype Curl = Curl (Ptr CurlHandle) + +-- We always use CURL_GLOBAL_ALL rather than trying to predict what +-- we'll want to use +withGlobalCurl :: IO a -> IO a +withGlobalCurl = bracket_ initialise cleanup + where initialise = do cc <- curl_global_init initialiseAll + checkForCurlException cc + cleanup = curl_global_cleanup + +initialiseAll :: CLong +initialiseAll = #const CURL_GLOBAL_ALL + +withCurl :: (Curl -> IO a) -> IO a +withCurl = bracket initialise cleanup + where initialise = do c@(Curl p) <- curl_easy_init + when (p == nullPtr) $ + error "Curl init failed" + return c + cleanup = curl_easy_cleanup + +perform :: Curl -> IO () +perform c = do cc <- curl_easy_perform c + checkForCurlException cc + +setVerbosity :: Curl -> Bool -> IO () +setVerbosity c verbose = do cc <- curl_easy_setopt_long c opt v + checkForCurlException cc + where opt = #const CURLOPT_VERBOSE + v = if verbose then 1 else 0 + +setURL :: Curl -> URL -> IO () +setURL c url = do cc <- withCString url $ curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_URL + +setRange :: Curl -> Integer -> Integer -> IO () +setRange c from to = do cc <- withCString (show from ++ "-" ++ show to) $ + curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_RANGE + +unsetRange :: Curl -> IO () +unsetRange c = do cc <- curl_easy_setopt_ptr c opt nullPtr + checkForCurlException cc + where opt = #const CURLOPT_RANGE + +setSSHPrivateKey :: Curl -> FilePath -> IO () +setSSHPrivateKey c fp = do cc <- withCString fp $ curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_SSH_PRIVATE_KEYFILE + +setSSHPublicKey :: Curl -> FilePath -> IO () +setSSHPublicKey c fp = do cc <- withCString fp $ curl_easy_setopt_ptr c opt + checkForCurlException cc + where opt = #const CURLOPT_SSH_PUBLIC_KEYFILE + +setWriteFunction :: Curl -> FunPtr WriteFunction -> IO () +setWriteFunction c wf = + do cc <- curl_easy_setopt_funptr c opt wf + checkForCurlException cc + where opt = #const CURLOPT_WRITEFUNCTION + +setDebugFunction :: Curl -> FunPtr DebugFunction -> IO () +setDebugFunction c df = + do cc <- curl_easy_setopt_funptr c opt df + checkForCurlException cc + where opt = #const CURLOPT_DEBUGFUNCTION + +checkForCurlException :: CURLcode -> IO () +checkForCurlException cc + = when (cc /= 0) $ + do err <- curl_easy_strerror cc + str <- peekCString err + error ("Curl failed with code " ++ show cc ++ ": " ++ str) + +-- CURLcode curl_global_init(long flags ); +foreign import ccall unsafe "curl_global_init" + curl_global_init :: CLong -> IO CURLcode + +-- void curl_global_cleanup(void); +foreign import ccall unsafe "curl_global_cleanup" + curl_global_cleanup :: IO () + +-- CURL *curl_easy_init( ); +foreign import ccall unsafe 
"curl_easy_init" + curl_easy_init :: IO Curl + +-- void curl_easy_cleanup(CURL * handle ); +foreign import ccall unsafe "curl_easy_cleanup" + curl_easy_cleanup :: Curl -> IO () + +-- CURLcode curl_easy_perform(CURL * handle ); +foreign import ccall safe "curl_easy_perform" + curl_easy_perform :: Curl -> IO CURLcode + +-- CURLcode curl_easy_setopt(CURL *handle, CURLoption option, parameter); +-- parameter can be: +-- a long +-- a function pointer +-- an object pointer +-- a curl_off_t +foreign import ccall unsafe "curl_easy_setopt_long" + curl_easy_setopt_long :: Curl -> CURLoption -> CLong -> IO CURLcode +foreign import ccall unsafe "curl_easy_setopt_funptr" + curl_easy_setopt_funptr :: Curl -> CURLoption -> FunPtr a -> IO CURLcode +foreign import ccall unsafe "curl_easy_setopt_ptr" + curl_easy_setopt_ptr :: Curl -> CURLoption -> Ptr a -> IO CURLcode +{- +foreign import ccall unsafe "curl_easy_setopt_off" + curl_easy_setopt_off :: Curl -> CURLoption -> CurlOff -> IO CURLcode +-} + +-- CURLOPT_WRITEFUNCTION +-- size_t function( void *ptr, size_t size, size_t nmemb, void *stream) +type WriteFunction = CString -> CSize -> CSize -> Ptr () -> IO CSize + +foreign import ccall "wrapper" + makeWriteFunction :: WriteFunction -> IO (FunPtr WriteFunction) + +-- CURLOPT_DEBUGFUNCTION +-- int curl_debug_callback (CURL *, curl_infotype, char *, size_t, void *); +type DebugFunction = Curl -> CurlInfo -> CString -> CSize -> Ptr () -> IO CInt + +foreign import ccall "wrapper" + makeDebugFunction :: DebugFunction -> IO (FunPtr DebugFunction) + +-- const char *curl_easy_strerror(CURLcode errornum ); +foreign import ccall unsafe "curl_easy_strerror" + curl_easy_strerror :: CURLcode -> IO CString + addfile ./camp-network/Camp/Network.hs hunk ./camp-network/Camp/Network.hs 1 + +module Camp.Network (module Camp.Curl) where + +import Camp.Curl + addfile ./camp-network/LICENSE hunk ./camp-network/LICENSE 1 +Copyright (c) Ian Lynagh, 2008. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the Authors nor the names of any contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. 
+ addfile ./camp-network/Setup.hs hunk ./camp-network/Setup.hs 1 + +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain + addfile ./camp-network/camp-network.cabal hunk ./camp-network/camp-network.cabal 1 +Name: camp-network +Version: 0.1 +License: BSD3 +License-File: LICENSE +Copyright: 2008 Ian Lynagh +Author: Ian Lynagh +Maintainer: Ian Lynagh +Synopsis: Camp +Description: + Camp (Commute And Merge Patches) +Category: Development +Build-Type: Simple +Cabal-Version: >=1.2 + +Library + Exposed-Modules: + Camp.Network + Other-Modules: + Camp.Curl + C-Sources: cbits/curl.c + + Extensions: ForeignFunctionInterface, EmptyDataDecls + Ghc-Options: -Wall -fwarn-tabs -Werror + + Build-Depends: base, bytestring + + Pkgconfig-Depends: libcurl + adddir ./camp-network/cbits addfile ./camp-network/cbits/curl.c hunk ./camp-network/cbits/curl.c 1 + +#include + +CURLcode +curl_easy_setopt_long(CURL *handle, CURLoption option, long parameter) { + curl_easy_setopt(handle, option, parameter); +} + +CURLcode +curl_easy_setopt_ptr(CURL *handle, CURLoption option, void *parameter) { + curl_easy_setopt(handle, option, parameter); +} + +CURLcode +curl_easy_setopt_funptr(CURL *handle, CURLoption option, void *parameter) { + curl_easy_setopt(handle, option, parameter); +} + +CURLcode +curl_easy_setopt_off(CURL *handle, CURLoption option, curl_off_t parameter) { + curl_easy_setopt(handle, option, parameter); +} hunk ./camp-bin/Camp/Command/Get.hs 27 - remoteInventory <- readInventoryLazily remoteRepo - patches <- getMegaPatches gf remoteRepo remoteInventory + patches <- getMegaPatches gf remoteRepo hunk ./camp-bin/Camp/Command/Get.hs 32 - remoteInventory' <- readInventoryLazily remoteRepo + remoteInventory' <- readInventoryLazily gf remoteRepo hunk ./camp-bin/Camp/Command/Get.hs 34 - remoteInventory' + remoteInventory' hunk ./camp-bin/Camp/Command/Get.hs 47 - localInventoryApply <- readInventoryLazily localRepo + localInventoryApply <- readInventoryLazily gf localRepo hunk ./camp-bin/Camp/Command/Inventory.hs 12 -inventory _ [] = withLockedRepoSearch $ \r -> do - i <- readInventory r - let ns = [ n | InventoryItem n _ _ _ <- i ] - mapM_ (putStrLn . pprint) ns -inventory _ [remoteRepoPath] +inventory gf [] = withLockedRepoSearch $ \r -> do + i <- readInventory gf r + let ns = [ n | InventoryItem n _ _ _ <- i ] + mapM_ (putStrLn . pprint) ns +inventory gf [remoteRepoPath] hunk ./camp-bin/Camp/Command/Inventory.hs 19 - localInventory <- readInventory localRepo - remoteInventory <- readInventory remoteRepo + localInventory <- readInventory gf localRepo + remoteInventory <- readInventory gf remoteRepo hunk ./camp-bin/Camp/Command/Pull.hs 32 - localInventory <- readInventory localRepo + localInventory <- readInventory gf localRepo hunk ./camp-bin/Camp/Command/Pull.hs 34 - remoteInventory <- readInventory remoteRepo + remoteInventory <- readInventory gf remoteRepo hunk ./camp-bin/Camp/Command/Record.hs 43 - n <- genName r + n <- genName gf r hunk ./camp-bin/Camp/Command/Show.hs 11 -showC _ [wanted] +showC gf [wanted] hunk ./camp-bin/Camp/Command/Show.hs 13 - inventory <- readInventory r + inventory <- readInventory gf r hunk ./camp-network/Camp/Curl.hsc 7 - withGlobalCurl, withCurl, perform, + withGlobalCurl, + Content(..), + downloadToTemporaryFile, downloadContentsToTemporaryFile, + -- XXX Do we need to export anything below here? 
+ withCurl, + perform, hunk ./camp-network/Camp/Curl.hsc 27 +import System.IO hunk ./camp-network/Camp/Curl.hsc 168 +writeToHandle :: Handle -> CString -> CSize -> CSize -> Ptr () -> IO CSize +writeToHandle h str size memb _ = do let len = size * memb + hPutBuf h str (fromIntegral len) + return len + +downloadToTemporaryFile :: URL -> IO FilePath +downloadToTemporaryFile url = + withCurl $ \curl -> do + bracket (openBinaryTempFile "/tmp"{- XXX -} "camp.tmp") + (\(_, h) -> hClose h) + (\(fp, h) -> + bracket (makeWriteFunction (writeToHandle h)) + (\wfp -> do setWriteFunction curl nullFunPtr + freeHaskellFunPtr wfp) + (\wfp -> do setWriteFunction curl wfp + setURL curl url + perform curl + return fp)) + +downloadContentsToTemporaryFile :: [Content] -> IO FilePath +downloadContentsToTemporaryFile cs = + withCurl $ \curl -> do + bracket (openBinaryTempFile "/tmp"{- XXX -} "camp.tmp") + (\(_, h) -> hClose h) + (\(fp, h) -> + bracket (makeWriteFunction (writeToHandle h)) + (\wfp -> do setWriteFunction curl nullFunPtr + freeHaskellFunPtr wfp) + (\wfp -> do setWriteFunction curl wfp + let f (Content url from len) = do + setURL curl url + let to = from + len - 1 + setRange curl (toInteger from) + (toInteger to) + perform curl + mapM_ f $ simplifyContents cs + return fp)) + +data Content = Content FilePath Int64{- XXX Bytes -}{- from -} Int64{- XXX Bytes -}{- length -} + +-- When we have a list of contents like +-- myfile 100 3 +-- myfile 103 6 +-- we don't want to download bytes 100-102 and 103-108 separately, so +-- this function will merge them into a single content of 100-108. +simplifyContents :: [Content] -> [Content] +simplifyContents [] = [] +simplifyContents (Content fp from len : is) + = let f cur [] = [Content fp from (cur - from)] + f cur is'@(Content fp' from' len' : is'') + | (fp == fp') && (cur == from') = f (cur + len') is'' + | otherwise = Content fp from (cur - from) : simplifyContents is' + in f (from + len) is + hunk ./camp-repository/Camp/Inventory.hs 10 - Content(..), hunk ./camp-repository/Camp/Inventory.hs 15 +import Camp.Network hunk ./camp-repository/Camp/Inventory.hs 67 -data Content = Content FilePath Bytes Bytes - --- When we have an inventory like --- name1 myfile 100 3 --- name2 myfile 103 6 --- we don't want to download bytes 100-102 and 103-108 separately, so --- this function will merge them into a single content of 100-108. --- XXX Should getContents be the one doing the joining? hunk ./camp-repository/Camp/Inventory.hs 68 -inventoryToContents _ [] = [] -inventoryToContents root (InventoryItem _ fn from len : is) - = let f cur [] = [Content (root InRepoFileName.toFilePath fn) from (cur - from)] - f cur is'@(InventoryItem _ fn' from' len' : is'') - | (fn == fn') && (cur == from') = f (cur + len') is'' - | otherwise = Content (root InRepoFileName.toFilePath fn) from (cur - from) - : inventoryToContents root is' - in f (from + len) is +inventoryToContents root is = map (inventoryItemToContent root) is + +inventoryItemToContent :: FilePath -> InventoryItem -> Content +inventoryItemToContent root (InventoryItem _ fn from len) + = Content (root InRepoFileName.toFilePath fn) from len hunk ./camp-repository/Camp/Repository.hs 40 +import Camp.Network hunk ./camp-repository/Camp/Repository.hs 66 +import System.IO.Error hunk ./camp-repository/Camp/Repository.hs 79 + -- XXX Should we also cache the relative inventory? 
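The simplifyContents function added to Camp/Curl.hsc above coalesces adjacent byte ranges in the same file before they are fetched, as its comment describes. A minimal standalone sketch of that merging behaviour (not part of the patches; the local Content type, the simplify name and the sample offsets are illustrative assumptions only):

module Main (main) where

-- Simplified model of the Content type from Camp/Curl.hsc: a file name,
-- a starting offset and a length in bytes.
data Content = Content FilePath Integer Integer
    deriving Show

-- Mirrors simplifyContents: a range that starts exactly where the previous
-- one ended, in the same file, is merged into a single larger range.
simplify :: [Content] -> [Content]
simplify [] = []
simplify (Content fp from len : is)
    = let f cur [] = [Content fp from (cur - from)]
          f cur is'@(Content fp' from' len' : is'')
              | (fp == fp') && (cur == from') = f (cur + len') is''
              | otherwise = Content fp from (cur - from) : simplify is'
      in f (from + len) is

main :: IO ()
main = print (simplify [Content "myfile" 100 3, Content "myfile" 103 6])
-- prints [Content "myfile" 100 9], i.e. bytes 100-108 in a single request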
hunk ./camp-repository/Camp/Repository.hs 83 + getInventory :: GeneralFlags -> r -> IO ByteString + getMegaPatches :: GeneralFlags -> r -> IO ByteString hunk ./camp-repository/Camp/Repository.hs 88 + getInventory _ r = BS.readFile (inRepo r inventoryFile) + getMegaPatches gf r = do inv <- readInventory gf r + getMegaPatchesWithInventory gf r inv hunk ./camp-repository/Camp/Repository.hs 94 + getInventory _ r@(UnlockedRepository _ ref _) + = do mfp <- readIORef ref + case mfp of + Just fp -> BS.readFile fp + Nothing -> do + let repoInventory = inRepo r inventoryFile + fp <- bracketOnError (downloadToTemporaryFile repoInventory) + (\fp -> do tryJust (guard . isDoesNotExistError) + (removeFile fp)) + (\fp -> do bs <- BS.readFile fp + case valid bs (undefined :: Stream InventoryItem) of + Left (err, _) -> error ("XXX Bad remote inventory " ++ err) + Right bs' -> + unless (BS.null bs') $ error "XXX too many bytes" + writeIORef ref (Just fp) + return fp) + BS.readFile fp + getMegaPatches gf r@(UnlockedRepository _ _ ref) + = do mfp <- readIORef ref + case mfp of + Just fp -> BS.readFile fp + Nothing -> + do inv1 <- readInventory gf r + let cs = inventoryToContents (repoLocation r) inv1 + fp <- bracketOnError (downloadContentsToTemporaryFile cs) + (\fp -> do tryJust (guard . isDoesNotExistError) + (removeFile fp) + writeIORef ref Nothing) + (\fp -> do inv2 <- readInventory gf r + let relInv2 = inventoryToCompactRelativeInventory inv2 + bs <- BS.readFile fp + checkInventoryMatchesPatches relInv2 bs + writeIORef ref (Just fp) + return fp) + BS.readFile fp + +checkInventoryMatchesPatches :: [CompactRelativeInventoryItem] -> ByteString -> IO () +checkInventoryMatchesPatches [] bs + = unless (BS.null bs) $ error "XXX too many bytes" +checkInventoryMatchesPatches (CompactRelativeInventoryItem _ len : is) bs + -- XXX This should be a splitAt that complains if there aren't + -- enough bytes + = case BS.splitAt len bs of + (this, rest) -> + case valid this (undefined :: MegaPatch from to) of + Left (err, _) -> error ("XXX checkInventoryMatchesPatches " ++ err) + Right bs' -> + do unless (BS.null bs') $ error "XXX too many bytes" + checkInventoryMatchesPatches is rest hunk ./camp-repository/Camp/Repository.hs 176 --- XXX Need a safe variant for remote repos, "camp check", etc --- We don't use readFile because we want to strictly read the inventory. -readInventory :: Repository r => r -> IO [InventoryItem] -readInventory r - = do h <- openBinaryFile (inRepo r inventoryFile) ReadMode - size <- hFileSize h - content <- BS.hGet h (fromIntegral size) - hClose h - -- XXX check snd == ""? - case fst $ input content of - Stream is -> return is +-- XXX Should have a strict variant? 
+readInventory :: Repository r => GeneralFlags -> r -> IO [InventoryItem] +readInventory = readInventoryLazily hunk ./camp-repository/Camp/Repository.hs 180 --- XXX Need a safe variant for remote repos, "camp check", etc -readInventoryLazily :: Repository r => r -> IO [InventoryItem] -readInventoryLazily r - = do content <- BS.readFile (inRepo r inventoryFile) +readInventoryLazily :: Repository r => GeneralFlags -> r -> IO [InventoryItem] +readInventoryLazily gf r + = do content <- getInventory gf r hunk ./camp-repository/Camp/Repository.hs 199 - i1 <- readInventoryLazily r - i2 <- readInventoryLazily r + i1 <- readInventoryLazily gf r + i2 <- readInventoryLazily gf r hunk ./camp-repository/Camp/Repository.hs 202 - bs <- getMegaPatches gf r i2 - check ri bs - where check [] bs = unless (BS.null bs) $ error "XXX too many bytes" - check (CompactRelativeInventoryItem _ len : is) bs - -- XXX This should be a splitAt that complains if there aren't - -- enough bytes - = case BS.splitAt len bs of - (this, rest) -> - case valid this (undefined :: MegaPatch from to) of - Left (err, _) -> error ("XXX checkInventory " ++ err) - Right bs' -> - do unless (BS.null bs') $ error "XXX too many bytes" - check is rest + bs <- getMegaPatchesWithInventory gf r i2 + checkInventoryMatchesPatches ri bs hunk ./camp-repository/Camp/Repository.hs 234 -getMegaPatches :: Repository r - => GeneralFlags -> r -> [InventoryItem] -> IO ByteString -getMegaPatches gf r is +getMegaPatchesWithInventory :: Repository r + => GeneralFlags -> r -> [InventoryItem] + -> IO ByteString +getMegaPatchesWithInventory gf r is hunk ./camp-repository/Camp/Repository.hs 260 -readMegaPatches gf r ns = do content <- getMegaPatches gf r ns +readMegaPatches gf r ns = do content <- getMegaPatchesWithInventory gf r ns hunk ./camp-repository/Camp/Repository.hs 386 -genName :: LockedRepository -> IO Name -genName r +genName :: GeneralFlags -> LockedRepository -> IO Name +genName gf r hunk ./camp-repository/Camp/Repository.hs 390 - inv <- readInventory r + inv <- readInventory gf r hunk ./camp-repository/camp-repository.cabal 31 - Build-Depends: base, bytestring, camp-core, directory, filepath, old-time + Build-Depends: base, bytestring, camp-core, camp-network, + directory, filepath, old-time hunk ./camp-bin/Camp/Command/Get.hs 25 - withLog gf localRepo $ \l -> do - logRepo l "remote" remoteRepo + withLog gf localRepo $ \_l -> do + -- XXX can't log unlocked repos ATM: + -- logRepo l "remote" remoteRepo hunk ./camp-bin/Camp/Command/Get.hs 31 - -- XXX We need to make a local copy rather than reading from - -- the remote twice - remoteInventory' <- readInventoryLazily gf remoteRepo - let relativeInventory = inventoryToCompactRelativeInventory - remoteInventory' - localInventory = compactRelativeInventoryToInventory - filename offset relativeInventory + remoteInventory <- readInventoryLazily gf remoteRepo + let localInventory = compactInventoryWithOffset + filename + offset + remoteInventory hunk ./camp-bin/Camp/Command/Get.hs 38 - -- The remote repo may not be correctly formed, so we need - -- to check that each element of the inventory actually - -- points to a syntactically valid patch. Just reading the - -- patches when we apply them won't do it, as we read the - -- whole contents and then read it as a list of patches, so - -- the boundary between patches may be in the wrong place. 
- checkInventory gf localRepo - - localInventoryApply <- readInventoryLazily gf localRepo - patchesApply <- readMegaPatches gf localRepo localInventoryApply + patchesApply <- readMegaPatches gf localRepo hunk ./camp-bin/Camp/Command/Pull.hs 28 - withLog gf localRepo $ \l -> do + withLog gf localRepo $ \_l -> do hunk ./camp-bin/Camp/Command/Pull.hs 30 - logRepo l "remote" remoteRepo + -- XXX can't log unlocked repos ATM: + -- logRepo l "remote" remoteRepo hunk ./camp-bin/Camp/Command/Pull.hs 33 - localInventory <- readInventory gf localRepo + localInventory <- readLocalInventory gf localRepo hunk ./camp-bin/Camp/Command/Pull.hs 35 - remoteInventory <- readInventory gf remoteRepo + remoteInventory <- readLocalInventory gf remoteRepo + -- XXX We've actually just downloaded the entire patchfile + -- in order to do readLocalInventory, even though we're + -- about to ignore (hopefully) most of it. Need to + -- restructure to avoid that. hunk ./camp-bin/Camp/Command/Pull.hs 55 - localReadPatches <- readMegaPatches gf localRepo localReadInventory - remoteReadPatches <- readMegaPatches gf remoteRepo remoteReadInventory + localReadPatches <- readMegaPatchesWithAbsoluteInventory gf localReadInventory + remoteReadPatches <- readMegaPatchesWithAbsoluteInventory gf remoteReadInventory hunk ./camp-bin/Camp/Command/Pull.hs 67 - doPull gf localRepo localInventory localPatches remotePatches' + doPull gf localRepo localPatches remotePatches' hunk ./camp-bin/Camp/Command/Pull.hs 69 - do doPull gf localRepo localInventory localPatches remotePatches + do doPull gf localRepo localPatches remotePatches hunk ./camp-bin/Camp/Command/Pull.hs 75 - doPull gf localRepo localInventory localPatches remotePatches' + doPull gf localRepo localPatches remotePatches' hunk ./camp-bin/Camp/Command/Pull.hs 85 -doPull :: GeneralFlags -> LockedRepository -> [InventoryItem] +doPull :: GeneralFlags -> LockedRepository hunk ./camp-bin/Camp/Command/Pull.hs 88 -doPull gf localRepo localInventory localPatches remotePatches = +doPull gf localRepo localPatches remotePatches = hunk ./camp-bin/Camp/Command/Pull.hs 96 + localInventory <- readInventory gf localRepo hunk ./camp-bin/Camp/Command/Record.hs 44 - directory <- recordDirectory (inRepo r pristineDir) "." + directory <- recordDirectory (inRepoPath r pristineDir) "." 
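The Get command above now rebuilds its local inventory with compactInventoryWithOffset, which is used in the Camp/Command/Get.hs hunk above and defined in a later Camp/Inventory.hs hunk: every item from the remote inventory keeps its name and length but is repointed at the single local patch file, at consecutive offsets. A minimal standalone sketch of that calculation (the Item type, the names and the sample numbers are illustrative assumptions, not the real Camp.Inventory types):

module Main (main) where

-- Simplified stand-in for an inventory item: patch name, file, offset, length.
data Item file = Item String file Integer Integer
    deriving Show

-- Mirrors compactInventoryWithOffset: keep names and lengths, point every
-- item at the one given file, packed at consecutive offsets.
compactWithOffset :: file -> Integer -> [Item a] -> [Item file]
compactWithOffset f = compact
    where compact _ [] = []
          compact offset (Item n _ _ len : is)
              = Item n f offset len : compact (offset + len) is

main :: IO ()
main = mapM_ print (compactWithOffset "_camp/repo/patches/patchFile" 0
                        [ Item "patch-a" "remote" 512 10
                        , Item "patch-b" "remote" 522 20 ])
-- prints the two items at offsets 0 and 10 in the local patch file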
hunk ./camp-bin/Camp/Command/Show.hs 7 -import Camp.Patch.Pretty +-- XXX import Camp.Patch.Pretty hunk ./camp-bin/Camp/Command/Show.hs 24 - inventoryLine = case filter isWanted inventory of + _inventoryLine = case filter isWanted inventory of hunk ./camp-bin/Camp/Command/Show.hs 27 - patch <- readMegaPatch r inventoryLine - putStrLn $ pprint patch + _patch <- error "XXX" -- readMegaPatch r inventoryLine + error "XXX" -- putStrLn $ pprint patch hunk ./camp-repository/Camp/Inventory.hs 6 + Inventory, + InRepoInventory, + AbsoluteInventory, + InRepoInventoryItem, + AbsoluteInventoryItem, hunk ./camp-repository/Camp/Inventory.hs 12 - CompactRelativeInventoryItem(..), - inventoryToCompactRelativeInventory, - compactRelativeInventoryToInventory, + toAbsoluteInventory, + compactInventory, + compactInventoryWithOffset, + absoluteInventoryToContents, hunk ./camp-repository/Camp/Inventory.hs 32 +type Inventory a = [InventoryItem a] + hunk ./camp-repository/Camp/Inventory.hs 35 -data InventoryItem = InventoryItem Name -- MegaPatch name - InRepoFileName -- Filename (relative to - -- working directory) - Bytes -- Offset - Bytes -- Length +data InventoryItem a = InventoryItem Name -- MegaPatch name + a -- Filename + Bytes -- Offset + Bytes -- Length hunk ./camp-repository/Camp/Inventory.hs 40 -data CompactRelativeInventoryItem - = CompactRelativeInventoryItem Name -- MegaPatch name - Bytes -- Length +-- In an InRepoInventory, filenames are relative to the working directory +type InRepoInventory = [InRepoInventoryItem] +type InRepoInventoryItem = InventoryItem InRepoFileName +-- In an AbsoluteInventory, filenames are absolute FilePath's +type AbsoluteInventory = [AbsoluteInventoryItem] +type AbsoluteInventoryItem = InventoryItem FilePath hunk ./camp-repository/Camp/Inventory.hs 47 -instance InputOutput InventoryItem where +instance InputOutput a => InputOutput (InventoryItem a) where hunk ./camp-repository/Camp/Inventory.hs 64 - `thenValid` (undefined :: InRepoFileName) + `thenValid` (undefined :: a) hunk ./camp-repository/Camp/Inventory.hs 77 -inventoryToContents :: FilePath -> [InventoryItem] -> [Content] +-- XXX Remove or rename me? +inventoryToContents :: FilePath -> InRepoInventory -> [Content] hunk ./camp-repository/Camp/Inventory.hs 81 -inventoryItemToContent :: FilePath -> InventoryItem -> Content +-- XXX Remove or rename me? 
+inventoryItemToContent :: FilePath -> InRepoInventoryItem -> Content hunk ./camp-repository/Camp/Inventory.hs 86 -inventoryToCompactRelativeInventory :: [InventoryItem] - -> [CompactRelativeInventoryItem] -inventoryToCompactRelativeInventory [] = [] -inventoryToCompactRelativeInventory (InventoryItem n _ _ len : is) - = CompactRelativeInventoryItem n len : - inventoryToCompactRelativeInventory is +absoluteInventoryToContents :: AbsoluteInventory -> [Content] +absoluteInventoryToContents is = map absoluteInventoryItemToContent is + +absoluteInventoryItemToContent :: AbsoluteInventoryItem -> Content +absoluteInventoryItemToContent (InventoryItem _ fp from len) + = Content fp from len + +toAbsoluteInventory :: FilePath -> InRepoInventory -> AbsoluteInventory +toAbsoluteInventory fp = map f + where f (InventoryItem n fn from len) + = InventoryItem n (fp </> InRepoFileName.toFilePath fn) from len + +compactInventory :: file -> Inventory a -> Inventory file +compactInventory f = compactInventoryWithOffset f 0 hunk ./camp-repository/Camp/Inventory.hs 101 -compactRelativeInventoryToInventory :: InRepoFileName -> Bytes - -> [CompactRelativeInventoryItem] - -> [InventoryItem] -compactRelativeInventoryToInventory _ _ [] = [] -compactRelativeInventoryToInventory fp offset - (CompactRelativeInventoryItem n len : is) - = InventoryItem n fp offset len - : compactRelativeInventoryToInventory fp (offset + len) is +compactInventoryWithOffset :: file -> Bytes -> Inventory a -> Inventory file +compactInventoryWithOffset f = compact + where compact _ [] = [] + compact offset (InventoryItem n _ _ len : is) + = InventoryItem n f offset len : compact (offset + len) is hunk ./camp-repository/Camp/Logging.hs 23 -mkLog r i = Log (inRepo r logsDir </> show i) +mkLog r i = Log (inRepoPath r logsDir </> show i) hunk ./camp-repository/Camp/Logging.hs 48 - let nlf = inRepo r nextLogFile + let nlf = inRepoPath r nextLogFile hunk ./camp-repository/Camp/Logging.hs 66 -logRepo :: Repository r => Log -> FilePath -> r -> IO () +logRepo :: Log -> FilePath -> LockedRepository -> IO () hunk ./camp-repository/Camp/Repository.hs 13 - readInventory, - readInventoryLazily, + readInventory, -- XXX Shouldn't be exported? + readInventoryLazily, -- XXX Shouldn't be exported? + readLocalInventory, hunk ./camp-repository/Camp/Repository.hs 17 - checkInventory, hunk ./camp-repository/Camp/Repository.hs 21 - readMegaPatch, + -- XXX readMegaPatch, hunk ./camp-repository/Camp/Repository.hs 23 + readMegaPatchesWithAbsoluteInventory, hunk ./camp-repository/Camp/Repository.hs 35 - inRepo, + inRepoURL, + inRepoPath, hunk ./camp-repository/Camp/Repository.hs 84 - repoLocation :: r -> String + repoURL :: r -> String hunk ./camp-repository/Camp/Repository.hs 86 + readLocalInventory :: GeneralFlags -> r -> IO AbsoluteInventory hunk ./camp-repository/Camp/Repository.hs 90 - repoLocation (LockedRepository loc) = loc - getInventory _ r = BS.readFile (inRepo r inventoryFile) + repoURL (LockedRepository fp) = "file://" ++ fp + getInventory _ r = BS.readFile (inRepoPath r inventoryFile) + readLocalInventory gf r@(LockedRepository fp) + = do inv <- readInventory gf r + return $ toAbsoluteInventory fp inv hunk ./camp-repository/Camp/Repository.hs 99 - repoLocation (UnlockedRepository loc _ _) = loc - getInventory _ r@(UnlockedRepository _ ref _) - = do mfp <- readIORef ref - case mfp of - Just fp -> BS.readFile fp - Nothing -> do - let repoInventory = inRepo r inventoryFile - fp <- bracketOnError (downloadToTemporaryFile repoInventory) - (\fp -> do tryJust (guard .
isDoesNotExistError) - (removeFile fp)) - (\fp -> do bs <- BS.readFile fp - case valid bs (undefined :: Stream InventoryItem) of - Left (err, _) -> error ("XXX Bad remote inventory " ++ err) - Right bs' -> - unless (BS.null bs') $ error "XXX too many bytes" - writeIORef ref (Just fp) - return fp) - BS.readFile fp - getMegaPatches gf r@(UnlockedRepository _ _ ref) - = do mfp <- readIORef ref - case mfp of - Just fp -> BS.readFile fp - Nothing -> - do inv1 <- readInventory gf r - let cs = inventoryToContents (repoLocation r) inv1 - fp <- bracketOnError (downloadContentsToTemporaryFile cs) - (\fp -> do tryJust (guard . isDoesNotExistError) - (removeFile fp) - writeIORef ref Nothing) - (\fp -> do inv2 <- readInventory gf r - let relInv2 = inventoryToCompactRelativeInventory inv2 - bs <- BS.readFile fp - checkInventoryMatchesPatches relInv2 bs - writeIORef ref (Just fp) - return fp) - BS.readFile fp + repoURL (UnlockedRepository loc _ _) = loc + getInventory _ r = do fp <- getUnlockedRepositoryInventoryLocalFilePath r + BS.readFile fp + readLocalInventory gf r + = do bs <- getInventory gf r + fp <- getUnlockedRepositoryPatchesLocalFilePath gf r + case (fst $ input bs) :: Stream InRepoInventoryItem of + Stream is -> return $ compactInventory fp is + getMegaPatches gf r + = do fp <- getUnlockedRepositoryPatchesLocalFilePath gf r + BS.readFile fp hunk ./camp-repository/Camp/Repository.hs 111 -checkInventoryMatchesPatches :: [CompactRelativeInventoryItem] -> ByteString -> IO () -checkInventoryMatchesPatches [] bs - = unless (BS.null bs) $ error "XXX too many bytes" -checkInventoryMatchesPatches (CompactRelativeInventoryItem _ len : is) bs - -- XXX This should be a splitAt that complains if there aren't - -- enough bytes - = case BS.splitAt len bs of - (this, rest) -> - case valid this (undefined :: MegaPatch from to) of - Left (err, _) -> error ("XXX checkInventoryMatchesPatches " ++ err) - Right bs' -> - do unless (BS.null bs') $ error "XXX too many bytes" - checkInventoryMatchesPatches is rest +getUnlockedRepositoryInventoryLocalFilePath :: UnlockedRepository + -> IO FilePath +getUnlockedRepositoryInventoryLocalFilePath r@(UnlockedRepository _ ref _) + = do mfp <- readIORef ref + case mfp of + Just fp -> return fp + Nothing -> do + let repoInventory = inRepoURL r inventoryFile + bracketOnError (downloadToTemporaryFile repoInventory) + (\fp -> do tryJust (guard . isDoesNotExistError) + (removeFile fp)) + $ \fp -> + do bs <- BS.readFile fp + case valid bs (undefined :: Stream InRepoInventoryItem) of + Left (err, _) -> + error ("XXX Bad remote inventory " ++ err) + Right bs' -> + unless (BS.null bs') $ error "XXX too many bytes" + writeIORef ref (Just fp) + return fp + +getUnlockedRepositoryPatchesLocalFilePath :: GeneralFlags -> UnlockedRepository + -> IO FilePath +getUnlockedRepositoryPatchesLocalFilePath gf r@(UnlockedRepository _ _ ref) + = do mfp <- readIORef ref + case mfp of + Just fp -> return fp + Nothing -> + do inv1 <- readInventory gf r + let cs = inventoryToContents (repoURL r) inv1 + bracketOnError (downloadContentsToTemporaryFile cs) + (\fp -> do tryJust (guard . isDoesNotExistError) + (removeFile fp) + writeIORef ref Nothing) + $ \fp -> + do -- The remote repo may not be correctly formed, + -- so we need to check that each element of the + -- inventory actually points to a syntactically + -- valid patch. 
Just reading the patches when we + -- apply them won't do it, as we read the whole + -- contents and then read it as a list of + -- patches, so the boundary between patches may + -- be in the wrong place. + inv2 <- readInventory gf r + bs <- BS.readFile fp + checkInventoryMatchesPatches inv2 bs + writeIORef ref (Just fp) + return fp + +checkInventoryMatchesPatches :: InRepoInventory -> ByteString -> IO () +checkInventoryMatchesPatches inv = f (compactInventory noFile inv) + where noFile = error "checkInventoryMatchesPatches: Can't happen" + f [] bs = unless (BS.null bs) $ error "XXX too many bytes" + f (InventoryItem _ _ _ len : is) bs + -- XXX This should be a splitAt that complains if there aren't + -- enough bytes + = case BS.splitAt len bs of + (this, rest) -> + case valid this (undefined :: MegaPatch from to) of + Left (err, _) -> + error ("XXX checkInventoryMatchesPatches " ++ err) + Right bs' -> + do unless (BS.null bs') $ error "XXX too many bytes" + f is rest hunk ./camp-repository/Camp/Repository.hs 202 -appendInventoryItem :: LockedRepository -> InventoryItem -> IO () +appendInventoryItem :: LockedRepository -> InRepoInventoryItem -> IO () hunk ./camp-repository/Camp/Repository.hs 204 - = do h <- openBinaryFile (inRepo r inventoryFile) AppendMode + = do h <- openBinaryFile (inRepoPath r inventoryFile) AppendMode hunk ./camp-repository/Camp/Repository.hs 208 --- XXX Should have a strict variant? -readInventory :: Repository r => GeneralFlags -> r -> IO [InventoryItem] -readInventory = readInventoryLazily +-- XXX write this differently? +readInventory :: Repository r => GeneralFlags -> r -> IO InRepoInventory +readInventory gf r = do inv <- readInventoryLazily gf r + evaluate $ length inv + return inv hunk ./camp-repository/Camp/Repository.hs 214 -readInventoryLazily :: Repository r => GeneralFlags -> r -> IO [InventoryItem] +readInventoryLazily :: Repository r => GeneralFlags -> r -> IO InRepoInventory hunk ./camp-repository/Camp/Repository.hs 222 -writeInventory :: LockedRepository -> [InventoryItem] -> IO () -writeInventory r ns = BS.writeFile (inRepo r inventoryFile) +writeInventory :: LockedRepository -> InRepoInventory -> IO () +writeInventory r ns = BS.writeFile (inRepoPath r inventoryFile) hunk ./camp-repository/Camp/Repository.hs 226 --- Check that each element of the inventory actually --- points to a syntactically valid patch. Just reading the --- patches and validating them won't do it, as it doesn't check --- the boundary between patches are in the right place. -checkInventory :: Repository r => GeneralFlags -> r -> IO () -checkInventory gf r - = do -- Read inventory twice to avoid a spaceleak - i1 <- readInventoryLazily gf r - i2 <- readInventoryLazily gf r - let ri = inventoryToCompactRelativeInventory i1 - bs <- getMegaPatchesWithInventory gf r i2 - checkInventoryMatchesPatches ri bs - -readAdds :: Repository r => r -> IO [FilePath] -readAdds r = do content <- readBinaryFile (inRepo r addsFile) +readAdds :: LockedRepository -> IO [FilePath] +readAdds r = do content <- readBinaryFile (inRepoPath r addsFile) hunk ./camp-repository/Camp/Repository.hs 233 -writeAdds r ns = writeBinaryFile (inRepo r addsFile) (show ns) +writeAdds r ns = writeBinaryFile (inRepoPath r addsFile) (show ns) hunk ./camp-repository/Camp/Repository.hs 242 -inRepo :: Repository r => r -> InRepoFileName -> FilePath -inRepo r fn = repoLocation r FilePath. toFilePath fn +inRepoPath :: LockedRepository -> InRepoFileName -> FilePath +inRepoPath (LockedRepository fp) fn = fp FilePath. 
toFilePath fn + +inRepoURL :: Repository r => r -> InRepoFileName -> FilePath +inRepoURL r fn = repoURL r FilePath. toFilePath fn hunk ./camp-repository/Camp/Repository.hs 251 - = do let fp = inRepo r patchFile + = do let fp = inRepoPath r patchFile hunk ./camp-repository/Camp/Repository.hs 259 - => GeneralFlags -> r -> [InventoryItem] + => GeneralFlags -> r -> InRepoInventory hunk ./camp-repository/Camp/Repository.hs 262 - = getContents gf (inventoryToContents (repoLocation r) is) + = getContents gf (inventoryToContents (repoURL r) is) + +getMegaPatchesWithAbsoluteInventory :: GeneralFlags -> AbsoluteInventory + -> IO ByteString +getMegaPatchesWithAbsoluteInventory gf inv + = getContents gf (absoluteInventoryToContents inv) + +readMegaPatchesWithAbsoluteInventory :: GeneralFlags -> AbsoluteInventory + -> IO (Seq MegaPatch from to) +readMegaPatchesWithAbsoluteInventory gf inv + = do bs <- getMegaPatchesWithAbsoluteInventory gf inv + case input bs of + (Stream2 ps, _{- "" -}) -> return ps hunk ./camp-repository/Camp/Repository.hs 294 -readMegaPatches :: Repository r => GeneralFlags -> r -> [InventoryItem] +readMegaPatches :: Repository r => GeneralFlags -> r hunk ./camp-repository/Camp/Repository.hs 296 -readMegaPatches gf r ns = do content <- getMegaPatchesWithInventory gf r ns - case input content of - (Stream2 s, _{- "" -}) -> return s +readMegaPatches gf r = do content <- getMegaPatches gf r + case input content of + (Stream2 s, _{- "" -}) -> return s hunk ./camp-repository/Camp/Repository.hs 300 --- XXX Is this still used? +{- hunk ./camp-repository/Camp/Repository.hs 310 - = do h <- openBinaryFile (inRepo r fn) ReadMode + = do h <- openBinaryFile (inRepoPath r fn) ReadMode hunk ./camp-repository/Camp/Repository.hs 316 +-} hunk ./camp-repository/Camp/Repository.hs 319 - -> IO [InventoryItem] + -> IO [InRepoInventoryItem] hunk ./camp-repository/Camp/Repository.hs 322 - = do let fp = inRepo r patchFile + = do let fp = inRepoPath r patchFile hunk ./camp-repository/Camp/Repository.hs 330 - -> IO [InventoryItem] + -> IO [InRepoInventoryItem] hunk ./camp-repository/Camp/Repository.hs 337 -writeMegaPatch :: LockedRepository -> MegaPatch from to -> IO InventoryItem +writeMegaPatch :: LockedRepository -> MegaPatch from to + -> IO InRepoInventoryItem hunk ./camp-repository/Camp/Repository.hs 340 - = do let fp = inRepo r patchFile + = do let fp = inRepoPath r patchFile hunk ./camp-repository/Camp/Repository.hs 348 - -> IO (Bytes, InventoryItem) + -> IO (Bytes, InRepoInventoryItem) hunk ./camp-repository/Camp/Repository.hs 356 -applyToPristine r ps = inDir (inRepo r pristineDir) $ applyFully ps +applyToPristine r ps = inDir (inRepoPath r pristineDir) $ applyFully ps hunk ./camp-repository/Camp/Repository.hs 360 -applyToWorking r ps = inDir (inRepo r workingDir) $ applyFully ps +applyToWorking r ps = inDir (inRepoPath r workingDir) $ applyFully ps hunk ./camp-repository/Camp/Repository.hs 364 -copyPristineToWorking r = copyTreeToDirectory (inRepo r pristineDir) - (inRepo r workingDir) +copyPristineToWorking r = copyTreeToDirectory (inRepoPath r pristineDir) + (inRepoPath r workingDir) + +mkRepoPath :: String -> IO String +mkRepoPath xs | looksLikeURL xs = return xs + | otherwise = do fp <- canonicalizePath xs + return ("file://" ++ fp) + +looksLikeURL :: String -> Bool +looksLikeURL xs = case break (\c -> c == '/' || c == ':') xs of + ([_], ':':_) -> False -- c:/path + (_, ':':_) -> True -- host:/path or proto://path + _ -> False hunk ./camp-repository/Camp/Repository.hs 381 -withLockedRepo fp f = 
f (LockedRepository fp) +withLockedRepo fp f = do fp' <- canonicalizePath fp + f (LockedRepository fp') hunk ./camp-repository/Camp/Repository.hs 398 - f (UnlockedRepository fp inventoryRef patchesRef) + fp' <- mkRepoPath fp + f (UnlockedRepository fp' inventoryRef patchesRef) hunk ./camp-repository/Camp/Repository.hs 429 -initialiseRepo r = do createDirectory (inRepo r logsDir) - createDirectory (inRepo r repoRoot) - createDirectory (inRepo r patchesDir) - createDirectory (inRepo r pristineDir) +initialiseRepo r = do createDirectory (inRepoPath r logsDir) + createDirectory (inRepoPath r repoRoot) + createDirectory (inRepoPath r patchesDir) + createDirectory (inRepoPath r pristineDir) hunk ./camp-repository/Camp/Repository.hs 459 -getAuthor r = do m1 <- maybeReadLine $ inRepo r authorFile +getAuthor r = do m1 <- maybeReadLine $ inRepoPath r authorFile hunk ./camp-repository/Camp/Repository.hs 482 -copyRepo :: Repository r => r -> FilePath -> IO () -copyRepo r to = copyTree (inRepo r repoRoot) to +copyRepo :: LockedRepository -> FilePath -> IO () +copyRepo r to = copyTree (inRepoPath r repoRoot) to hunk ./camp-bin/camp.cabal 35 - EmptyDataDecls, Rank2Types, FlexibleContexts + EmptyDataDecls, Rank2Types, FlexibleContexts, GADTs hunk ./camp-repository/camp-repository.cabal 33 - Extensions: CPP, ScopedTypeVariables - -- Build-Depends: base, bytestring, camp-core, directory, filepath, old-time - -- Extensions: GADTs, ScopedTypeVariables, - -- MultiParamTypeClasses, FunctionalDependencies, - -- FlexibleContexts, FlexibleInstances, - -- OverlappingInstances, UndecidableInstances, - -- IncoherentInstances, - -- EmptyDataDecls + Extensions: CPP, ScopedTypeVariables, GADTs hunk ./camp-bin/Camp/Command/Get.hs 16 -get gf [remoteRepoPath] +get gf (remoteRepoPath : args) hunk ./camp-bin/Camp/Command/Get.hs 18 - -- XXX By the time we are creating a local repo we should be - -- reasonably sure that there really is a remote repo - d <- newDirectory $ takeFileName - $ dropTrailingPathSeparator remoteRepoPath - msg gf Normal ("Creating repository in " ++ show d) - setCurrentDirectory d + localDirectory <- + case args of + [localDirectory] -> do -- XXX Catch exists exception and complain + createDirectory localDirectory + return localDirectory + [] -> newDirectory $ takeFileName + $ dropTrailingPathSeparator remoteRepoPath + _ -> error "XXX Bad arguments to get" + msg gf Normal ("Creating repository in " ++ show localDirectory) + setCurrentDirectory localDirectory hunk ./camp-repository/Camp/Repository.hs 95 - getMegaPatches gf r = do inv <- readInventory gf r + getMegaPatches gf r = do msg gf Verbose "Getting patches from locked repo" + inv <- readInventory gf r hunk ./camp-repository/Camp/Repository.hs 109 - = do fp <- getUnlockedRepositoryPatchesLocalFilePath gf r + = do msg gf Verbose "Getting patches from unlocked repo" + fp <- getUnlockedRepositoryPatchesLocalFilePath gf r hunk ./camp-network/Camp/Curl.hsc 21 +import Camp.Types + hunk ./camp-network/Camp/Curl.hsc 208 -data Content = Content FilePath Int64{- XXX Bytes -}{- from -} Int64{- XXX Bytes -}{- length -} +data Content = Content FilePath Bytes{- from -} Bytes{- length -} hunk ./camp-network/camp-network.cabal 25 - Build-Depends: base, bytestring + Build-Depends: base, bytestring, camp-core adddir ./tests/get addfile ./tests/get/run_test.sh hunk ./tests/get/run_test.sh 1 +#!/bin/sh + +# This test tests that the "get" command works + +set -e + +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" + +cleanup() { + cd "$HERE" + rm 
-rf a + rm -rf b +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +# Set up the base repo in "a" +mkdir a +cd a +"$CAMP" init +printf 'This is the file\n' > f +"$CAMP" add f +"$CAMP" record -a -m base +printf 'Change the file\n' > f +"$CAMP" record -a -m changed +cd .. + +"$CAMP" -v0 get a b + +# Now check that the file content is what we expect +diff -u a/f b/f + addfile ./camp-core/Camp/Content.hs hunk ./camp-core/Camp/Content.hs 1 + +module Camp.Content (Content(..), simplifyContents) where + +import Camp.Types + +data Content loc = Content loc Bytes{- from -} Bytes{- length -} + deriving Show + +-- When we have a list of contents like +-- myfile 100 3 +-- myfile 103 6 +-- we don't want to download bytes 100-102 and 103-108 separately, so +-- this function will merge them into a single content of 100-108. +simplifyContents :: Eq loc => [Content loc] -> [Content loc] +simplifyContents [] = [] +simplifyContents (Content loc from len : is) + = let f cur [] = [Content loc from (cur - from)] + f cur is'@(Content loc' from' len' : is'') + | (loc == loc') && (cur == from') = f (cur + len') is'' + | otherwise = Content loc from (cur - from) : simplifyContents is' + in f (from + len) is + hunk ./camp-core/camp-core.cabal 17 + Camp.Content hunk ./camp-network/Camp/Curl.hsc 7 + URL, mkURL, (</>), hunk ./camp-network/Camp/Curl.hsc 22 +import Camp.Content +import Camp.InRepoFileName (InRepoFileName) +import qualified Camp.InRepoFileName as FN hunk ./camp-network/Camp/Curl.hsc 33 +import qualified System.FilePath as FP hunk ./camp-network/Camp/Curl.hsc 36 -type URL = String +newtype URL = URL String + deriving (Eq, Ord, Show) + +mkURL :: String -> URL +mkURL = URL + +(</>) :: URL -> InRepoFileName -> URL +URL x </> y = URL (x FP.</> FN.toFilePath y) hunk ./camp-network/Camp/Curl.hsc 83 -setURL c url = do cc <- withCString url $ curl_easy_setopt_ptr c opt - checkForCurlException cc +setURL c (URL url) = do cc <- withCString url $ curl_easy_setopt_ptr c opt + checkForCurlException cc hunk ./camp-network/Camp/Curl.hsc 201 -downloadContentsToTemporaryFile :: [Content] -> IO FilePath +downloadContentsToTemporaryFile :: [Content URL] -> IO FilePath hunk ./camp-network/Camp/Curl.hsc 220 -data Content = Content FilePath Bytes{- from -} Bytes{- length -} - --- When we have a list of contents like --- myfile 100 3 --- myfile 103 6 --- we don't want to download bytes 100-102 and 103-108 separately, so --- this function will merge them into a single content of 100-108.
-simplifyContents :: [Content] -> [Content] -simplifyContents [] = [] -simplifyContents (Content fp from len : is) - = let f cur [] = [Content fp from (cur - from)] - f cur is'@(Content fp' from' len' : is'') - | (fp == fp') && (cur == from') = f (cur + len') is'' - | otherwise = Content fp from (cur - from) : simplifyContents is' - in f (from + len) is - hunk ./camp-network/camp-network.cabal 25 - Build-Depends: base, bytestring, camp-core + Build-Depends: base, bytestring, camp-core, filepath hunk ./camp-repository/Camp/Inventory.hs 16 + inventoryToContentURLs, hunk ./camp-repository/Camp/Inventory.hs 22 -import Camp.Network +import Camp.Network as Network hunk ./camp-repository/Camp/Inventory.hs 31 -import System.FilePath +import qualified System.FilePath as FP hunk ./camp-repository/Camp/Inventory.hs 78 +inventoryToContentURLs :: URL -> InRepoInventory -> [Content URL] +inventoryToContentURLs root is = map (inventoryItemToContentURL root) is + +inventoryItemToContentURL :: URL -> InRepoInventoryItem -> Content URL +inventoryItemToContentURL root (InventoryItem _ fn from len) + = Content (root Network. fn) from len + hunk ./camp-repository/Camp/Inventory.hs 86 -inventoryToContents :: FilePath -> InRepoInventory -> [Content] +inventoryToContents :: FilePath -> InRepoInventory -> [Content FilePath] hunk ./camp-repository/Camp/Inventory.hs 90 -inventoryItemToContent :: FilePath -> InRepoInventoryItem -> Content +inventoryItemToContent :: FilePath -> InRepoInventoryItem -> Content FilePath hunk ./camp-repository/Camp/Inventory.hs 92 - = Content (root InRepoFileName.toFilePath fn) from len + = Content (root FP. InRepoFileName.toFilePath fn) from len hunk ./camp-repository/Camp/Inventory.hs 94 -absoluteInventoryToContents :: AbsoluteInventory -> [Content] +absoluteInventoryToContents :: AbsoluteInventory -> [Content FilePath] hunk ./camp-repository/Camp/Inventory.hs 97 -absoluteInventoryItemToContent :: AbsoluteInventoryItem -> Content +absoluteInventoryItemToContent :: AbsoluteInventoryItem -> Content FilePath hunk ./camp-repository/Camp/Inventory.hs 104 - = InventoryItem n (fp InRepoFileName.toFilePath fn) from len + = InventoryItem n + (fp FP. InRepoFileName.toFilePath fn) + from + len hunk ./camp-repository/Camp/Repository.hs 39 -import Camp.InRepoFileName +import Camp.InRepoFileName as FN hunk ./camp-repository/Camp/Repository.hs 42 -import Camp.Network +import Camp.Network as Network hunk ./camp-repository/Camp/Repository.hs 78 - String -- The repository location + URL -- The repository location hunk ./camp-repository/Camp/Repository.hs 84 - repoURL :: r -> String + repoURL :: r -> URL hunk ./camp-repository/Camp/Repository.hs 90 - repoURL (LockedRepository fp) = "file://" ++ fp + repoURL (LockedRepository fp) = mkURL ("file://" ++ fp) hunk ./camp-repository/Camp/Repository.hs 142 - let cs = inventoryToContents (repoURL r) inv1 + let cs = inventoryToContentURLs (repoURL r) inv1 hunk ./camp-repository/Camp/Repository.hs 183 -repoRoot = repoBase fromString "repo" +repoRoot = repoBase FN. fromString "repo" hunk ./camp-repository/Camp/Repository.hs 186 -inventoryFile = repoRoot fromString "inventory" +inventoryFile = repoRoot FN. fromString "inventory" hunk ./camp-repository/Camp/Repository.hs 189 -addsFile = repoRoot fromString "adds" +addsFile = repoRoot FN. fromString "adds" hunk ./camp-repository/Camp/Repository.hs 192 -prefsDir = repoRoot fromString "prefs" +prefsDir = repoRoot FN. 
fromString "prefs" hunk ./camp-repository/Camp/Repository.hs 195 -authorFile = prefsDir </> fromString "author" +authorFile = prefsDir FN.</> fromString "author" hunk ./camp-repository/Camp/Repository.hs 198 -pristineDir = repoRoot </> fromString "pristine" +pristineDir = repoRoot FN.</> fromString "pristine" hunk ./camp-repository/Camp/Repository.hs 239 -patchesDir = repoRoot </> fromString "patches" +patchesDir = repoRoot FN.</> fromString "patches" hunk ./camp-repository/Camp/Repository.hs 242 -patchFile = patchesDir </> fromString "patchFile" +patchFile = patchesDir FN.</> fromString "patchFile" hunk ./camp-repository/Camp/Repository.hs 247 -inRepoURL :: Repository r => r -> InRepoFileName -> FilePath -inRepoURL r fn = repoURL r FilePath.</> toFilePath fn +inRepoURL :: Repository r => r -> InRepoFileName -> URL +inRepoURL r fn = repoURL r Network.</> fn hunk ./camp-repository/Camp/Repository.hs 260 -getMegaPatchesWithInventory :: Repository r - => GeneralFlags -> r -> InRepoInventory +getMegaPatchesWithInventory :: GeneralFlags + -> LockedRepository -> InRepoInventory hunk ./camp-repository/Camp/Repository.hs 263 -getMegaPatchesWithInventory gf r is - = getContents gf (inventoryToContents (repoURL r) is) +getMegaPatchesWithInventory gf (LockedRepository fp) is + = getContents gf (inventoryToContents fp is) hunk ./camp-repository/Camp/Repository.hs 278 -getContents :: GeneralFlags -> [Content] -> IO ByteString +getContents :: GeneralFlags -> [Content FilePath] -> IO ByteString hunk ./camp-repository/Camp/Repository.hs 286 -getContent :: GeneralFlags -> Content -> IO ByteString +getContent :: GeneralFlags -> Content FilePath -> IO ByteString hunk ./camp-repository/Camp/Repository.hs 369 -mkRepoPath :: String -> IO String -mkRepoPath xs | looksLikeURL xs = return xs - | otherwise = do fp <- canonicalizePath xs - return ("file://" ++ fp) +mkRepoURL :: String -> IO URL +mkRepoURL xs | looksLikeURL xs = return (mkURL xs) + | otherwise = do fp <- canonicalizePath xs - return (mkURL ("file://" ++ fp)) hunk ./camp-repository/Camp/Repository.hs 400 - fp' <- mkRepoPath fp + fp' <- mkRepoURL fp hunk ./camp-repository/Camp/Repository.hs 479 -nextLogFile = repoBase </> fromString "nextLog" +nextLogFile = repoBase FN.</> fromString "nextLog" hunk ./camp-repository/Camp/Repository.hs 482 -logsDir = repoBase </> fromString "logs" +logsDir = repoBase FN.</> fromString "logs" hunk ./camp-bin/camp.cabal 29 - Ghc-Options: -Wall -fwarn-tabs -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror -O2 hunk ./camp-core/camp-core.cabal 40 - Ghc-Options: -Wall -fwarn-tabs -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror -O2 hunk ./camp-network/camp-network.cabal 23 - Ghc-Options: -Wall -fwarn-tabs -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror -O2 hunk ./camp-repository/camp-repository.cabal 29 - Ghc-Options: -Wall -fwarn-tabs -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror -O2 hunk ./camp-view/camp-view.cabal 24 - Ghc-Options: -Wall -fwarn-tabs -Werror + Ghc-Options: -Wall -fwarn-tabs -Werror -O2 hunk ./camp-core/Camp/Patch/InputOutput.hs 6 +import Camp.Utils + hunk ./camp-core/Camp/Patch/InputOutput.hs 48 - input bs = case input bs of - (w, bs') -> - -- XXX Should be sanity checking that it was big enough?
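A quick sketch of how the pieces above fit together: repoURL now yields a URL, Camp.Network's (</>) appends an in-repo file name to it, and inventoryToContentURLs turns an inventory into ranged Content URL downloads. The repository location and file name below are illustrative only:

import Camp.Network (URL, mkURL, (</>))
import qualified Camp.InRepoFileName as FN

-- A hypothetical remote repository root and an illustrative in-repo file name:
exampleRepo :: URL
exampleRepo = mkURL "http://example.org/repo"

exampleFileURL :: URL
exampleFileURL = exampleRepo </> FN.fromString "inventory"

For a local LockedRepository, repoURL wraps the path as a file:// URL with mkURL, so local and remote repositories go through the same Content URL machinery.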
- BS.splitAt w bs' + input bs0 = case input bs0 of + (w, bs1) -> + case splitAtExactlyBS w bs1 of + Just (bs2, bs3) -> (bs2, bs3) + Nothing -> error "input ByteString: Too short" hunk ./camp-core/Camp/Patch/InputOutput.hs 71 - input bs = case BS.splitAt 8 bs of - (xs, bs') -> + input bs = case splitAtExactlyBS 8 bs of + Just (xs, bs') -> hunk ./camp-core/Camp/Patch/InputOutput.hs 74 + Nothing -> error "input Word64: Not long enough" hunk ./camp-core/Camp/Patch/Primitive.hs 367 - else case BS.splitAt (skipBytes - startBytes) finish of - -- XXX Should check length of skipped - (skipped, finish') -> - case BS.splitAt (BS.length oldBytes) finish' of - (_old, finish'') -> + else case splitAtExactlyBS (skipBytes - startBytes) finish of + Just (skipped, finish') -> + case splitAtExactlyBS (BS.length oldBytes) finish' of + Just (_old, finish'') -> hunk ./camp-core/Camp/Patch/Primitive.hs 381 - -- Nothing -> error "Old patch content is wrong" - -- Nothing -> error "Not enough lines to skip" + Nothing -> error "Old patch content is wrong" + Nothing -> error "Not enough lines to skip" hunk ./camp-core/Camp/Utils.hs 4 - stripPrefixBS, + stripPrefixBS, splitAtExactlyBS, hunk ./camp-core/Camp/Utils.hs 16 +import qualified Data.ByteString as SBS hunk ./camp-core/Camp/Utils.hs 18 +import qualified Data.ByteString.Lazy.Internal as BSI hunk ./camp-core/Camp/Utils.hs 21 +import Data.Int hunk ./camp-core/Camp/Utils.hs 74 +splitAtExactlyBS :: Int64 -> ByteString -> Maybe (ByteString, ByteString) +splitAtExactlyBS n0 bs0 = if n0 < 0 + then Nothing + else f n0 bs0 + where f 0 bs = Just (BS.empty, bs) + f _ BSI.Empty = Nothing + f n (BSI.Chunk c cs) = let len = fromIntegral (SBS.length c) + in if n >= len + then case f (n - len) cs of + Just (cs', bs') -> + Just (BSI.Chunk c cs', bs') + Nothing -> Nothing + else -- XXX fromIntegral overflow possible + case SBS.splitAt (fromIntegral n) c of + (xs, ys) -> + Just (BSI.Chunk xs BSI.Empty, + BSI.Chunk ys cs) + hunk ./camp-repository/Camp/Repository.hs 167 - -- XXX This should be a splitAt that complains if there aren't - -- enough bytes - = case BS.splitAt len bs of - (this, rest) -> + = case splitAtExactlyBS len bs of + Just (this, rest) -> hunk ./camp-repository/Camp/Repository.hs 175 + Nothing -> error "XXX checkInventoryMatchesPatches" hunk ./camp-core/Camp/Patch/InputOutput.hs 66 - (w, bs') -> (fromIntegral (w :: Word64), bs') + (w, bs') -> + let i = fromIntegral (w :: Word64) + in i `seq` (i, bs') hunk ./camp-core/Camp/Patch/InputOutput.hs 75 - (foldl1' f $ map fromIntegral $ BS.unpack xs, bs') + let w = foldl1' f $ map fromIntegral $ BS.unpack xs + in w `seq` (w, bs') hunk ./camp-core/Camp/Patch/InputOutput.hs 94 - (i, bs') -> (fromIntegral (i :: Int64), bs') + (i, bs') -> + let i' = fromIntegral (i :: Int64) + in i' `seq` (i', bs') hunk ./camp-core/Camp/Patch/InputOutput.hs 98 - (ws, bs') -> (fromWord64s ws, bs') + (ws, bs') -> + let i = fromWord64s ws + in i `seq` (i, bs') hunk ./camp-core/Camp/Patch/InputOutput.hs 102 - (ws, bs') -> (negate $ fromWord64s ws, bs') + (ws, bs') -> + let i = negate $ fromWord64s ws + in i `seq` (i, bs') hunk ./camp-core/Camp/Patch/InputOutput.hs 58 - let len = fromIntegral (w :: Word64) - in if BS.length bs' < len - then Left ("InputOutput ByteString truncated", bs) - else Right (BS.drop len bs') + case splitAtExactlyBS (fromIntegral (w :: Word64)) bs' of + Just (_, bs'') -> Right bs'' + Nothing -> + Left ("InputOutput ByteString truncated", bs) hunk ./camp-core/Camp/Patch/InputOutput.hs 79 - valid bs _ = if BS.length bs < 8 - 
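splitAtExactlyBS is the total counterpart of the lazy BS.splitAt it replaces: it walks the chunk list and succeeds only if the requested prefix is entirely present. A small sketch of the intended behaviour:

import qualified Data.ByteString.Lazy.Char8 as BSC
import Camp.Utils (splitAtExactlyBS)

splitDemo :: Bool
splitDemo = case splitAtExactlyBS 3 (BSC.pack "abcdef") of
              Just (xs, ys) -> BSC.unpack xs == "abc" && BSC.unpack ys == "def"
              Nothing       -> False
-- splitAtExactlyBS 10 (BSC.pack "abc") is Nothing, where plain BS.splitAt
-- would silently return a short prefix; that is why the InputOutput
-- instances above can now report truncated input instead of misparsing it.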
then Left ("InputOutput Word64 not enough bytes", bs) - else Right (BS.drop 8 bs) + valid bs _ = case splitAtExactlyBS 8 bs of + Just (_, bs') -> Right bs' + Nothing -> Left ("InputOutput Word64 not enough bytes", bs) hunk ./camp-repository/Camp/Repository.hs 96 - inv <- readInventory gf r + inv <- readInventoryLazily gf r hunk ./camp-repository/Camp/Repository.hs 141 - do inv1 <- readInventory gf r + do inv1 <- readInventoryLazily gf r hunk ./camp-repository/Camp/Repository.hs 156 - inv2 <- readInventory gf r + inv2 <- readInventoryLazily gf r hunk ./camp-repository/Camp/Inventory.hs 114 - where compact _ [] = [] - compact offset (InventoryItem n _ _ len : is) + where compact !_ [] = [] + compact !offset (InventoryItem n _ _ len : is) hunk ./camp-repository/camp-repository.cabal 33 - Extensions: CPP, ScopedTypeVariables, GADTs + Extensions: CPP, ScopedTypeVariables, GADTs, BangPatterns hunk ./camp-core/Camp/Patch/Sequence.hs 13 +import Camp.Types hunk ./camp-core/Camp/Patch/Sequence.hs 16 +import Data.ByteString.Lazy (ByteString) hunk ./camp-core/Camp/Patch/Sequence.hs 51 - valid bs _ = case BS.uncons bs of - Just (0, bs') -> Right bs' - Just (1, bs') -> - case valid2 bs' (undefined :: p from mid) of - Right bs'' -> - valid bs'' (undefined :: Seq p mid to) - Left err -> Left err - _ -> Left ("InputOutput Seq: Bad value", bs) + valid bs _ = validSeq bs (undefined :: Proxy2 p) hunk ./camp-core/Camp/Patch/Sequence.hs 55 +-- This is written in such a way as to avoid leaking space due to +-- http://hackage.haskell.org/trac/ghc/ticket/2762 +validSeq :: forall p . InputOutput2 p + => ByteString -> Proxy2 p + -> Either (String, ByteString) ByteString +validSeq bs typeProxy = case BS.uncons bs of + Just (0, bs') -> Right bs' + Just (1, bs') -> + case valid2 bs' (undefined :: p from mid) of + Right bs'' -> + validSeq bs'' typeProxy + Left err -> Left err + _ -> Left ("InputOutput Seq: Bad value", bs) + hunk ./camp-core/Camp/Patch/Stream.hs 6 +import Camp.Types hunk ./camp-core/Camp/Patch/Stream.hs 8 +import Data.ByteString.Lazy (ByteString) hunk ./camp-core/Camp/Patch/Stream.hs 21 - valid bs _ = if BS.null bs - then Right BS.empty - else case valid bs (undefined :: a) of - Right bs'' -> - valid bs'' (undefined :: Stream a) - Left err -> Left err + valid bs _ = validStream bs (undefined :: Proxy a) hunk ./camp-core/Camp/Patch/Stream.hs 25 +-- This is written in such a way as to avoid leaking space due to +-- http://hackage.haskell.org/trac/ghc/ticket/2762 +validStream :: forall a . InputOutput a + => ByteString -> Proxy a + -> Either (String, ByteString) ByteString +validStream bs typeProxy = if BS.null bs + then Right BS.empty + else case valid bs (undefined :: a) of + Right bs'' -> + validStream bs'' typeProxy + Left err -> Left err + hunk ./camp-core/Camp/Patch/Stream.hs 48 - valid bs _ = if BS.null bs - then Right BS.empty - else case valid2 bs (undefined :: p from mid) of - Right bs'' -> - valid bs'' (undefined :: Stream2 p mid to) - Left err -> Left err + valid bs _ = validStream2 bs (undefined :: Proxy2 p) hunk ./camp-core/Camp/Patch/Stream.hs 52 +-- This is written in such a way as to avoid leaking space due to +-- http://hackage.haskell.org/trac/ghc/ticket/2762 +validStream2 :: forall p . 
InputOutput2 p + => ByteString -> Proxy2 p + -> Either (String, ByteString) ByteString +validStream2 bs typeProxy = if BS.null bs + then Right BS.empty + else case valid2 bs (undefined :: p from mid) of + Right bs'' -> + validStream2 bs'' typeProxy + Left err -> Left err + hunk ./camp-core/Camp/Types.hs 12 + +data Proxy p +data Proxy2 (p :: * -> * -> *) hunk ./camp-core/camp-core.cabal 44 - Extensions: GADTs, ScopedTypeVariables, + Extensions: GADTs, ScopedTypeVariables, KindSignatures, hunk ./camp-core/Camp/Patch/Apply.hs 2 -module Camp.Patch.Apply (Apply(..), flush, applyFully) where +module Camp.Patch.Apply (Apply(..), flush, applyFully, ApplyState) where hunk ./camp-core/Camp/Patch/Sequence.hs 41 - input bs = case BS.head bs of - 0 -> (unsafeCoerce Nil, BS.tail bs) - 1 -> case input2 (BS.tail bs) of - (x, bs') -> - case input bs' of - -- XXX There's a performance penalty for this ~, - -- but a significant space bonus - ~(xs, bs'') -> - (x `Cons` xs, bs'') - _ -> error "InputOutput Seq: Bad value" + input bs = case inputSeq bs of + (s, bs') -> (unhide2 s, bs') hunk ./camp-core/Camp/Patch/Sequence.hs 47 +-- This is written in such a way as to avoid leaking space due to +-- http://hackage.haskell.org/trac/ghc/ticket/2762 +inputSeq :: InputOutput2 p => ByteString -> (Hide2 (Seq p), ByteString) +inputSeq bs = case BS.head bs of + 0 -> (hide2 Nil, BS.tail bs) + 1 -> case input2 (BS.tail bs) of + (x, bs') -> + case inputSeq bs' of + -- XXX There's a performance penalty for this ~, + -- but a significant space bonus + ~(xs, bs'') -> + (hide2 (x `Cons` unhide2 xs), bs'') + _ -> error "InputOutput Seq: Bad value" + hunk ./camp-core/Camp/Patch/Sequence.hs 152 - apply m Nil = return m - apply m (Cons p ps) = do m' <- apply m p - apply m' ps + apply = applySeq + +-- This is written in such a way as to avoid leaking space due to +-- http://hackage.haskell.org/trac/ghc/ticket/2762 +applySeq :: Apply p => ApplyState -> Seq p from to -> IO ApplyState +applySeq m Nil = return m +applySeq m (Cons p ps) = do m' <- apply m p + applySeq m' ps hunk ./camp-core/Camp/Types.hs 2 -module Camp.Types where +module Camp.Types (Bytes, Line, Proxy, Proxy2, Hide2, hide2, unhide2) where hunk ./camp-core/Camp/Types.hs 5 +import Unsafe.Coerce hunk ./camp-core/Camp/Types.hs 17 +data Hide2 t where + Hide2 :: t a b -> Hide2 t + +hide2 :: t a b -> Hide2 t +hide2 x = Hide2 x + +unhide2 :: Hide2 t -> t a b +unhide2 (Hide2 x) = unsafeCoerce x + hunk ./camp-repository/Camp/Repository.hs 167 - = case splitAtExactlyBS len bs of - Just (this, rest) -> + = -- We assume that the total file size is right, or the + -- download should have failed. + -- So we can use splitAt rather than splitAtExactlyBS, + -- and know that we won't get any short splits. + -- This is important, as early commit patches tend to + -- be large (e.g. GHC has a 21M patch near the start), + -- so we don't want to keep a whole patch in memory + -- while we're checking that we have enough bytes. 
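The Proxy, Proxy2 and Hide2 types introduced above all serve the same purpose: the recursive validating and parsing loops are floated out of the class methods into ordinary top-level functions driven by a phantom proxy argument, which is the shape the comments above use to sidestep the ticket 2762 space leak. Hide2 additionally hides the phantom from/to indices so that inputSeq can be such a loop, with unhide2 restoring them via unsafeCoerce (acceptable only because the indices are phantom). A toy sketch of the pattern, with a made-up one-method class standing in for InputOutput:

{-# LANGUAGE ScopedTypeVariables #-}

import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BS

data Proxy a   -- mirrors the empty Proxy in Camp.Types

-- A made-up stand-in for InputOutput: validate one element, return the rest.
class ValidOne a where
    validOne :: ByteString -> a -> Either String ByteString

-- The loop is an ordinary top-level function driven by a proxy argument,
-- in the same shape as validSeq, validStream and validStream2 above.
validAll :: forall a . ValidOne a
         => ByteString -> Proxy a -> Either String ByteString
validAll bs p
    | BS.null bs = Right BS.empty
    | otherwise  = case validOne bs (undefined :: a) of
                       Right bs' -> validAll bs' p
                       Left err  -> Left err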
+ case BS.splitAt len bs of + (this, rest) -> hunk ./camp-repository/Camp/Repository.hs 183 - Nothing -> error "XXX checkInventoryMatchesPatches" hunk ./camp-core/Camp/Patch/Primitive.hs 286 - apply m prim = case prim of - AddDir fn -> do let fp = InRepoFileName.toFilePath fn - fileExists <- doesFileExist fp - directoryExists <- doesDirectoryExist fp - if fileExists || directoryExists - then error ("Already exists: " ++ show fp) - else createDirectory fp - return m - RmDir fn -> do let fp = InRepoFileName.toFilePath fn - -- XXX Should check for emptiness - removeDirectory fp + apply = applyPrimitive + +applyPrimitive :: ApplyState -> Primitive from to -> IO ApplyState +applyPrimitive m prim + = case prim of + AddDir fn -> do let fp = InRepoFileName.toFilePath fn + fileExists <- doesFileExist fp + directoryExists <- doesDirectoryExist fp + if fileExists || directoryExists + then error ("Already exists: " ++ show fp) + else createDirectory fp + return m + RmDir fn -> do let fp = InRepoFileName.toFilePath fn + -- XXX Should check for emptiness + removeDirectory fp + return m + -- XXX We could be clever here, and just update m if necessary + MvDir fromFn toFn -> + do flush m + let from = InRepoFileName.toFilePath fromFn + to = InRepoFileName.toFilePath toFn + fromDirectoryExists <- doesDirectoryExist from + toFileExists <- doesFileExist to + toDirectoryExists <- doesDirectoryExist to + if fromDirectoryExists + then if toFileExists || toDirectoryExists + then error ("Already exists: " ++ show to) + else renameDirectory from to + else error ("Not a directory: " ++ show from) + return Nothing + AddFile fn -> do let fp = InRepoFileName.toFilePath fn + fileExists <- doesFileExist fp + directoryExists <- doesDirectoryExist fp + if fileExists || directoryExists + then error ("Already exists: " ++ show fp) + else withBinaryFile fp WriteMode $ \_ -> + return () hunk ./camp-core/Camp/Patch/Primitive.hs 324 - -- XXX We could be clever here, and just update m if necessary - MvDir fromFn toFn -> - do flush m - let from = InRepoFileName.toFilePath fromFn - to = InRepoFileName.toFilePath toFn - fromDirectoryExists <- doesDirectoryExist from - toFileExists <- doesFileExist to - toDirectoryExists <- doesDirectoryExist to - if fromDirectoryExists - then if toFileExists || toDirectoryExists - then error ("Already exists: " ++ show to) - else renameDirectory from to - else error ("Not a directory: " ++ show from) - return Nothing - AddFile fn -> do let fp = InRepoFileName.toFilePath fn - fileExists <- doesFileExist fp - directoryExists <- doesDirectoryExist fp - if fileExists || directoryExists - then error ("Already exists: " ++ show fp) - else withBinaryFile fp WriteMode $ \_ -> - return () - return m - RmFile fn -> - case m of - Just (fn', _, start, finish) - | fn == fn' -> - if BS.null start && BS.null finish - then do let fp = InRepoFileName.toFilePath fn - -- The file might not exist, as we may be applying - -- AddFile f; RmFile f - -- But if we get to this point then we know that - -- the directory doesn't exist, because either - -- AddFile checked it doesn't, or we read the - -- file when doing e.g. a Hunk. 
- fileExists <- doesFileExist fp - when fileExists $ removeFile fp - return Nothing - else do flush m - error ("Not empty: " ++ show fn) - _ -> - do let fp = InRepoFileName.toFilePath fn - size <- withBinaryFile fp ReadMode hFileSize - if size /= 0 - then error ("Not empty: " ++ show fn) - else removeFile fp - return m - -- XXX We could be clever here, and just update m if necessary - MvFile fromFn toFn -> - do flush m - let from = InRepoFileName.toFilePath fromFn - to = InRepoFileName.toFilePath toFn - fromFileExists <- doesFileExist from - toFileExists <- doesFileExist to - toDirectoryExists <- doesDirectoryExist to - if fromFileExists - then if toFileExists || toDirectoryExists - then error ("Already exists: " ++ show to) - else renameFile from to - else error ("Not a file: " ++ show from) - return Nothing - -- XXX Handle m - Hunk fn skipBytes _ oldBytes _ newBytes _ -> - case m of - Just (fn', startBytes, start, finish) - | fn == fn' -> - -- XXX This should check that the file is big enough etc - if skipBytes < startBytes - then do let finish' = start `BS.append` finish - apply (Just (fn, 0, BS.empty, finish')) prim - else case splitAtExactlyBS (skipBytes - startBytes) finish of - Just (skipped, finish') -> - case splitAtExactlyBS (BS.length oldBytes) finish' of - Just (_old, finish'') -> - -- XXX sanity check oldBytes == old - do let start' = start `BS.append` - skipped `BS.append` - newBytes - startBytes' = skipBytes - + BS.length newBytes - return (Just (fn, - startBytes', - start', - finish'')) - Nothing -> error "Old patch content is wrong" - Nothing -> error "Not enough lines to skip" - _ -> - do flush m - let fp = InRepoFileName.toFilePath fn - content <- BS.readFile fp - -- XXX Currently we force the length, so that we can - -- safely overwrite. Perhaps we should mv to somewhere - -- under _camp instead? - evaluate $ BS.length content - apply (Just (fn, 0, BS.empty, content)) prim - -- XXX Handle m - Binary fn oldBytes newBytes -> - do flush m - let fp = InRepoFileName.toFilePath fn - content <- BS.readFile fp - if oldBytes == content - then BS.writeFile fp newBytes - else error "XXX Old content is wrong" - return Nothing + RmFile fn -> + case m of + Just (fn', _, start, finish) + | fn == fn' -> + if BS.null start && BS.null finish + then do let fp = InRepoFileName.toFilePath fn + -- The file might not exist, as we may be applying + -- AddFile f; RmFile f + -- But if we get to this point then we know that + -- the directory doesn't exist, because either + -- AddFile checked it doesn't, or we read the + -- file when doing e.g. a Hunk. 
+ fileExists <- doesFileExist fp + when fileExists $ removeFile fp + return Nothing + else do flush m + error ("Not empty: " ++ show fn) + _ -> + do let fp = InRepoFileName.toFilePath fn + size <- withBinaryFile fp ReadMode hFileSize + if size /= 0 + then error ("Not empty: " ++ show fn) + else removeFile fp + return m + -- XXX We could be clever here, and just update m if necessary + MvFile fromFn toFn -> + do flush m + let from = InRepoFileName.toFilePath fromFn + to = InRepoFileName.toFilePath toFn + fromFileExists <- doesFileExist from + toFileExists <- doesFileExist to + toDirectoryExists <- doesDirectoryExist to + if fromFileExists + then if toFileExists || toDirectoryExists + then error ("Already exists: " ++ show to) + else renameFile from to + else error ("Not a file: " ++ show from) + return Nothing + -- XXX Handle m + Hunk fn skipBytes _ oldBytes _ newBytes _ -> + case m of + Just (fn', startBytes, start, finish) + | fn == fn' -> + -- XXX This should check that the file is big enough etc + if skipBytes < startBytes + then do let finish' = start `BS.append` finish + apply (Just (fn, 0, BS.empty, finish')) prim + else case splitAtExactlyBS (skipBytes - startBytes) finish of + Just (skipped, finish') -> + case splitAtExactlyBS (BS.length oldBytes) finish' of + Just (_old, finish'') -> + -- XXX sanity check oldBytes == old + do let start' = start `BS.append` + skipped `BS.append` + newBytes + startBytes' = skipBytes + + BS.length newBytes + return (Just (fn, + startBytes', + start', + finish'')) + Nothing -> error "Old patch content is wrong" + Nothing -> error "Not enough lines to skip" + _ -> + do flush m + let fp = InRepoFileName.toFilePath fn + content <- BS.readFile fp + -- XXX Currently we force the length, so that we can + -- safely overwrite. Perhaps we should mv to somewhere + -- under _camp instead? + evaluate $ BS.length content + apply (Just (fn, 0, BS.empty, content)) prim + -- XXX Handle m + Binary fn oldBytes newBytes -> + do flush m + let fp = InRepoFileName.toFilePath fn + content <- BS.readFile fp + if oldBytes == content + then BS.writeFile fp newBytes + else error "XXX Old content is wrong" + return Nothing hunk ./camp-repository/Camp/Repository.hs 91 - getInventory _ r = BS.readFile (inRepoPath r inventoryFile) + getInventory = getInventoryLocked hunk ./camp-repository/Camp/Repository.hs 99 +getInventoryLocked :: GeneralFlags -> LockedRepository -> IO ByteString +getInventoryLocked _ r = BS.readFile (inRepoPath r inventoryFile) + hunk ./camp-repository/Camp/Repository.hs 104 - getInventory _ r = do fp <- getUnlockedRepositoryInventoryLocalFilePath r - BS.readFile fp + getInventory = getInventoryUnlocked hunk ./camp-repository/Camp/Repository.hs 115 +getInventoryUnlocked :: GeneralFlags -> UnlockedRepository -> IO ByteString +getInventoryUnlocked _ r + = do fp <- getUnlockedRepositoryInventoryLocalFilePath r + BS.readFile fp + hunk ./camp-repository/Camp/Repository.hs 291 +-- XXX Should compact the contents? hunk ./camp-core/Camp/Patch/InputOutput.hs 137 - input bs = case BS.head bs of + input = inputList + valid bs _ = validList bs (undefined :: a) + output = outputList + +inputList :: InputOutput a => ByteString -> ([a], ByteString) +inputList bs = case BS.head bs of hunk ./camp-core/Camp/Patch/InputOutput.hs 146 - case input bs' of - -- XXX This ~ is just for lazy inventory reading; use a different type? + case inputList bs' of + -- XXX This ~ is just for lazy inventory reading; + -- use a different type? 
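A note on the ApplyState threaded through applyPrimitive above: it acts as a one-file write-back buffer, so a run of Hunks against the same file is applied in memory and only flushed to disk when another file or an incompatible primitive comes along. Its exact definition is not shown in this patch; judging from the patterns above it pairs the file name with the byte offset already processed, the rewritten prefix and the remaining suffix. A much-simplified model of the idea (hypothetical type, not camp's own):

import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BS

-- Hypothetical, simplified state: the file currently being edited and its
-- in-memory contents; Nothing means nothing is buffered.
type EditBuffer = Maybe (FilePath, ByteString)

-- Write any buffered file back out before touching a different file.
flushBuffer :: EditBuffer -> IO ()
flushBuffer Nothing         = return ()
flushBuffer (Just (fp, bs)) = BS.writeFile fp bs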
hunk ./camp-core/Camp/Patch/InputOutput.hs 151 - valid bs _ = case BS.uncons bs of - Just (0, bs') -> Right bs' - Just (1, bs') -> - case valid bs' (undefined :: a) of - Right bs'' -> - valid bs'' (undefined :: [a]) - Left err -> Left err - _ -> Left ("InputOutput []: Bad value", bs) - output [] = BS.singleton 0 - output (x : xs) = 1 `BS.cons` output x `BS.append` output xs + +validList :: InputOutput a + => ByteString -> a -> Either (String, ByteString) ByteString +validList bs typeProxy = case BS.uncons bs of + Just (0, bs') -> Right bs' + Just (1, bs') -> + case valid bs' typeProxy of + Right bs'' -> + validList bs'' typeProxy + Left err -> Left err + _ -> Left ("InputOutput []: Bad value", bs) + +outputList :: InputOutput a => [a] -> ByteString +outputList [] = BS.singleton 0 +outputList (x : xs) = 1 `BS.cons` output x `BS.append` outputList xs hunk ./camp-network/camp-network.cabal 27 - Pkgconfig-Depends: libcurl + -- Strictly speaking we don't need libcurl >= 7.19.1, but we do need + -- it if we want to be able to download partial files over SFTP: + -- Fixed in 7.19.1 - November 5 2008 + -- Bugfixes: + -- * CURLOPT_RANGE now works for SFTP downloads + Pkgconfig-Depends: libcurl >= 7.19.1 addfile ./README.txt hunk ./README.txt 1 + +This is the camp source repo. These four subdirectories contain the +packages that make up camp: + +* camp-core The central patch modules +* camp-network The CURL binding +* camp-repository Repositories of patches +* camp-bin The actual camp executable + +This contains the testsuite: + +* tests + +And finally, the camp-view tool prototype is here: + +* camp-view + hunk ./camp-repository/Camp/Repository.hs 39 +import Camp.Content hunk ./camp-repository/Camp/Repository.hs 292 --- XXX Should compact the contents? hunk ./camp-repository/Camp/Repository.hs 293 -getContents _ [] = return BS.empty -getContents gf (c : cs) = do x <- getContent gf c - xs <- unsafeInterleaveIO $ getContents gf cs - return (x `BS.append` xs) +getContents gf cs = getContents' gf $ simplifyContents cs + +getContents' :: GeneralFlags -> [Content FilePath] -> IO ByteString +getContents' _ [] = return BS.empty +getContents' gf (c : cs) = do x <- getContent gf c + xs <- unsafeInterleaveIO $ getContents' gf cs + return (x `BS.append` xs) adddir ./camp-fragment addfile ./camp-fragment/Fragment.hs hunk ./camp-fragment/Fragment.hs 1 + +module Main (main) where + +import Camp.Content +import Camp.Inventory +import Camp.Options +import Camp.Repository + +import qualified Data.ByteString.Lazy.Char8 as BSC +import System.Directory +import System.Environment +import System.IO.Unsafe +import System.Random + +main :: IO () +main = do args <- getArgs + case args of + [compactRepoDir, fragmentedRepoDir] -> + withLockedRepo compactRepoDir $ \compactRepo -> + do createDirectory fragmentedRepoDir + setCurrentDirectory fragmentedRepoDir + withLockedRepoCreate $ \fragmentedRepo -> + do initialiseRepo fragmentedRepo + fragment compactRepo fragmentedRepo + _ -> error "Bad args" + +fragment :: LockedRepository -> LockedRepository -> IO () +fragment compactRepo fragmentedRepo + = do compactInv <- readInventory gf compactRepo + writeRandom + fragmentedInv <- f compactInv + writeInventory fragmentedRepo fragmentedInv + where gf = GeneralFlags { + gfVerbosity = Normal, + gfLog = False + } + writeRandom = do n <- randomRIO (5000, 6000) + let n' = fromInteger n + putMegaPatches fragmentedRepo (BSC.replicate n' 'x') + return () + f [] = return [] + f (InventoryItem name fn from len : is) + = do let c = Content 
(inRepoPath compactRepo fn) from len + bs <- getContent gf c + (fn', from') <- putMegaPatches fragmentedRepo bs + writeRandom + is' <- unsafeInterleaveIO $ f is + return (InventoryItem name fn' from' len : is') + addfile ./camp-fragment/LICENSE hunk ./camp-fragment/LICENSE 1 +Copyright (c) Ian Lynagh, 2009. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the Authors nor the names of any contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + addfile ./camp-fragment/Setup.hs hunk ./camp-fragment/Setup.hs 1 + +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain + addfile ./camp-fragment/camp-fragment.cabal hunk ./camp-fragment/camp-fragment.cabal 1 +Name: camp-fragment +Version: 0.1 +License: BSD3 +License-File: LICENSE +Copyright: 2009 Ian Lynagh +Author: Ian Lynagh +Maintainer: Ian Lynagh +Synopsis: Camp fragment +Description: + This fragments the patch file of a repository. + It is only useful for testing and benchmarking. +Category: Development +Build-Type: Simple +Cabal-Version: >=1.2 + +Executable camp-fragment + Main-Is: Fragment.hs + + Ghc-Options: -Wall -fwarn-tabs -Werror -O2 + + Build-Depends: base, bytestring, camp-core, camp-repository, + directory, random + hunk ./camp-repository/Camp/Repository.hs 19 + getContent, adddir ./camp-bin/packages adddir ./camp-bin/packages/extensible-exceptions adddir ./camp-bin/packages/extensible-exceptions/Control adddir ./camp-bin/packages/extensible-exceptions/Control/Exception adddir ./camp-repository/packages/extensible-exceptions adddir ./camp-repository/packages/extensible-exceptions/Control adddir ./camp-repository/packages/extensible-exceptions/Control/Exception hunk ./camp-bin/Camp/Command/Get.hs 10 -import Control.Exception as Exception +import Control.Exception.Extensible as Exception hunk ./camp-bin/camp.cabal 15 -Flag have_lcs - Description: Do we have the lcs package? +Flag have_extensible_exceptions + Description: Do we have the extensible-exceptions package? hunk ./camp-bin/camp.cabal 28 + Hs-Source-Dirs: . hunk ./camp-bin/camp.cabal 44 + -- This is a hack to avoid needing to install the extensible-exceptions + -- package when building with the GHC HEAD. 
The in-tree sources are from + -- extensible-exceptions 0.1.1.0 + if flag(have_extensible_exceptions) + Build-Depends: extensible-exceptions + else + Hs-Source-Dirs: packages/extensible-exceptions + Other-Modules: + Control.Exception.Extensible + addfile ./camp-bin/packages/extensible-exceptions/Control/Exception/Extensible.hs hunk ./camp-bin/packages/extensible-exceptions/Control/Exception/Extensible.hs 1 +{-# LANGUAGE DeriveDataTypeable #-} +------------------------- +-- | +-- Module : Control.Exception +-- Copyright : (c) The University of Glasgow 2001 +-- License : BSD-style (see the file LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Stability : experimental +-- Portability : non-portable (uses ExistentialQuantification and DeriveDataTypeable) +-- +-- This module provides the extensible exceptions API for raising and catching both +-- built-in and user-defined exceptions. +-- +-- For newer versions of GHC (>=6.9), this package re-exports 'Control.Exception'. +-- Otherwise, it provides a compatibility layer around the previous version of the +-- extensions API. + +#ifdef USE_NEW_EXCEPTIONS +module Control.Exception.Extensible (module Control.Exception) where + +import Control.Exception hiding (blocked) + +#else +module Control.Exception.Extensible ( + -- * The Exception type + SomeException(..), + Exception(..), + E.IOException, + E.ArithException(..), + E.ArrayException(..), + AssertionFailed(..), + E.AsyncException(..), + NonTermination(..), + NestedAtomically(..), + ExitCode(..), + BlockedOnDeadMVar(..), + BlockedIndefinitely(..), + Deadlock(..), + NoMethodError(..), + PatternMatchFail(..), + RecConError(..), + RecSelError(..), + RecUpdError(..), + ErrorCall(..), + + -- * Throwing exceptions + throwIO, + throw, + ioError, + throwTo, + -- * Catching Exceptions + + -- |There are several functions for catching and examining + -- exceptions; all of them may only be used from within the + -- 'IO' monad. + + -- ** The @catch@ functions + catch, -- :: IO a -> (Exception -> IO a) -> IO a + catches, Handler(..), + catchJust, -- :: (Exception -> Maybe b) -> IO a -> (b -> IO a) -> IO a + + -- ** The @handle@ functions + handle, -- :: (Exception -> IO a) -> IO a -> IO a + handleJust,-- :: (Exception -> Maybe b) -> (b -> IO a) -> IO a -> IO a + + -- ** The @try@ functions + try, -- :: IO a -> IO (Either Exception a) + tryJust, -- :: (Exception -> Maybe b) -> a -> IO (Either b a) + onException, + + -- ** The @evaluate@ function + E.evaluate, -- :: a -> IO a + + -- ** The @mapException@ function + mapException, -- :: (Exception -> Exception) -> a -> a + + -- * Asynchronous Exceptions + + -- $async + + -- ** Asynchronous exception control + + -- |The following two functions allow a thread to control delivery of + -- asynchronous exceptions during a critical region. + + E.block, + E.unblock, + + -- *** Applying @block@ to an exception handler + + -- $block_handler + + -- *** Interruptible operations + + -- $interruptible + + -- * Assertions + + assert, + + -- * Utilities + bracket, + bracket_, + bracketOnError, + finally + ) where + +import Prelude hiding (catch) +import Control.Concurrent hiding (throwTo) +import qualified Control.Exception as E +import Data.Dynamic +import Data.Typeable +import System.Exit +import System.IO.Unsafe(unsafePerformIO) + +class (Typeable e, Show e) => Exception e where + toException :: e -> SomeException + fromException :: SomeException -> Maybe e + + toException = SomeException + fromException (SomeException e) = cast e + +data SomeException = forall e . 
Exception e => SomeException e + deriving Typeable + +instance Show SomeException where + showsPrec p (SomeException e) = showsPrec p e + +instance Exception SomeException where + toException se = se + fromException = Just + +mkOldException :: Exception e => e -> E.Exception +mkOldException e = let e' = toException e + in case fromException e' of + Just e'' -> -- If the exception is actually a legacy exception + -- then throw it directly so the legacy functions + -- catch it as they expect + e'' + Nothing -> -- Otherwise, throw it as a dynamic + E.DynException (toDyn e') + +throw :: Exception e => e -> a +throw e = E.throw (mkOldException e) + +throwIO :: Exception e => e -> IO a +throwIO e = E.throwIO (mkOldException e) + +throwTo :: Exception e => ThreadId -> e -> IO () +throwTo tid e = E.throwTo tid (mkOldException e) + +----------------------------------------------------------------------------- +-- Catching exceptions + +-- |This is the simplest of the exception-catching functions. It +-- takes a single argument, runs it, and if an exception is raised +-- the \"handler\" is executed, with the value of the exception passed as an +-- argument. Otherwise, the result is returned as normal. For example: +-- +-- > catch (openFile f ReadMode) +-- > (\e -> hPutStr stderr ("Couldn't open "++f++": " ++ show e)) +-- +-- For catching exceptions in pure (non-'IO') expressions, see the +-- function 'evaluate'. +-- +-- Note that due to Haskell\'s unspecified evaluation order, an +-- expression may return one of several possible exceptions: consider +-- the expression @error \"urk\" + 1 \`div\` 0@. Does +-- 'catch' execute the handler passing +-- @ErrorCall \"urk\"@, or @ArithError DivideByZero@? +-- +-- The answer is \"either\": 'catch' makes a +-- non-deterministic choice about which exception to catch. If you +-- call it again, you might get a different exception back. This is +-- ok, because 'catch' is an 'IO' computation. +-- +-- Note that 'catch' catches all types of exceptions, and is generally +-- used for \"cleaning up\" before passing on the exception using +-- 'throwIO'. It is not good practice to discard the exception and +-- continue, without first checking the type of the exception (it +-- might be a 'ThreadKilled', for example). In this case it is usually better +-- to use 'catchJust' and select the kinds of exceptions to catch. +-- +-- Also note that the "Prelude" also exports a function called +-- 'Prelude.catch' with a similar type to 'Control.Exception.catch', +-- except that the "Prelude" version only catches the IO and user +-- families of exceptions (as required by Haskell 98). 
+-- +-- We recommend either hiding the "Prelude" version of 'Prelude.catch' +-- when importing "Control.Exception": +-- +-- > import Prelude hiding (catch) +-- +-- or importing "Control.Exception" qualified, to avoid name-clashes: +-- +-- > import qualified Control.Exception as C +-- +-- and then using @C.catch@ +-- +catch :: Exception e + => IO a -- ^ The computation to run + -> (e -> IO a) -- ^ Handler to invoke if an exception is raised + -> IO a +catch io handler = io `E.catch` handler' + where handler' e = case fromException (toException e) of + Just e' -> + -- Handle the case where e == E.Exception, + -- or one of the types that make up E.Exception + handler e' + Nothing -> + case e of + E.DynException dyn -> + case fromDynamic dyn of + Just (SomeException exc) -> + case cast exc of + Just e' -> + -- Handle the case where we have + -- a new exception type encoded + -- as a Dynamic + handler e' + Nothing -> E.throw e + Nothing -> E.throw e + _ -> E.throw e + +-- | When you want to acquire a resource, do some work with it, and +-- then release the resource, it is a good idea to use 'bracket', +-- because 'bracket' will install the necessary exception handler to +-- release the resource in the event that an exception is raised +-- during the computation. If an exception is raised, then 'bracket' will +-- re-raise the exception (after performing the release). +-- +-- A common example is opening a file: +-- +-- > bracket +-- > (openFile "filename" ReadMode) +-- > (hClose) +-- > (\handle -> do { ... }) +-- +-- The arguments to 'bracket' are in this order so that we can partially apply +-- it, e.g.: +-- +-- > withFile name mode = bracket (openFile name mode) hClose +-- +bracket + :: IO a -- ^ computation to run first (\"acquire resource\") + -> (a -> IO b) -- ^ computation to run last (\"release resource\") + -> (a -> IO c) -- ^ computation to run in-between + -> IO c -- returns the value from the in-between computation +bracket before after thing = + E.block (do + a <- before + r <- E.unblock (thing a) `onException` after a + after a + return r + ) + +onException :: IO a -> IO b -> IO a +onException io what = io `catch` \e -> do what + throw (e :: SomeException) + +block, unblock :: IO a -> IO a +block = E.block +unblock = E.unblock + +-- | A specialised variant of 'bracket' with just a computation to run +-- afterward. +-- +finally :: IO a -- ^ computation to run first + -> IO b -- ^ computation to run afterward (even if an exception + -- was raised) + -> IO a -- returns the value from the first computation +a `finally` sequel = + E.block (do + r <- E.unblock a `onException` sequel + sequel + return r + ) + +-- | A variant of 'bracket' where the return value from the first computation +-- is not required. +bracket_ :: IO a -> IO b -> IO c -> IO c +bracket_ before after thing = bracket before (const after) (const thing) + +-- | Like bracket, but only performs the final action if there was an +-- exception raised by the in-between computation. 
+bracketOnError + :: IO a -- ^ computation to run first (\"acquire resource\") + -> (a -> IO b) -- ^ computation to run last (\"release resource\") + -> (a -> IO c) -- ^ computation to run in-between + -> IO c -- returns the value from the in-between computation +bracketOnError before after thing = + block (do + a <- before + unblock (thing a) `onException` after a + ) + +assert :: Bool -> a -> a +assert True x = x +assert False _ = throw (AssertionFailed "") + +-- | The function 'catchJust' is like 'catch', but it takes an extra +-- argument which is an /exception predicate/, a function which +-- selects which type of exceptions we\'re interested in. +-- +-- > result <- catchJust errorCalls thing_to_try handler +-- +-- Any other exceptions which are not matched by the predicate +-- are re-raised, and may be caught by an enclosing +-- 'catch' or 'catchJust'. +catchJust + :: Exception e + => (e -> Maybe b) -- ^ Predicate to select exceptions + -> IO a -- ^ Computation to run + -> (b -> IO a) -- ^ Handler + -> IO a +catchJust p a handler = catch a handler' + where handler' e = case p e of + Nothing -> throw e + Just b -> handler b + +-- | A version of 'catch' with the arguments swapped around; useful in +-- situations where the code for the handler is shorter. For example: +-- +-- > do handle (\e -> exitWith (ExitFailure 1)) $ +-- > ... +handle :: Exception e => (e -> IO a) -> IO a -> IO a +handle = flip catch + +-- | A version of 'catchJust' with the arguments swapped around (see +-- 'handle'). +handleJust :: Exception e => (e -> Maybe b) -> (b -> IO a) -> IO a -> IO a +handleJust p = flip (catchJust p) + +----------------------------------------------------------------------------- +-- 'mapException' + +-- | This function maps one exception into another as proposed in the +-- paper \"A semantics for imprecise exceptions\". + +-- Notice that the usage of 'unsafePerformIO' is safe here. + +mapException :: (Exception e1, Exception e2) => (e1 -> e2) -> a -> a +mapException f v = unsafePerformIO (catch (E.evaluate v) + (\x -> throw (f x))) + +----------------------------------------------------------------------------- +-- 'try' and variations. + +-- | Similar to 'catch', but returns an 'Either' result which is +-- @('Right' a)@ if no exception was raised, or @('Left' e)@ if an +-- exception was raised and its value is @e@. +-- +-- > try a = catch (Right `liftM` a) (return . Left) +-- +-- Note: as with 'catch', it is only polite to use this variant if you intend +-- to re-throw the exception after performing whatever cleanup is needed. +-- Otherwise, 'tryJust' is generally considered to be better. +-- +-- Also note that "System.IO.Error" also exports a function called +-- 'System.IO.Error.try' with a similar type to 'Control.Exception.try', +-- except that it catches only the IO and user families of exceptions +-- (as required by the Haskell 98 @IO@ module). + +try :: Exception e => IO a -> IO (Either e a) +try a = catch (a >>= \ v -> return (Right v)) (\e -> return (Left e)) + +-- | A variant of 'try' that takes an exception predicate to select +-- which exceptions are caught (c.f. 'catchJust'). If the exception +-- does not match the predicate, it is re-thrown. +tryJust :: Exception e => (e -> Maybe b) -> IO a -> IO (Either b a) +tryJust p a = do + r <- try a + case r of + Right v -> return (Right v) + Left e -> case p e of + Nothing -> throw e + Just b -> return (Left b) + + +------------- + + + +data Handler a = forall e . 
Exception e => Handler (e -> IO a) + +catches :: IO a -> [Handler a] -> IO a +catches io handlers = io `catch` catchesHandler handlers + +catchesHandler :: [Handler a] -> SomeException -> IO a +catchesHandler handlers e = foldr tryHandler (throw e) handlers + where tryHandler (Handler handler) res + = case fromException e of + Just e' -> handler e' + Nothing -> res + + +-- ----------------------------------------------------------------------------- +-- Asynchronous exceptions + +{- $async + + #AsynchronousExceptions# Asynchronous exceptions are so-called because they arise due to +external influences, and can be raised at any point during execution. +'StackOverflow' and 'HeapOverflow' are two examples of +system-generated asynchronous exceptions. + +The primary source of asynchronous exceptions, however, is +'throwTo': + +> throwTo :: ThreadId -> Exception -> IO () + +'throwTo' (also 'throwDynTo' and 'Control.Concurrent.killThread') allows one +running thread to raise an arbitrary exception in another thread. The +exception is therefore asynchronous with respect to the target thread, +which could be doing anything at the time it receives the exception. +Great care should be taken with asynchronous exceptions; it is all too +easy to introduce race conditions by the over zealous use of +'throwTo'. +-} + +{- $block_handler +There\'s an implied 'block' around every exception handler in a call +to one of the 'catch' family of functions. This is because that is +what you want most of the time - it eliminates a common race condition +in starting an exception handler, because there may be no exception +handler on the stack to handle another exception if one arrives +immediately. If asynchronous exceptions are blocked on entering the +handler, though, we have time to install a new exception handler +before being interrupted. If this weren\'t the default, one would have +to write something like + +> block ( +> catch (unblock (...)) +> (\e -> handler) +> ) + +If you need to unblock asynchronous exceptions again in the exception +handler, just use 'unblock' as normal. + +Note that 'try' and friends /do not/ have a similar default, because +there is no exception handler in this case. If you want to use 'try' +in an asynchronous-exception-safe way, you will need to use +'block'. +-} + +{- $interruptible + +Some operations are /interruptible/, which means that they can receive +asynchronous exceptions even in the scope of a 'block'. Any function +which may itself block is defined as interruptible; this includes +'Control.Concurrent.MVar.takeMVar' +(but not 'Control.Concurrent.MVar.tryTakeMVar'), +and most operations which perform +some I\/O with the outside world. The reason for having +interruptible operations is so that we can write things like + +> block ( +> a <- takeMVar m +> catch (unblock (...)) +> (\e -> ...) +> ) + +if the 'Control.Concurrent.MVar.takeMVar' was not interruptible, +then this particular +combination could lead to deadlock, because the thread itself would be +blocked in a state where it can\'t receive any asynchronous exceptions. +With 'Control.Concurrent.MVar.takeMVar' interruptible, however, we can be +safe in the knowledge that the thread can receive exceptions right up +until the point when the 'Control.Concurrent.MVar.takeMVar' succeeds. +Similar arguments apply for other interruptible operations like +'System.IO.openFile'. 
+-} + + +---------------------------------------------------------------------- +-- Exception instance for the legacy Exception type + +instance Exception E.Exception + +---------------------------------------------------------------------- +-- The new Exception types. These need to map to/from E.Exception so +-- that uses of legacy catch/throw functions work. + +---- + +instance Exception E.ArithException where + toException ae = toException (E.ArithException ae) + fromException (SomeException e) = case cast e of + Just (E.ArithException ae) -> + Just ae + _ -> Nothing +---- + +instance Exception E.ArrayException where + toException ae = toException (E.ArrayException ae) + fromException (SomeException e) = case cast e of + Just (E.ArrayException ae) -> + Just ae + _ -> Nothing + +---- + +data AssertionFailed = AssertionFailed String + deriving Typeable + +instance Exception AssertionFailed where + toException (AssertionFailed str) = toException (E.AssertionFailed str) + fromException (SomeException e) = case cast e of + Just (E.AssertionFailed str) -> + Just (AssertionFailed str) + _ -> Nothing + +instance Show AssertionFailed where + showsPrec _ (AssertionFailed err) = showString err + +----- + +instance Exception E.AsyncException where + toException ae = toException (E.AsyncException ae) + fromException (SomeException e) = case cast e of + Just (E.AsyncException ae) -> + Just ae + _ -> Nothing + +---- + +data BlockedOnDeadMVar = BlockedOnDeadMVar + deriving Typeable + +instance Exception BlockedOnDeadMVar where + toException BlockedOnDeadMVar = toException (E.BlockedOnDeadMVar) + fromException (SomeException e) = case cast e of + Just E.BlockedOnDeadMVar -> + Just BlockedOnDeadMVar + _ -> Nothing +instance Show BlockedOnDeadMVar where + showsPrec n BlockedOnDeadMVar = showsPrec n E.BlockedOnDeadMVar + +---- + +data BlockedIndefinitely = BlockedIndefinitely + deriving Typeable + +instance Exception BlockedIndefinitely where + toException BlockedIndefinitely = toException E.BlockedIndefinitely + fromException (SomeException e) = case cast e of + Just E.BlockedIndefinitely -> + Just BlockedIndefinitely + _ -> Nothing + +instance Show BlockedIndefinitely where + showsPrec n BlockedIndefinitely = showsPrec n E.BlockedIndefinitely + +---- + +data NestedAtomically = NestedAtomically + deriving Typeable + +instance Exception NestedAtomically where + toException NestedAtomically = toException E.NestedAtomically + fromException (SomeException e) = case cast e of + Just E.NestedAtomically -> + Just NestedAtomically + _ -> Nothing + +instance Show NestedAtomically where + showsPrec n NestedAtomically = showsPrec n E.NestedAtomically + +---- + +data Deadlock = Deadlock + deriving Typeable + +instance Exception Deadlock where + toException Deadlock = toException E.Deadlock + fromException (SomeException e) = case cast e of + Just E.Deadlock -> + Just Deadlock + _ -> Nothing + +instance Show Deadlock where + showsPrec n Deadlock = showsPrec n E.Deadlock + +----- + +data ErrorCall = ErrorCall String + deriving Typeable + +instance Exception ErrorCall where + toException (ErrorCall str) = toException (E.ErrorCall str) + fromException (SomeException e) = case cast e of + Just (E.ErrorCall str) -> + Just (ErrorCall str) + _ -> Nothing + +instance Show ErrorCall where + showsPrec _ (ErrorCall err) = showString err + +----- + +instance Typeable ExitCode where + typeOf _ = mkTyConApp (mkTyCon "ExitCode") [] + +instance Exception ExitCode where + toException ee = toException (E.ExitException ee) + 
fromException (SomeException e) = case cast e of + Just (E.ExitException ee) -> + Just ee + _ -> Nothing +----- + +instance Exception E.IOException where + toException ioe = toException (E.IOException ioe) + fromException (SomeException e) = case cast e of + Just (E.IOException ioe) -> + Just ioe + _ -> Nothing + +---- + +data NoMethodError = NoMethodError String + deriving Typeable + +instance Exception NoMethodError where + toException (NoMethodError str) = toException (E.NoMethodError str) + fromException (SomeException e) = case cast e of + Just (E.NoMethodError str) -> + Just (NoMethodError str) + _ -> Nothing + +instance Show NoMethodError where + showsPrec _ (NoMethodError str) = showString str + +---- + +data NonTermination = NonTermination + deriving Typeable + +instance Exception NonTermination where + toException NonTermination = toException E.NonTermination + fromException (SomeException e) = case cast e of + Just E.NonTermination -> + Just NonTermination + _ -> Nothing + +instance Show NonTermination where + showsPrec n NonTermination = showsPrec n E.NonTermination + +---- + +data PatternMatchFail = PatternMatchFail String + deriving Typeable + +instance Exception PatternMatchFail where + toException (PatternMatchFail str) = toException (E.PatternMatchFail str) + fromException (SomeException e) = case cast e of + Just (E.PatternMatchFail str) -> + Just (PatternMatchFail str) + _ -> Nothing + +instance Show PatternMatchFail where + showsPrec _ (PatternMatchFail str) = showString str + + +---- + +data RecConError = RecConError String + deriving Typeable + +instance Exception RecConError where + toException (RecConError str) = toException (E.RecConError str) + fromException (SomeException e) = case cast e of + Just (E.RecConError str) -> + Just (RecConError str) + _ -> Nothing + +instance Show RecConError where + showsPrec _ (RecConError str) = showString str + + + +---- + +data RecSelError = RecSelError String + deriving Typeable + +instance Exception RecSelError where + toException (RecSelError str) = toException (E.RecSelError str) + fromException (SomeException e) = case cast e of + Just (E.RecSelError str) -> + Just (RecSelError str) + _ -> Nothing + +instance Show RecSelError where + showsPrec _ (RecSelError str) = showString str + +---- + +data RecUpdError = RecUpdError String + deriving Typeable + +instance Exception RecUpdError where + toException (RecUpdError str) = toException (E.RecUpdError str) + fromException (SomeException e) = case cast e of + Just (E.RecUpdError str) -> + Just (RecUpdError str) + _ -> Nothing + +instance Show RecUpdError where + showsPrec _ (RecUpdError str) = showString str + + +#endif + addfile ./camp-bin/packages/extensible-exceptions/LICENSE hunk ./camp-bin/packages/extensible-exceptions/LICENSE 1 +This library (libraries/extensible-exceptions) is derived from code from several +sources: + + * Code from the GHC project which is largely (c) The University of + Glasgow, and distributable under a BSD-style license (see below), + + * Code from the Haskell 98 Report which is (c) Simon Peyton Jones + and freely redistributable (but see the full license for + restrictions). + + * Code from the Haskell Foreign Function Interface specification, + which is (c) Manuel M. T. Chakravarty and freely redistributable + (but see the full license for restrictions). + +The full text of these licenses is reproduced below. All of the +licenses are BSD-style or compatible. 
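The instances above map the legacy exception constructors onto the new-style Exception class; a user-defined exception only needs Show and Typeable, and the default toException/fromException methods route it through the Dynamic encoding that throw and catch understand. A small sketch with a made-up exception type:

{-# LANGUAGE DeriveDataTypeable #-}

import Prelude hiding (catch)
import Control.Exception.Extensible
import Data.Typeable

-- A hypothetical application-level exception type.
data PatchApplyFailure = PatchApplyFailure String
    deriving (Show, Typeable)

instance Exception PatchApplyFailure

demo :: IO ()
demo = throwIO (PatchApplyFailure "old content does not match")
           `catch` \(PatchApplyFailure msg) -> putStrLn ("caught: " ++ msg)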
+ +----------------------------------------------------------------------------- + +The Glasgow Haskell Compiler License + +Copyright 2004, The University Court of the University of Glasgow. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +- Neither name of the University nor the names of its contributors may be +used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF +GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +----------------------------------------------------------------------------- + +Code derived from the document "Report on the Programming Language +Haskell 98", is distributed under the following license: + + Copyright (c) 2002 Simon Peyton Jones + + The authors intend this Report to belong to the entire Haskell + community, and so we grant permission to copy and distribute it for + any purpose, provided that it is reproduced in its entirety, + including this Notice. Modified versions of this Report may also be + copied and distributed for any purpose, provided that the modified + version is clearly presented as such, and that it does not claim to + be a definition of the Haskell 98 Language. + +----------------------------------------------------------------------------- + +Code derived from the document "The Haskell 98 Foreign Function +Interface, An Addendum to the Haskell 98 Report" is distributed under +the following license: + + Copyright (c) 2002 Manuel M. T. Chakravarty + + The authors intend this Report to belong to the entire Haskell + community, and so we grant permission to copy and distribute it for + any purpose, provided that it is reproduced in its entirety, + including this Notice. Modified versions of this Report may also be + copied and distributed for any purpose, provided that the modified + version is clearly presented as such, and that it does not claim to + be a definition of the Haskell 98 Foreign Function Interface. 
+ +----------------------------------------------------------------------------- addfile ./camp-bin/packages/extensible-exceptions/Setup.hs hunk ./camp-bin/packages/extensible-exceptions/Setup.hs 1 +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain addfile ./camp-bin/packages/extensible-exceptions/extensible-exceptions.cabal hunk ./camp-bin/packages/extensible-exceptions/extensible-exceptions.cabal 1 +name: extensible-exceptions +version: 0.1.1.0 +license: BSD3 +license-file: LICENSE +maintainer: libraries@haskell.org +synopsis: Extensible exceptions +description: + This package provides extensible exceptions for both new and + old versions of GHC (i.e., < 6.10). +cabal-version: >=1.2 +build-type: Simple + +Library { + if impl(ghc>=6.9) + cpp-options: -DUSE_NEW_EXCEPTIONS + build-depends: base>=4&&<5 + else + build-depends: base<4 + exposed-modules: + Control.Exception.Extensible + extensions: CPP, ExistentialQuantification, DeriveDataTypeable +} hunk ./camp-core/camp-core.cabal 50 + + if impl(ghc < 6.9) + Extensions: PatternSignatures hunk ./camp-repository/Camp/Repository.hs 55 -import Control.Exception +import Control.Exception.Extensible hunk ./camp-repository/camp-repository.cabal 18 +Flag have_extensible_exceptions + Description: Do we have the extensible-exceptions package? + hunk ./camp-repository/camp-repository.cabal 50 + -- This is a hack to avoid needing to install the extensible-exceptions + -- package when building with the GHC HEAD. The in-tree sources are from + -- extensible-exceptions 0.1.1.0 + if flag(have_extensible_exceptions) + Build-Depends: extensible-exceptions + else + Hs-Source-Dirs: packages/extensible-exceptions + Other-Modules: + Control.Exception.Extensible + addfile ./camp-repository/packages/extensible-exceptions/Control/Exception/Extensible.hs hunk ./camp-repository/packages/extensible-exceptions/Control/Exception/Extensible.hs 1 +{-# LANGUAGE DeriveDataTypeable #-} +------------------------- +-- | +-- Module : Control.Exception +-- Copyright : (c) The University of Glasgow 2001 +-- License : BSD-style (see the file LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Stability : experimental +-- Portability : non-portable (uses ExistentialQuantification and DeriveDataTypeable) +-- +-- This module provides the extensible exceptions API for raising and catching both +-- built-in and user-defined exceptions. +-- +-- For newer versions of GHC (>=6.9), this package re-exports 'Control.Exception'. +-- Otherwise, it provides a compatibility layer around the previous version of the +-- extensions API. + +#ifdef USE_NEW_EXCEPTIONS +module Control.Exception.Extensible (module Control.Exception) where + +import Control.Exception hiding (blocked) + +#else +module Control.Exception.Extensible ( + -- * The Exception type + SomeException(..), + Exception(..), + E.IOException, + E.ArithException(..), + E.ArrayException(..), + AssertionFailed(..), + E.AsyncException(..), + NonTermination(..), + NestedAtomically(..), + ExitCode(..), + BlockedOnDeadMVar(..), + BlockedIndefinitely(..), + Deadlock(..), + NoMethodError(..), + PatternMatchFail(..), + RecConError(..), + RecSelError(..), + RecUpdError(..), + ErrorCall(..), + + -- * Throwing exceptions + throwIO, + throw, + ioError, + throwTo, + -- * Catching Exceptions + + -- |There are several functions for catching and examining + -- exceptions; all of them may only be used from within the + -- 'IO' monad. 
+ + -- ** The @catch@ functions + catch, -- :: IO a -> (Exception -> IO a) -> IO a + catches, Handler(..), + catchJust, -- :: (Exception -> Maybe b) -> IO a -> (b -> IO a) -> IO a + + -- ** The @handle@ functions + handle, -- :: (Exception -> IO a) -> IO a -> IO a + handleJust,-- :: (Exception -> Maybe b) -> (b -> IO a) -> IO a -> IO a + + -- ** The @try@ functions + try, -- :: IO a -> IO (Either Exception a) + tryJust, -- :: (Exception -> Maybe b) -> a -> IO (Either b a) + onException, + + -- ** The @evaluate@ function + E.evaluate, -- :: a -> IO a + + -- ** The @mapException@ function + mapException, -- :: (Exception -> Exception) -> a -> a + + -- * Asynchronous Exceptions + + -- $async + + -- ** Asynchronous exception control + + -- |The following two functions allow a thread to control delivery of + -- asynchronous exceptions during a critical region. + + E.block, + E.unblock, + + -- *** Applying @block@ to an exception handler + + -- $block_handler + + -- *** Interruptible operations + + -- $interruptible + + -- * Assertions + + assert, + + -- * Utilities + bracket, + bracket_, + bracketOnError, + finally + ) where + +import Prelude hiding (catch) +import Control.Concurrent hiding (throwTo) +import qualified Control.Exception as E +import Data.Dynamic +import Data.Typeable +import System.Exit +import System.IO.Unsafe(unsafePerformIO) + +class (Typeable e, Show e) => Exception e where + toException :: e -> SomeException + fromException :: SomeException -> Maybe e + + toException = SomeException + fromException (SomeException e) = cast e + +data SomeException = forall e . Exception e => SomeException e + deriving Typeable + +instance Show SomeException where + showsPrec p (SomeException e) = showsPrec p e + +instance Exception SomeException where + toException se = se + fromException = Just + +mkOldException :: Exception e => e -> E.Exception +mkOldException e = let e' = toException e + in case fromException e' of + Just e'' -> -- If the exception is actually a legacy exception + -- then throw it directly so the legacy functions + -- catch it as they expect + e'' + Nothing -> -- Otherwise, throw it as a dynamic + E.DynException (toDyn e') + +throw :: Exception e => e -> a +throw e = E.throw (mkOldException e) + +throwIO :: Exception e => e -> IO a +throwIO e = E.throwIO (mkOldException e) + +throwTo :: Exception e => ThreadId -> e -> IO () +throwTo tid e = E.throwTo tid (mkOldException e) + +----------------------------------------------------------------------------- +-- Catching exceptions + +-- |This is the simplest of the exception-catching functions. It +-- takes a single argument, runs it, and if an exception is raised +-- the \"handler\" is executed, with the value of the exception passed as an +-- argument. Otherwise, the result is returned as normal. For example: +-- +-- > catch (openFile f ReadMode) +-- > (\e -> hPutStr stderr ("Couldn't open "++f++": " ++ show e)) +-- +-- For catching exceptions in pure (non-'IO') expressions, see the +-- function 'evaluate'. +-- +-- Note that due to Haskell\'s unspecified evaluation order, an +-- expression may return one of several possible exceptions: consider +-- the expression @error \"urk\" + 1 \`div\` 0@. Does +-- 'catch' execute the handler passing +-- @ErrorCall \"urk\"@, or @ArithError DivideByZero@? +-- +-- The answer is \"either\": 'catch' makes a +-- non-deterministic choice about which exception to catch. If you +-- call it again, you might get a different exception back. This is +-- ok, because 'catch' is an 'IO' computation. 
+-- +-- Note that 'catch' catches all types of exceptions, and is generally +-- used for \"cleaning up\" before passing on the exception using +-- 'throwIO'. It is not good practice to discard the exception and +-- continue, without first checking the type of the exception (it +-- might be a 'ThreadKilled', for example). In this case it is usually better +-- to use 'catchJust' and select the kinds of exceptions to catch. +-- +-- Also note that the "Prelude" also exports a function called +-- 'Prelude.catch' with a similar type to 'Control.Exception.catch', +-- except that the "Prelude" version only catches the IO and user +-- families of exceptions (as required by Haskell 98). +-- +-- We recommend either hiding the "Prelude" version of 'Prelude.catch' +-- when importing "Control.Exception": +-- +-- > import Prelude hiding (catch) +-- +-- or importing "Control.Exception" qualified, to avoid name-clashes: +-- +-- > import qualified Control.Exception as C +-- +-- and then using @C.catch@ +-- +catch :: Exception e + => IO a -- ^ The computation to run + -> (e -> IO a) -- ^ Handler to invoke if an exception is raised + -> IO a +catch io handler = io `E.catch` handler' + where handler' e = case fromException (toException e) of + Just e' -> + -- Handle the case where e == E.Exception, + -- or one of the types that make up E.Exception + handler e' + Nothing -> + case e of + E.DynException dyn -> + case fromDynamic dyn of + Just (SomeException exc) -> + case cast exc of + Just e' -> + -- Handle the case where we have + -- a new exception type encoded + -- as a Dynamic + handler e' + Nothing -> E.throw e + Nothing -> E.throw e + _ -> E.throw e + +-- | When you want to acquire a resource, do some work with it, and +-- then release the resource, it is a good idea to use 'bracket', +-- because 'bracket' will install the necessary exception handler to +-- release the resource in the event that an exception is raised +-- during the computation. If an exception is raised, then 'bracket' will +-- re-raise the exception (after performing the release). +-- +-- A common example is opening a file: +-- +-- > bracket +-- > (openFile "filename" ReadMode) +-- > (hClose) +-- > (\handle -> do { ... }) +-- +-- The arguments to 'bracket' are in this order so that we can partially apply +-- it, e.g.: +-- +-- > withFile name mode = bracket (openFile name mode) hClose +-- +bracket + :: IO a -- ^ computation to run first (\"acquire resource\") + -> (a -> IO b) -- ^ computation to run last (\"release resource\") + -> (a -> IO c) -- ^ computation to run in-between + -> IO c -- returns the value from the in-between computation +bracket before after thing = + E.block (do + a <- before + r <- E.unblock (thing a) `onException` after a + after a + return r + ) + +onException :: IO a -> IO b -> IO a +onException io what = io `catch` \e -> do what + throw (e :: SomeException) + +block, unblock :: IO a -> IO a +block = E.block +unblock = E.unblock + +-- | A specialised variant of 'bracket' with just a computation to run +-- afterward. +-- +finally :: IO a -- ^ computation to run first + -> IO b -- ^ computation to run afterward (even if an exception + -- was raised) + -> IO a -- returns the value from the first computation +a `finally` sequel = + E.block (do + r <- E.unblock a `onException` sequel + sequel + return r + ) + +-- | A variant of 'bracket' where the return value from the first computation +-- is not required. 
+bracket_ :: IO a -> IO b -> IO c -> IO c +bracket_ before after thing = bracket before (const after) (const thing) + +-- | Like bracket, but only performs the final action if there was an +-- exception raised by the in-between computation. +bracketOnError + :: IO a -- ^ computation to run first (\"acquire resource\") + -> (a -> IO b) -- ^ computation to run last (\"release resource\") + -> (a -> IO c) -- ^ computation to run in-between + -> IO c -- returns the value from the in-between computation +bracketOnError before after thing = + block (do + a <- before + unblock (thing a) `onException` after a + ) + +assert :: Bool -> a -> a +assert True x = x +assert False _ = throw (AssertionFailed "") + +-- | The function 'catchJust' is like 'catch', but it takes an extra +-- argument which is an /exception predicate/, a function which +-- selects which type of exceptions we\'re interested in. +-- +-- > result <- catchJust errorCalls thing_to_try handler +-- +-- Any other exceptions which are not matched by the predicate +-- are re-raised, and may be caught by an enclosing +-- 'catch' or 'catchJust'. +catchJust + :: Exception e + => (e -> Maybe b) -- ^ Predicate to select exceptions + -> IO a -- ^ Computation to run + -> (b -> IO a) -- ^ Handler + -> IO a +catchJust p a handler = catch a handler' + where handler' e = case p e of + Nothing -> throw e + Just b -> handler b + +-- | A version of 'catch' with the arguments swapped around; useful in +-- situations where the code for the handler is shorter. For example: +-- +-- > do handle (\e -> exitWith (ExitFailure 1)) $ +-- > ... +handle :: Exception e => (e -> IO a) -> IO a -> IO a +handle = flip catch + +-- | A version of 'catchJust' with the arguments swapped around (see +-- 'handle'). +handleJust :: Exception e => (e -> Maybe b) -> (b -> IO a) -> IO a -> IO a +handleJust p = flip (catchJust p) + +----------------------------------------------------------------------------- +-- 'mapException' + +-- | This function maps one exception into another as proposed in the +-- paper \"A semantics for imprecise exceptions\". + +-- Notice that the usage of 'unsafePerformIO' is safe here. + +mapException :: (Exception e1, Exception e2) => (e1 -> e2) -> a -> a +mapException f v = unsafePerformIO (catch (E.evaluate v) + (\x -> throw (f x))) + +----------------------------------------------------------------------------- +-- 'try' and variations. + +-- | Similar to 'catch', but returns an 'Either' result which is +-- @('Right' a)@ if no exception was raised, or @('Left' e)@ if an +-- exception was raised and its value is @e@. +-- +-- > try a = catch (Right `liftM` a) (return . Left) +-- +-- Note: as with 'catch', it is only polite to use this variant if you intend +-- to re-throw the exception after performing whatever cleanup is needed. +-- Otherwise, 'tryJust' is generally considered to be better. +-- +-- Also note that "System.IO.Error" also exports a function called +-- 'System.IO.Error.try' with a similar type to 'Control.Exception.try', +-- except that it catches only the IO and user families of exceptions +-- (as required by the Haskell 98 @IO@ module). + +try :: Exception e => IO a -> IO (Either e a) +try a = catch (a >>= \ v -> return (Right v)) (\e -> return (Left e)) + +-- | A variant of 'try' that takes an exception predicate to select +-- which exceptions are caught (c.f. 'catchJust'). If the exception +-- does not match the predicate, it is re-thrown. 
+tryJust :: Exception e => (e -> Maybe b) -> IO a -> IO (Either b a) +tryJust p a = do + r <- try a + case r of + Right v -> return (Right v) + Left e -> case p e of + Nothing -> throw e + Just b -> return (Left b) + + +------------- + + + +data Handler a = forall e . Exception e => Handler (e -> IO a) + +catches :: IO a -> [Handler a] -> IO a +catches io handlers = io `catch` catchesHandler handlers + +catchesHandler :: [Handler a] -> SomeException -> IO a +catchesHandler handlers e = foldr tryHandler (throw e) handlers + where tryHandler (Handler handler) res + = case fromException e of + Just e' -> handler e' + Nothing -> res + + +-- ----------------------------------------------------------------------------- +-- Asynchronous exceptions + +{- $async + + #AsynchronousExceptions# Asynchronous exceptions are so-called because they arise due to +external influences, and can be raised at any point during execution. +'StackOverflow' and 'HeapOverflow' are two examples of +system-generated asynchronous exceptions. + +The primary source of asynchronous exceptions, however, is +'throwTo': + +> throwTo :: ThreadId -> Exception -> IO () + +'throwTo' (also 'throwDynTo' and 'Control.Concurrent.killThread') allows one +running thread to raise an arbitrary exception in another thread. The +exception is therefore asynchronous with respect to the target thread, +which could be doing anything at the time it receives the exception. +Great care should be taken with asynchronous exceptions; it is all too +easy to introduce race conditions by the over zealous use of +'throwTo'. +-} + +{- $block_handler +There\'s an implied 'block' around every exception handler in a call +to one of the 'catch' family of functions. This is because that is +what you want most of the time - it eliminates a common race condition +in starting an exception handler, because there may be no exception +handler on the stack to handle another exception if one arrives +immediately. If asynchronous exceptions are blocked on entering the +handler, though, we have time to install a new exception handler +before being interrupted. If this weren\'t the default, one would have +to write something like + +> block ( +> catch (unblock (...)) +> (\e -> handler) +> ) + +If you need to unblock asynchronous exceptions again in the exception +handler, just use 'unblock' as normal. + +Note that 'try' and friends /do not/ have a similar default, because +there is no exception handler in this case. If you want to use 'try' +in an asynchronous-exception-safe way, you will need to use +'block'. +-} + +{- $interruptible + +Some operations are /interruptible/, which means that they can receive +asynchronous exceptions even in the scope of a 'block'. Any function +which may itself block is defined as interruptible; this includes +'Control.Concurrent.MVar.takeMVar' +(but not 'Control.Concurrent.MVar.tryTakeMVar'), +and most operations which perform +some I\/O with the outside world. The reason for having +interruptible operations is so that we can write things like + +> block ( +> a <- takeMVar m +> catch (unblock (...)) +> (\e -> ...) +> ) + +if the 'Control.Concurrent.MVar.takeMVar' was not interruptible, +then this particular +combination could lead to deadlock, because the thread itself would be +blocked in a state where it can\'t receive any asynchronous exceptions. 
+With 'Control.Concurrent.MVar.takeMVar' interruptible, however, we can be +safe in the knowledge that the thread can receive exceptions right up +until the point when the 'Control.Concurrent.MVar.takeMVar' succeeds. +Similar arguments apply for other interruptible operations like +'System.IO.openFile'. +-} + + +---------------------------------------------------------------------- +-- Exception instance for the legacy Exception type + +instance Exception E.Exception + +---------------------------------------------------------------------- +-- The new Exception types. These need to map to/from E.Exception so +-- that uses of legacy catch/throw functions work. + +---- + +instance Exception E.ArithException where + toException ae = toException (E.ArithException ae) + fromException (SomeException e) = case cast e of + Just (E.ArithException ae) -> + Just ae + _ -> Nothing +---- + +instance Exception E.ArrayException where + toException ae = toException (E.ArrayException ae) + fromException (SomeException e) = case cast e of + Just (E.ArrayException ae) -> + Just ae + _ -> Nothing + +---- + +data AssertionFailed = AssertionFailed String + deriving Typeable + +instance Exception AssertionFailed where + toException (AssertionFailed str) = toException (E.AssertionFailed str) + fromException (SomeException e) = case cast e of + Just (E.AssertionFailed str) -> + Just (AssertionFailed str) + _ -> Nothing + +instance Show AssertionFailed where + showsPrec _ (AssertionFailed err) = showString err + +----- + +instance Exception E.AsyncException where + toException ae = toException (E.AsyncException ae) + fromException (SomeException e) = case cast e of + Just (E.AsyncException ae) -> + Just ae + _ -> Nothing + +---- + +data BlockedOnDeadMVar = BlockedOnDeadMVar + deriving Typeable + +instance Exception BlockedOnDeadMVar where + toException BlockedOnDeadMVar = toException (E.BlockedOnDeadMVar) + fromException (SomeException e) = case cast e of + Just E.BlockedOnDeadMVar -> + Just BlockedOnDeadMVar + _ -> Nothing +instance Show BlockedOnDeadMVar where + showsPrec n BlockedOnDeadMVar = showsPrec n E.BlockedOnDeadMVar + +---- + +data BlockedIndefinitely = BlockedIndefinitely + deriving Typeable + +instance Exception BlockedIndefinitely where + toException BlockedIndefinitely = toException E.BlockedIndefinitely + fromException (SomeException e) = case cast e of + Just E.BlockedIndefinitely -> + Just BlockedIndefinitely + _ -> Nothing + +instance Show BlockedIndefinitely where + showsPrec n BlockedIndefinitely = showsPrec n E.BlockedIndefinitely + +---- + +data NestedAtomically = NestedAtomically + deriving Typeable + +instance Exception NestedAtomically where + toException NestedAtomically = toException E.NestedAtomically + fromException (SomeException e) = case cast e of + Just E.NestedAtomically -> + Just NestedAtomically + _ -> Nothing + +instance Show NestedAtomically where + showsPrec n NestedAtomically = showsPrec n E.NestedAtomically + +---- + +data Deadlock = Deadlock + deriving Typeable + +instance Exception Deadlock where + toException Deadlock = toException E.Deadlock + fromException (SomeException e) = case cast e of + Just E.Deadlock -> + Just Deadlock + _ -> Nothing + +instance Show Deadlock where + showsPrec n Deadlock = showsPrec n E.Deadlock + +----- + +data ErrorCall = ErrorCall String + deriving Typeable + +instance Exception ErrorCall where + toException (ErrorCall str) = toException (E.ErrorCall str) + fromException (SomeException e) = case cast e of + Just (E.ErrorCall str) -> + Just 
(ErrorCall str) + _ -> Nothing + +instance Show ErrorCall where + showsPrec _ (ErrorCall err) = showString err + +----- + +instance Typeable ExitCode where + typeOf _ = mkTyConApp (mkTyCon "ExitCode") [] + +instance Exception ExitCode where + toException ee = toException (E.ExitException ee) + fromException (SomeException e) = case cast e of + Just (E.ExitException ee) -> + Just ee + _ -> Nothing +----- + +instance Exception E.IOException where + toException ioe = toException (E.IOException ioe) + fromException (SomeException e) = case cast e of + Just (E.IOException ioe) -> + Just ioe + _ -> Nothing + +---- + +data NoMethodError = NoMethodError String + deriving Typeable + +instance Exception NoMethodError where + toException (NoMethodError str) = toException (E.NoMethodError str) + fromException (SomeException e) = case cast e of + Just (E.NoMethodError str) -> + Just (NoMethodError str) + _ -> Nothing + +instance Show NoMethodError where + showsPrec _ (NoMethodError str) = showString str + +---- + +data NonTermination = NonTermination + deriving Typeable + +instance Exception NonTermination where + toException NonTermination = toException E.NonTermination + fromException (SomeException e) = case cast e of + Just E.NonTermination -> + Just NonTermination + _ -> Nothing + +instance Show NonTermination where + showsPrec n NonTermination = showsPrec n E.NonTermination + +---- + +data PatternMatchFail = PatternMatchFail String + deriving Typeable + +instance Exception PatternMatchFail where + toException (PatternMatchFail str) = toException (E.PatternMatchFail str) + fromException (SomeException e) = case cast e of + Just (E.PatternMatchFail str) -> + Just (PatternMatchFail str) + _ -> Nothing + +instance Show PatternMatchFail where + showsPrec _ (PatternMatchFail str) = showString str + + +---- + +data RecConError = RecConError String + deriving Typeable + +instance Exception RecConError where + toException (RecConError str) = toException (E.RecConError str) + fromException (SomeException e) = case cast e of + Just (E.RecConError str) -> + Just (RecConError str) + _ -> Nothing + +instance Show RecConError where + showsPrec _ (RecConError str) = showString str + + + +---- + +data RecSelError = RecSelError String + deriving Typeable + +instance Exception RecSelError where + toException (RecSelError str) = toException (E.RecSelError str) + fromException (SomeException e) = case cast e of + Just (E.RecSelError str) -> + Just (RecSelError str) + _ -> Nothing + +instance Show RecSelError where + showsPrec _ (RecSelError str) = showString str + +---- + +data RecUpdError = RecUpdError String + deriving Typeable + +instance Exception RecUpdError where + toException (RecUpdError str) = toException (E.RecUpdError str) + fromException (SomeException e) = case cast e of + Just (E.RecUpdError str) -> + Just (RecUpdError str) + _ -> Nothing + +instance Show RecUpdError where + showsPrec _ (RecUpdError str) = showString str + + +#endif + addfile ./camp-repository/packages/extensible-exceptions/LICENSE hunk ./camp-repository/packages/extensible-exceptions/LICENSE 1 +This library (libraries/extensible-exceptions) is derived from code from several +sources: + + * Code from the GHC project which is largely (c) The University of + Glasgow, and distributable under a BSD-style license (see below), + + * Code from the Haskell 98 Report which is (c) Simon Peyton Jones + and freely redistributable (but see the full license for + restrictions). 
+ + * Code from the Haskell Foreign Function Interface specification, + which is (c) Manuel M. T. Chakravarty and freely redistributable + (but see the full license for restrictions). + +The full text of these licenses is reproduced below. All of the +licenses are BSD-style or compatible. + +----------------------------------------------------------------------------- + +The Glasgow Haskell Compiler License + +Copyright 2004, The University Court of the University of Glasgow. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +- Neither name of the University nor the names of its contributors may be +used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF +GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +----------------------------------------------------------------------------- + +Code derived from the document "Report on the Programming Language +Haskell 98", is distributed under the following license: + + Copyright (c) 2002 Simon Peyton Jones + + The authors intend this Report to belong to the entire Haskell + community, and so we grant permission to copy and distribute it for + any purpose, provided that it is reproduced in its entirety, + including this Notice. Modified versions of this Report may also be + copied and distributed for any purpose, provided that the modified + version is clearly presented as such, and that it does not claim to + be a definition of the Haskell 98 Language. + +----------------------------------------------------------------------------- + +Code derived from the document "The Haskell 98 Foreign Function +Interface, An Addendum to the Haskell 98 Report" is distributed under +the following license: + + Copyright (c) 2002 Manuel M. T. Chakravarty + + The authors intend this Report to belong to the entire Haskell + community, and so we grant permission to copy and distribute it for + any purpose, provided that it is reproduced in its entirety, + including this Notice. Modified versions of this Report may also be + copied and distributed for any purpose, provided that the modified + version is clearly presented as such, and that it does not claim to + be a definition of the Haskell 98 Foreign Function Interface. 
+ +----------------------------------------------------------------------------- addfile ./camp-repository/packages/extensible-exceptions/Setup.hs hunk ./camp-repository/packages/extensible-exceptions/Setup.hs 1 +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMain addfile ./camp-repository/packages/extensible-exceptions/extensible-exceptions.cabal hunk ./camp-repository/packages/extensible-exceptions/extensible-exceptions.cabal 1 +name: extensible-exceptions +version: 0.1.1.0 +license: BSD3 +license-file: LICENSE +maintainer: libraries@haskell.org +synopsis: Extensible exceptions +description: + This package provides extensible exceptions for both new and + old versions of GHC (i.e., < 6.10). +cabal-version: >=1.2 +build-type: Simple + +Library { + if impl(ghc>=6.9) + cpp-options: -DUSE_NEW_EXCEPTIONS + build-depends: base>=4&&<5 + else + build-depends: base<4 + exposed-modules: + Control.Exception.Extensible + extensions: CPP, ExistentialQuantification, DeriveDataTypeable +} hunk ./camp-bin/camp.cabal 53 + if impl(ghc >= 6.9) + cpp-options: -DUSE_NEW_EXCEPTIONS + build-depends: base >= 4 hunk ./camp-repository/camp-repository.cabal 59 + if impl(ghc >= 6.9) + cpp-options: -DUSE_NEW_EXCEPTIONS + build-depends: base >= 4 hunk ./camp-bin/Camp/Main.hs 76 - " record unrecord amend-record tag obliterate rollback", + " record unrecord amend-record tag obliterate invert", hunk ./camp-bin/Camp/Command/Get.hs 57 - where f i = do let fp' = fp ++ "_" ++ show i + where f :: Integer -> IO FilePath + f i = do let fp' = fp ++ "_" ++ show i hunk ./camp-bin/Camp/Command/Interactive.hs 9 -import Data.List hunk ./camp-bin/Camp/Main.hs 17 -import Control.Monad hunk ./camp-bin/Camp/Main.hs 19 -import System.IO hunk ./camp-core/Camp/Patch/Primitive.hs 18 -import Data.List hunk ./camp-core/Camp/Patch/Primitive.hs 393 - evaluate $ BS.length content + _ <- evaluate $ BS.length content hunk ./camp-core/Camp/Utils.hs 20 -import Data.Char hunk ./camp-core/Camp/Utils.hs 95 - evaluate (length xs) + _ <- evaluate (length xs) hunk ./camp-core/Camp/Utils.hs 159 - evaluate n' + n'' <- evaluate n' hunk ./camp-core/Camp/Utils.hs 161 - $ hGetLazily chunkSize h n' + $ hGetLazily chunkSize h n'' hunk ./camp-network/Camp/Curl.hsc 5 + +-- Th include currently gives us a deprecated message +{-# OPTIONS_GHC -fno-warn-deprecated-flags #-} hunk ./camp-repository/Camp/Diff.hs 16 -import System.FilePath hunk ./camp-repository/Camp/Repository.hs 153 - (\fp -> do tryJust (guard . isDoesNotExistError) - (removeFile fp) + (\fp -> do _ <- tryJust (guard . 
isDoesNotExistError) + (removeFile fp) hunk ./camp-repository/Camp/Repository.hs 228 - evaluate $ length inv + _ <- evaluate $ length inv hunk ./camp-bin/Camp/Main.hs 42 + -- XXX Hack: Handle prefixes properly: + "rec" -> record gf args' hunk ./camp-core/Camp/Patch/Catch.hs 101 - _ -> panic "XXX" - _ -> panic "XXX" - _ -> panic "XXX" + _ -> panic "Commute Catch Catch 1" + _ -> panic "Commute Catch Catch 2" + _ -> panic "Commute Catch Catch 3" hunk ./camp-core/Camp/Patch/Catch.hs 133 - _ -> panic "XXX" - _ -> panic "XXX" + _ -> panic "Commute Catch Catch 4" + _ -> panic "Commute Catch Catch 5" hunk ./camp-core/Camp/Patch/Catch.hs 161 - _ -> panic "XXX" + _ -> panic "Commute Catch Catch 6" hunk ./camp-core/Camp/Patch/Catch.hs 193 - _ -> panic "XXX" - _ -> panic "XXX" + _ -> panic "Commute Catch Catch 7" + _ -> panic "Commute Catch Catch 8" hunk ./camp-core/Camp/Patch/Catch.hs 76 - -- [p] [p^, {:p}, :q] <-> q [q^, {:q}, :p] + -- p [p^, {:p}, :q] <-> q [q^, {:q}, :p] hunk ./camp-core/Camp/Patch/Catch.hs 6 -import Camp.Patch.CommutePast hunk ./camp-core/Camp/Patch/Catch.hs 205 - qConflicts' <- mapM (\qConf -> commutePast (p' `ThenOpen` qConf)) + qConflicts' <- mapM (\qConf -> commutePastOrAnnihilate p' qConf) hunk ./camp-core/Camp/Patch/Catch.hs 207 - qIdentity' <- commutePast (p' `ThenOpen` qIdentity) + qIdentity' <- commutePastOrAnnihilate p' qIdentity hunk ./camp-core/Camp/Patch/Catch.hs 214 - pConflicts' <- mapM (\pConf -> commutePast (invert q `ThenOpen` pConf)) + pConflicts' <- mapM (\pConf -> commutePastOrAnnihilate (invert q) pConf) hunk ./camp-core/Camp/Patch/Catch.hs 216 - pIdentity' <- commutePast (invert q `ThenOpen` pIdentity) + pIdentity' <- commutePastOrAnnihilate (invert q) pIdentity hunk ./camp-core/Camp/Patch/Catch.hs 231 - pIdentity' <- commutePast (invert qEffect `ThenOpen` pIdentity) - qIdentity' <- commutePast (pEffect' `ThenOpen` qIdentity) + pIdentity' <- allCommutePastOrAnnihilate (invert qEffect) pIdentity + qIdentity' <- allCommutePastOrAnnihilate pEffect' qIdentity hunk ./camp-core/Camp/Patch/CommutePast.hs 1 - -module Camp.Patch.CommutePast (ThenOpen(..), CommutePast(..)) where - -data ThenOpen p q from where - ThenOpen :: p from mid -> q mid -> ThenOpen p q from - -class CommutePast p q where - commutePast :: ThenOpen p q from -> Maybe (q from) - rmfile ./camp-core/Camp/Patch/CommutePast.hs hunk ./camp-core/Camp/Patch/ContextedPatch.hs 3 - ContextedPatch(..), addToContext, conflictsWith + ContextedPatch(..), addToContext, conflictsWith, + allCommutePastOrAnnihilate, commutePastOrAnnihilate hunk ./camp-core/Camp/Patch/ContextedPatch.hs 8 -import Camp.Patch.CommutePast +import Camp.Patch.Equality hunk ./camp-core/Camp/Patch/ContextedPatch.hs 39 --- XXX Needs to handle the p p^ case -instance Commute p Patch => CommutePast p ContextedPatch where - commutePast (p `ThenOpen` ContextedPatch qs q) - = do (qs' `Then` p') <- commute (p `Then` qs) - (q' `Then` _) <- commute (p' `Then` q) - return (ContextedPatch qs' q') +commutePastOrAnnihilate :: Patch from mid -> ContextedPatch mid + -> Maybe (ContextedPatch from) +commutePastOrAnnihilate p (ContextedPatch qs q) + = do cpa <- commutePastSeqOrAnnihilate (p `Then` qs) + case cpa of + CommutedPast (qs' `Then` p') -> + do (q' `Then` _) <- commute (p' `Then` q) + return (ContextedPatch qs' q') + Annihilated qs' -> + return (ContextedPatch qs' q) + +commutePastSeqOrAnnihilate :: Then Patch (Seq Patch) from to + -> Maybe (CommutedPastOrAnnihilated from to) +commutePastSeqOrAnnihilate (p `Then` Nil) = Just (CommutedPast (Nil 
`Then` p)) +commutePastSeqOrAnnihilate (p `Then` Cons q qs) + = if inverseSubName (name p) == name q + then case isEqual (invert p) q of + IsEqual -> Just (Annihilated qs) + else do (q' `Then` p') <- commute (p `Then` q) + cpa <- commutePastSeqOrAnnihilate (p' `Then` qs) + case cpa of + CommutedPast (qs' `Then` p'') -> + return (CommutedPast (Cons q' qs' `Then` p'')) + Annihilated qs' -> + return (Annihilated (Cons q' qs')) + +data CommutedPastOrAnnihilated from to where + CommutedPast :: Then (Seq Patch) Patch from to + -> CommutedPastOrAnnihilated from to + Annihilated :: Seq Patch from to -> CommutedPastOrAnnihilated from to + +allCommutePastOrAnnihilate :: Seq Patch from mid -> ContextedPatch mid + -> Maybe (ContextedPatch from) +allCommutePastOrAnnihilate Nil cp = Just cp +allCommutePastOrAnnihilate (Cons p ps) cp + = do cp' <- allCommutePastOrAnnihilate ps cp + commutePastOrAnnihilate p cp' hunk ./camp-core/Camp/Patch/ContextedPatch.hs 82 - case commutePast (p `ThenOpen` cq') of + case commutePastOrAnnihilate p cq' of hunk ./camp-core/Camp/Patch/ContextedPatch.hs 88 - = case commutePast (invert p `ThenOpen` addToContext (invert ps) q) of + = case commutePastOrAnnihilate (invert p) (addToContext (invert ps) q) of hunk ./camp-core/camp-core.cabal 23 - Camp.Patch.CommutePast hunk ./camp-core/Camp/Utils.hs 8 - panic, + panic, panic2, hunk ./camp-core/Camp/Utils.hs 12 +import Camp.Patch.Pretty hunk ./camp-core/Camp/Utils.hs 134 +panic2 :: (Ppr p, Ppr q) => p -> q -> String -> a +panic2 p q str = panic (str ++ ":\n" ++ pprint p ++ "\n" ++ pprint q) + hunk ./camp-core/Camp/Patch/Catch.hs 75 - -- p [p^, {:p}, :q] <-> q [q^, {:q}, :p] - commute (Patch p `Then` Conflictor qEffect [qConflict] qIdentity) - | name p == name1 qConflict - -- To sanity check, we need to confirm: - -- p = qEffect^ - -- qConflict = : p - -- qIdentity = : q - = case qEffect of - Cons pInverse Nil -> - let p' = invert pInverse - in case sameStart p p' of - IsEqual -> - case isEqual p (invert pInverse) of - IsEqual -> - case qConflict of - ContextedPatch Nil p'' -> - case isEqual p p'' of - IsEqual -> - case qIdentity of - ContextedPatch Nil q -> - Just (Patch q - `Then` - Conflictor (invert q `Cons` Nil) - [ContextedPatch Nil q] - (ContextedPatch Nil p)) - _ -> panic "Commute Catch Catch 1" - _ -> panic "Commute Catch Catch 2" - _ -> panic "Commute Catch Catch 3" - -- [p] [p^ r, {r^:p} U X, y] <-> [r, X, y] [, {y}, r^:p] - -- XXX The qConflicts and qConflicts' type signatures are to fix - -- building on GHC 6.8 - commute (Patch p `Then` Conflictor qEffect (qConflicts :: [ContextedPatch from]) qIdentity) - | name p `elem` map name1 qConflicts - -- To sanity check, we need to confirm: - -- qEffect = p^ qEffect' - -- (qEffect'^ : p) \in qConflicts - -- qIdentity = : q - = case commuteToPrefix (Set.singleton (inverseSubName (name p))) qEffect of - Cons pInv Nil `Then` qEffect' -> - case isEqual (invert p) pInv of - IsEqual -> - case partition ((name p ==) . name1) qConflicts of - ([ContextedPatch cxt p'], qConflicts' :: [ContextedPatch from]) -> - -- XXX I think this is wrong: Some of (invert qEffect') - -- might be able to commute through p? 
- case isEqual (invert qEffect') cxt of + commute (left `Then` right) + = case (left `Then` right) of + -- p [p^, {:p}, :q] <-> q [q^, {:q}, :p] + (Patch p `Then` Conflictor qEffect [qConflict] qIdentity) + | name p == name1 qConflict -> + -- To sanity check, we need to confirm: + -- p = qEffect^ + -- qConflict = : p + -- qIdentity = : q + case qEffect of + Cons pInverse Nil -> + let p' = invert pInverse + in case sameStart p p' of + IsEqual -> + case isEqual p (invert pInverse) of hunk ./camp-core/Camp/Patch/Catch.hs 91 - case isEqual p p' of - IsEqual -> - Just (Conflictor qEffect' - qConflicts' - qIdentity - `Then` - Conflictor Nil - [qIdentity] - (addToContext (invert qEffect') $ - ContextedPatch Nil p)) - _ -> panic "Commute Catch Catch 4" - _ -> panic "Commute Catch Catch 5" - -- [r, X, y] [, {y}, r^:q] <-> [q] [q^r, {r^:q} U X, y] - commute (Conflictor pEffect pConflicts pIdentity - `Then` - Conflictor qEffect [qConflict] qIdentity) - | name1 pIdentity == name1 qConflict - -- To sanity check, we need to confirm: - -- qEffect = \epsilon - -- qConflict = pIdentity - -- qIdentity = pEffect^ : q - = case qEffect of - Nil -> - -- XXX We need equality of contexted patches to be able to - -- do this: - -- case isEqual qConflict pIdentity of - -- IsEqual -> - case qIdentity of - ContextedPatch qCxt q -> - -- XXX I think this is wrong: Some of (invert pEffect) - -- might be able to commute through q? - case isEqual qCxt (invert pEffect) of - IsEqual -> - Just (Patch q - `Then` - Conflictor (invert q `Cons` pEffect) - (qIdentity : pConflicts) - pIdentity) - _ -> panic "Commute Catch Catch 6" - -- [r s, W, x] [t, {t^x} U Y, z] <-> [r t', s'Y, s'z] [s', z U t^W, t^x] - commute (Conflictor pEffect pConflicts pIdentity - `Then` - Conflictor qEffect qConflicts qIdentity) - | name1 pIdentity `elem` map name1 qConflicts - -- -- XXX This sanity check comment is wrong: - -- To sanity check, we need to confirm: - -- qEffect = p^ qEffect' - -- (qEffect'^ : p) \in qConflicts - = let pEffectNames = Set.fromList (names pEffect) - qConflictsName = Set.fromList (map name1 qConflicts) - commonFirstConflictNames = pEffectNames `Set.intersection` - qConflictsName - in case commuteToPrefix commonFirstConflictNames pEffect of - commonEffects `Then` pOnlyEffect -> - case commute (pOnlyEffect `Then` qEffect) of - Just (qEffect' `Then` pEffect') -> - case partition ((name1 pIdentity ==) . 
name1) qConflicts of - ([ContextedPatch _cxt _p'], qConflicts') -> - -- XXX Check that - -- ContextedPatch cxt p' - -- is the same as pIdentity - Just (Conflictor (commonEffects `appendSeq` - qEffect') - (map (addToContext pEffect') qConflicts') - (addToContext pEffect' qIdentity) - `Then` - Conflictor pEffect' - (qIdentity : - map (addToContext (invert qEffect)) pConflicts) - (addToContext (invert qEffect) pIdentity)) - _ -> panic "Commute Catch Catch 7" - _ -> panic "Commute Catch Catch 8" + case qConflict of + ContextedPatch Nil p'' -> + case isEqual p p'' of + IsEqual -> + case qIdentity of + ContextedPatch Nil q -> + Just (Patch q + `Then` + Conflictor (invert q `Cons` Nil) + [ContextedPatch Nil q] + (ContextedPatch Nil p)) + _ -> panic "Commute Catch Catch 1" + _ -> panic "Commute Catch Catch 2" + _ -> panic "Commute Catch Catch 3" + -- [p] [p^ r, {r^:p} U X, y] <-> [r, X, y] [, {y}, r^:p] + -- XXX The qConflicts and qConflicts' type signatures are to fix + -- building on GHC 6.8 + (Patch p `Then` Conflictor qEffect (qConflicts :: [ContextedPatch from]) qIdentity) + | name p `elem` map name1 qConflicts -> + -- To sanity check, we need to confirm: + -- qEffect = p^ qEffect' + -- (qEffect'^ : p) \in qConflicts + -- qIdentity = : q + case commuteToPrefix (Set.singleton (inverseSubName (name p))) qEffect of + Cons pInv Nil `Then` qEffect' -> + case isEqual (invert p) pInv of + IsEqual -> + case partition ((name p ==) . name1) qConflicts of + ([ContextedPatch cxt p'], qConflicts' :: [ContextedPatch from]) -> + -- XXX I think this is wrong: Some of (invert qEffect') + -- might be able to commute through p? + case isEqual (invert qEffect') cxt of + IsEqual -> + case isEqual p p' of + IsEqual -> + let cp = addToContext (invert qEffect') + (ContextedPatch Nil p) + in Just (Conflictor qEffect' + qConflicts' + qIdentity + `Then` + Conflictor Nil + [qIdentity] + cp) + _ -> panic "Commute Catch Catch 4" + _ -> panic "Commute Catch Catch 5" + -- [r, X, y] [, {y}, r^:q] <-> [q] [q^r, {r^:q} U X, y] + (Conflictor pEffect pConflicts pIdentity + `Then` + Conflictor qEffect [qConflict] qIdentity) + | name1 pIdentity == name1 qConflict -> + -- To sanity check, we need to confirm: + -- qEffect = \epsilon + -- qConflict = pIdentity + -- qIdentity = pEffect^ : q + case qEffect of + Nil -> + -- XXX We need equality of contexted patches to be able to + -- do this: + -- case isEqual qConflict pIdentity of + -- IsEqual -> + case qIdentity of + ContextedPatch qCxt q -> + -- XXX I think this is wrong: Some of (invert pEffect) + -- might be able to commute through q? 
+ case isEqual qCxt (invert pEffect) of + IsEqual -> + Just (Patch q + `Then` + Conflictor (invert q `Cons` pEffect) + (qIdentity : pConflicts) + pIdentity) + _ -> panic "Commute Catch Catch 6" + -- [r s, W, x] [t, {t^x} U Y, z] <-> [r t', s'Y, s'z] [s', z U t^W, t^x] + (Conflictor pEffect pConflicts pIdentity + `Then` + Conflictor qEffect qConflicts qIdentity) + | name1 pIdentity `elem` map name1 qConflicts -> + -- -- XXX This sanity check comment is wrong: + -- To sanity check, we need to confirm: + -- qEffect = p^ qEffect' + -- (qEffect'^ : p) \in qConflicts + let pEffectNames = Set.fromList (names pEffect) + qConflictsName = Set.fromList (map name1 qConflicts) + commonFirstConflictNames = pEffectNames `Set.intersection` + qConflictsName + in case commuteToPrefix commonFirstConflictNames pEffect of + commonEffects `Then` pOnlyEffect -> + case commute (pOnlyEffect `Then` qEffect) of + Just (qEffect' `Then` pEffect') -> + case partition ((name1 pIdentity ==) . name1) qConflicts of + ([ContextedPatch _cxt _p'], qConflicts') -> + -- XXX Check that + -- ContextedPatch cxt p' + -- is the same as pIdentity + Just (Conflictor (commonEffects `appendSeq` + qEffect') + (map (addToContext pEffect') qConflicts') + (addToContext pEffect' qIdentity) + `Then` + Conflictor pEffect' + (qIdentity : + map (addToContext (invert qEffect)) pConflicts) + (addToContext (invert qEffect) pIdentity)) + _ -> panic "Commute Catch Catch 7" + _ -> panic "Commute Catch Catch 8" hunk ./camp-core/Camp/Patch/Catch.hs 198 - -- From now on we know that the catches aren't conflictors that - -- might conflict with each other + -- From now on we know that the catches aren't conflictors that + -- might conflict with each other hunk ./camp-core/Camp/Patch/Catch.hs 201 - -- patch/patch - commute (Patch p `Then` Patch q) - = do (q' `Then` p') <- commute (p `Then` q) - return (Patch q' `Then` Patch p') - -- patch/conflictor - commute (Patch p `Then` Conflictor qEffect qConflicts qIdentity) - = do qEffect' `Then` p' <- commute (p `Then` qEffect) - qConflicts' <- mapM (\qConf -> commutePastOrAnnihilate p' qConf) - qConflicts - qIdentity' <- commutePastOrAnnihilate p' qIdentity - return (Conflictor qEffect' qConflicts' qIdentity' - `Then` - Patch p') - -- conflictor/patch - commute (Conflictor pEffect pConflicts pIdentity `Then` Patch q) - = do q' `Then` pEffect' <- commute (pEffect `Then` q) - pConflicts' <- mapM (\pConf -> commutePastOrAnnihilate (invert q) pConf) - pConflicts - pIdentity' <- commutePastOrAnnihilate (invert q) pIdentity - return (Patch q' - `Then` - Conflictor pEffect' pConflicts' pIdentity') - -- conflictor/conflictor - commute (Conflictor pEffect pConflicts pIdentity - `Then` - Conflictor qEffect qConflicts qIdentity) - = let pEffectNames = Set.fromList (names pEffect) - qConflictsName = Set.fromList (map name1 qConflicts) - commonFirstConflictNames = pEffectNames `Set.intersection` - qConflictsName - in case commuteToPrefix commonFirstConflictNames pEffect of - commonEffects `Then` pOnlyEffect -> - do qEffect' `Then` pEffect' <- commute (pOnlyEffect `Then` qEffect) - pIdentity' <- allCommutePastOrAnnihilate (invert qEffect) pIdentity - qIdentity' <- allCommutePastOrAnnihilate pEffect' qIdentity - if any (\pConflict -> pConflict `conflictsWith` addToContext qEffect qIdentity) pConflicts || - any (\qConflict -> pIdentity `conflictsWith` addToContext qEffect qConflict) qConflicts || - (pIdentity `conflictsWith` addToContext qEffect qIdentity) - then Nothing - else return (Conflictor (commonEffects `appendSeq` - 
qEffect') - (map (addToContext pEffect') qConflicts) - qIdentity' - `Then` - Conflictor pEffect' - (map (addToContext (invert qEffect)) pConflicts) - pIdentity') + -- patch/patch + (Patch p `Then` Patch q) -> + do (q' `Then` p') <- commute (p `Then` q) + return (Patch q' `Then` Patch p') + -- patch/conflictor + (Patch p `Then` Conflictor qEffect qConflicts qIdentity) -> + do qEffect' `Then` p' <- commute (p `Then` qEffect) + qConflicts' <- mapM (\qConf -> commutePastOrAnnihilate p' qConf) + qConflicts + qIdentity' <- commutePastOrAnnihilate p' qIdentity + return (Conflictor qEffect' qConflicts' qIdentity' + `Then` + Patch p') + -- conflictor/patch + (Conflictor pEffect pConflicts pIdentity `Then` Patch q) -> + do q' `Then` pEffect' <- commute (pEffect `Then` q) + pConflicts' <- mapM (\pConf -> commutePastOrAnnihilate (invert q) pConf) + pConflicts + pIdentity' <- commutePastOrAnnihilate (invert q) pIdentity + return (Patch q' + `Then` + Conflictor pEffect' pConflicts' pIdentity') + -- conflictor/conflictor + (Conflictor pEffect pConflicts pIdentity + `Then` + Conflictor qEffect qConflicts qIdentity) -> + let pEffectNames = Set.fromList (names pEffect) + qConflictsName = Set.fromList (map name1 qConflicts) + commonFirstConflictNames = pEffectNames `Set.intersection` + qConflictsName + in case commuteToPrefix commonFirstConflictNames pEffect of + commonEffects `Then` pOnlyEffect -> + do qEffect' `Then` pEffect' <- commute (pOnlyEffect `Then` qEffect) + pIdentity' <- allCommutePastOrAnnihilate (invert qEffect) pIdentity + qIdentity' <- allCommutePastOrAnnihilate pEffect' qIdentity + if any (\pConflict -> pConflict `conflictsWith` addToContext qEffect qIdentity) pConflicts || + any (\qConflict -> pIdentity `conflictsWith` addToContext qEffect qConflict) qConflicts || + (pIdentity `conflictsWith` addToContext qEffect qIdentity) + then Nothing + else return (Conflictor (commonEffects `appendSeq` + qEffect') + (map (addToContext pEffect') qConflicts) + qIdentity' + `Then` + Conflictor pEffect' + (map (addToContext (invert qEffect)) pConflicts) + pIdentity') hunk ./camp-core/Camp/Patch/Catch.hs 102 - _ -> panic "Commute Catch Catch 1" - _ -> panic "Commute Catch Catch 2" - _ -> panic "Commute Catch Catch 3" + _ -> panic2 left right "Commute Catch Catch 1" + _ -> panic2 left right "Commute Catch Catch 2" + _ -> panic2 left right "Commute Catch Catch 3" hunk ./camp-core/Camp/Patch/Catch.hs 135 - _ -> panic "Commute Catch Catch 4" - _ -> panic "Commute Catch Catch 5" + _ -> panic2 left right "Commute Catch Catch 4" + _ -> panic2 left right "Commute Catch Catch 5" hunk ./camp-core/Camp/Patch/Catch.hs 163 - _ -> panic "Commute Catch Catch 6" + _ -> panic2 left right "Commute Catch Catch 6" hunk ./camp-core/Camp/Patch/Catch.hs 195 - _ -> panic "Commute Catch Catch 7" - _ -> panic "Commute Catch Catch 8" + _ -> panic2 left right "Commute Catch Catch 7" + _ -> panic2 left right "Commute Catch Catch 8" hunk ./camp-core/Camp/Utils.hs 8 - panic, panic2, + panic, panic2, panic3, hunk ./camp-core/Camp/Utils.hs 135 -panic2 p q str = panic (str ++ ":\n" ++ pprint p ++ "\n" ++ pprint q) +panic2 p q str = panicn [str, pprint p, pprint q] + +panic3 :: (Ppr p, Ppr q, Ppr r) => p -> q -> r -> String -> a +panic3 p q r str = panicn [str, pprint p, pprint q, pprint r] + +panicn :: [String] -> a +panicn = panic . concat . 
intersperse "\n\n" hunk ./camp-core/Camp/Patch/Merge.hs 82 -instance (Equality q, Merge p q) => Merge p (Seq q) where +instance (Equality q, Merge p q, Ppr (p () ()), Ppr (q () ())) => Merge p (Seq q) where hunk ./camp-core/Camp/Patch/Merge.hs 94 - Nothing -> panic "sequence merge commute" + Nothing -> panic3 (coerceToUnitContexts2 p) + (coerceToUnitContexts2 q) + (coerceToUnitContexts2 q') + "sequence merge commute" hunk ./camp-core/Camp/Patch/Merge.hs 99 -instance (Equality q, Merge p q) => Merge (Seq p) (Seq q) where +instance (Equality q, Merge p q, Ppr (p () ()), Ppr (q () ())) => Merge (Seq p) (Seq q) where hunk ./camp-core/Camp/Utils.hs 10 + coerceToUnitContexts2, hunk ./camp-core/Camp/Utils.hs 29 +import Unsafe.Coerce hunk ./camp-core/Camp/Utils.hs 183 +coerceToUnitContexts2 :: p a b -> p () () +coerceToUnitContexts2 = unsafeCoerce + hunk ./camp-bin/Camp/Command/Show.hs 7 --- XXX import Camp.Patch.Pretty +import Camp.Patch.Pretty hunk ./camp-bin/Camp/Command/Show.hs 13 - inventory <- readInventory gf r + inventory <- readLocalInventory gf r hunk ./camp-bin/Camp/Command/Show.hs 23 - let isWanted (InventoryItem n _ _ _) = n == wantedName - _inventoryLine = case filter isWanted inventory of + isWanted (InventoryItem n _ _ _) = n == wantedName + inventoryLine = case filter isWanted inventory of hunk ./camp-bin/Camp/Command/Show.hs 27 - _patch <- error "XXX" -- readMegaPatch r inventoryLine - error "XXX" -- putStrLn $ pprint patch + patches <- readMegaPatchesWithAbsoluteInventory gf [inventoryLine] + putStrLn $ pprint patches hunk ./camp-core/Camp/Patch/ContextedPatch.hs 87 -conflictsWith (ContextedPatch ps p) q - = case commutePastOrAnnihilate (invert p) (addToContext (invert ps) q) of - Just _ -> False - Nothing -> True +conflictsWith (ContextedPatch ps p) (ContextedPatch qs q) + = let pSeq = appendSeq ps (Cons p Nil) + qSeq = appendSeq qs (Cons q Nil) + in case commute (invert pSeq `Then` qSeq) of + Just _ -> False + Nothing -> True adddir ./tests/tricky_merge addfile ./tests/tricky_merge/run_test.sh hunk ./tests/tricky_merge/run_test.sh 1 +#!/bin/sh + +# This test tests a case that caused darcs to give an "Inconsistent patch" +# error. http://bugs.darcs.net/issue1829 + +set -e + +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" + +cleanup() { + cd "$HERE" + rm -rf a + rm -rf b +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +mkdir a +cd a +"$CAMP" init +echo Line BB > file +"$CAMP" add file +"$CAMP" rec -a -m "Main patch 1" +echo Line DDDD >> file +"$CAMP" rec -a -m "Main patch 2" +echo Line A > file +echo Line BB >> file +echo Line DDDD >> file +"$CAMP" rec -a -m "Main patch 3" +echo Line A > file +echo Line BB >> file +echo Line CCC >> file +echo Line DDDD >> file +"$CAMP" rec -a -m "Main patch 4" +echo Line A > file +echo Line BB >> file +echo Line CCC >> file +echo Line DDDD >> file +echo Line EEEEE >> file +"$CAMP" rec -a -m "Main patch 5" +cd .. + +mkdir b +cd b +"$CAMP" init +"$CAMP" pull ../a -a -p "Main patch 1" +echo Line TTTTTTT >> file +"$CAMP" rec -a -m "Alternate patch 1" +"$CAMP" pull ../a -a -p "Main patch 2" +echo Line BB > file +echo Line XXXXXXXXX >> file +"$CAMP" rec -a -m "Alternate patch 2" +echo Line XXXXXXXXX > file +"$CAMP" rec -a -m "Alternate patch 3" +cd .. + +cd a +"$CAMP" pull ../b -ap "Alternate patch 1" +"$CAMP" pull ../b -ap "Alternate patch 2" +"$CAMP" pull ../b -ap "Alternate patch 3" +cd .. 
+ adddir ./tests/conflict_causes_dependency addfile ./tests/conflict_causes_dependency/run_test.sh hunk ./tests/conflict_causes_dependency/run_test.sh 1 +#!/bin/sh + +# In this test, we get 2 patches in sequence which commute, but we +# make them conflictors with the same patch and in current camp they +# no longer commute. + +set -e + +HERE=`pwd` +CAMP="${CAMP:-$HERE/../../camp-bin/dist/build/camp/camp}" + +cleanup() { + cd "$HERE" + rm -rf a + rm -rf b + rm -rf c +} + +cleanup + +if [ "$1" = "clean-only" ] +then + exit 0 +fi + +if [ "$1" != "no-clean-after" ] +then + trap cleanup EXIT +fi + +mkdir a +cd a +"$CAMP" init +printf 'A\nB\nC\nD\n' > file +"$CAMP" add file +"$CAMP" rec -a -m "Initial patch" +cd .. + +"$CAMP" get a b +"$CAMP" get a c + +cd a +printf 'A\nX\nB\nC\nD\n' > file +"$CAMP" rec -a -m "Add X" +printf 'A\nX\nB\nY\nC\nD\n' > file +"$CAMP" rec -a -m "Add Y" +printf 'A\nX\nB\nY\nC\nZ\nD\n' > file +"$CAMP" rec -a -m "Add Z" +cd .. + +cd b +printf 'RRR' > file +"$CAMP" rec -a -m "Rewrite with R" +"$CAMP" pull ../a -ap "Add X" +"$CAMP" pull ../a -ap "Add Y" +"$CAMP" pull ../a -ap "Add Z" +cd .. + +cd c +"$CAMP" pull ../b -ap "Add Z" +cd .. + hunk ./camp-core/Camp/Patch/Catch.hs 69 + pprShow (Patch p) = text "Patch" <+> pprShowAtomic p + pprShow (Conflictor effect conflicts identity) + = text "Conflictor" + $$ nest 4 (pprShowAtomic effect) + $$ nest 4 (pprShowAtomic conflicts) + $$ nest 4 (pprShowAtomic identity) hunk ./camp-core/Camp/Patch/ContextedPatch.hs 38 + pprShow (ContextedPatch ps p) = text "ContextedPatch" + $$ nest 4 (pprShowAtomic ps) + $$ nest 4 (pprShowAtomic p) hunk ./camp-core/Camp/Patch/MegaPatch.hs 64 + pprShow (MegaPatch n mi p) = text "MegaPatch" <+> pprShowAtomic n + $$ nest 4 (pprShow mi) + $$ nest 4 (pprShowAtomic p) hunk ./camp-core/Camp/Patch/MegaPatch.hs 97 + pprShow (MetaInfo short long author date) + = text "MetaInfo" + $$ nest 4 (pprShowAtomic short) + $$ nest 4 (pprShowAtomic long) + $$ nest 4 (pprShowAtomic author) + $$ nest 4 (parens (text "read" <+> text (show (show date)))) hunk ./camp-core/Camp/Patch/Name.hs 41 + pprShow (Name sign n) = text "Name" + <+> pprShowAtomic sign + <+> pprShowAtomic n hunk ./camp-core/Camp/Patch/Name.hs 75 + pprShow (SubName n s) = text "SubName" + <+> pprShowAtomic n + <+> pprShowAtomic s hunk ./camp-core/Camp/Patch/Name.hs 110 + pprShow Positive = text "Positive" + pprShow Negative = text "Negative" + pprShowAtomic = pprShow hunk ./camp-core/Camp/Patch/Patch.hs 38 + pprShow (Primitive sn p) = text "Primitive" <+> pprShowAtomic sn + $$ nest 4 (pprShowAtomic p) hunk ./camp-core/Camp/Patch/Pretty.hs 2 -module Camp.Patch.Pretty (Ppr(..), pprint, +module Camp.Patch.Pretty (Ppr(..), pprint, pprintShow, hunk ./camp-core/Camp/Patch/Pretty.hs 5 +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy.Char8 as BSC +import Data.Int hunk ./camp-core/Camp/Patch/Pretty.hs 13 +pprintShow :: Ppr a => a -> String +pprintShow = render . pprShow + hunk ./camp-core/Camp/Patch/Pretty.hs 21 + pprShow :: a -> Doc + pprShowAtomic :: a -> Doc + pprShowAtomic = parens . pprShow + hunk ./camp-core/Camp/Patch/Pretty.hs 28 + pprShow xs = brackets $ vcat $ punctuate comma $ map pprShow xs + pprShowAtomic = pprShow + +instance Ppr ByteString where + ppr bs = text $ BSC.unpack bs + pprAtomic = ppr + pprShow bs = text "BSC.pack" <+> text (show (BSC.unpack bs)) + +instance Ppr Integer where + ppr = integer + pprAtomic = ppr + pprShow = ppr + pprShowAtomic = ppr + +instance Ppr Int64 where + ppr = integer . 
fromIntegral + pprAtomic = ppr + pprShow = ppr + pprShowAtomic = ppr hunk ./camp-core/Camp/Patch/Primitive.hs 182 + + pprShow (AddDir fn) = text "AddDir" <+> text (show fn) + pprShow (RmDir fn) = text "RmDir" <+> text (show fn) + pprShow (MvDir from to) = text "MvDir" <+> text (show from) + <+> text (show to) + pprShow (AddFile fn) = text "AddFile" <+> text (show fn) + pprShow (RmFile fn) = text "RmFile" <+> text (show fn) + pprShow (MvFile from to) = text "MvFile" <+> text (show from) + <+> text (show to) + pprShow (Hunk fn skipBytes skipLines oldBytes oldLines newBytes newLines) + = text "Hunk" <+> + ( text (show fn) + $$ (pprShowAtomic skipBytes <+> pprShowAtomic skipLines) + $$ (pprShowAtomic oldBytes <+> pprShowAtomic oldLines) + $$ (pprShowAtomic newBytes <+> pprShowAtomic newLines) + ) + pprShow (Binary {}) = error "XXX Binary" hunk ./camp-core/Camp/Patch/Sequence.hs 84 + pprAtomic = ppr + pprShow Nil = text "Nil" + pprShow (Cons x xs) = text "Cons" + $$ pprShowAtomic (contextHack x) + $$ pprShowAtomic (contextHack xs) + pprShowAtomic s@Nil = pprShow s + pprShowAtomic s = parens $ pprShow s hunk ./camp-core/Camp/InRepoFileName.hs 9 +import Camp.Patch.Pretty hunk ./camp-core/Camp/InRepoFileName.hs 23 +instance Ppr InRepoFileName where + ppr (InRepoFileName fp) = text fp + pprAtomic = ppr + pprShow (InRepoFileName fp) = text "InRepoFileName.fromString" <+> text (show fp) + pprShowAtomic = pprShow + hunk ./camp-core/Camp/Patch/Primitive.hs 160 - ppr (AddDir fn) = text "AddDir" <+> text (show fn) - ppr (RmDir fn) = text "RmDir" <+> text (show fn) + ppr (AddDir fn) = text "AddDir" <+> ppr fn + ppr (RmDir fn) = text "RmDir" <+> ppr fn hunk ./camp-core/Camp/Patch/Primitive.hs 164 - ppr (AddFile fn) = text "AddFile" <+> text (show fn) - ppr (RmFile fn) = text "RmFile" <+> text (show fn) + ppr (AddFile fn) = text "AddFile" <+> ppr fn + ppr (RmFile fn) = text "RmFile" <+> ppr fn hunk ./camp-core/Camp/Patch/Primitive.hs 169 - = text "Hunk" <+> text (show fn) + = text "Hunk" <+> ppr fn hunk ./camp-core/Camp/Patch/Primitive.hs 183 - pprShow (AddDir fn) = text "AddDir" <+> text (show fn) - pprShow (RmDir fn) = text "RmDir" <+> text (show fn) - pprShow (MvDir from to) = text "MvDir" <+> text (show from) - <+> text (show to) - pprShow (AddFile fn) = text "AddFile" <+> text (show fn) - pprShow (RmFile fn) = text "RmFile" <+> text (show fn) - pprShow (MvFile from to) = text "MvFile" <+> text (show from) - <+> text (show to) + pprShow (AddDir fn) = text "AddDir" <+> pprShowAtomic fn + pprShow (RmDir fn) = text "RmDir" <+> pprShowAtomic fn + pprShow (MvDir from to) = text "MvDir" <+> pprShowAtomic from + <+> pprShowAtomic to + pprShow (AddFile fn) = text "AddFile" <+> pprShowAtomic fn + pprShow (RmFile fn) = text "RmFile" <+> pprShowAtomic fn + pprShow (MvFile from to) = text "MvFile" <+> pprShowAtomic from + <+> pprShowAtomic to hunk ./camp-core/Camp/Patch/Primitive.hs 193 - ( text (show fn) + ( pprShowAtomic fn hunk ./camp-core/Camp/Patch/Catch.hs 214 - qConflicts' <- mapM (\qConf -> commutePastOrAnnihilate p' qConf) - qConflicts + let qConflicts' = map (addOneToContext p') qConflicts hunk ./camp-core/Camp/Patch/Catch.hs 222 - pConflicts' <- mapM (\pConf -> commutePastOrAnnihilate (invert q) pConf) - pConflicts + let pConflicts' = map (addOneToContext (invert q)) pConflicts hunk ./camp-core/Camp/Patch/Catch.hs 232 - qConflictsName = Set.fromList (map name1 qConflicts) + qConflictsName = Set.fromList (map (inverseSubName . 
name1) qConflicts) hunk ./camp-core/Camp/Patch/Catch.hs 240 - if any (\pConflict -> pConflict `conflictsWith` addToContext qEffect qIdentity) pConflicts || - any (\qConflict -> pIdentity `conflictsWith` addToContext qEffect qConflict) qConflicts || + if -- any (\pConflict -> pConflict `conflictsWith` addToContext qEffect qIdentity) pConflicts || + -- any (\qConflict -> pIdentity `conflictsWith` addToContext qEffect qConflict) qConflicts || hunk ./camp-core/Camp/Patch/ContextedPatch.hs 3 - ContextedPatch(..), addToContext, conflictsWith, + ContextedPatch(..), addToContext, addOneToContext, conflictsWith, hunk ./camp-core/Camp/Patch/ContextedPatch.hs 17 +import Data.Maybe hunk ./camp-core/Camp/Patch/ContextedPatch.hs 84 - = case addToContext ps cq of - cq'@(ContextedPatch qs q) -> - case commutePastOrAnnihilate p cq' of - Nothing -> ContextedPatch (Cons p qs) q - Just cq'' -> cq'' + = addOneToContext p (addToContext ps cq) + +addOneToContext :: Patch from mid -> ContextedPatch mid -> ContextedPatch from +addOneToContext p cq@(ContextedPatch qs q) + = case commutePastOrAnnihilate p cq of + Nothing -> ContextedPatch (Cons p qs) q + Just cq' -> cq' hunk ./camp-core/Camp/Patch/ContextedPatch.hs 96 - in case commute (invert pSeq `Then` qSeq) of - Just _ -> False - Nothing -> True + in not (isJust (tryMerge pSeq qSeq)) hunk ./camp-core/Camp/Patch/Name.hs 16 -class Named p n | p -> n where +class Ord n => Named p n | p -> n where hunk ./camp-core/Camp/Patch/Name.hs 19 -class Named1 p n | p -> n where +class Ord n => Named1 p n | p -> n where hunk ./camp-core/Camp/Patch/Pretty.hs 7 +import Data.Set (Set) +import qualified Data.Set as Set hunk ./camp-core/Camp/Patch/Sequence.hs 3 - Seq(..), names, commuteToPrefix, appendSeq + Seq(..), names, commuteToPrefix, appendSeq, tryMerge hunk ./camp-core/Camp/Patch/Sequence.hs 6 +import Camp.Patch.Anonymous hunk ./camp-core/Camp/Patch/Sequence.hs 173 +-- XXX Could use this in Catch merge, if we returned a bit more info +tryMerge :: (Named p n, Commute p p, Invert p) + => Seq p from to1 -> Seq p from to2 -> Maybe (Anonymous1 (Seq p to1)) +tryMerge ps qs + = let pNames = names ps + qNames = names qs + pNameSet = Set.fromList pNames + qNameSet = Set.fromList qNames + commonNameSet = pNameSet `Set.intersection` qNameSet + in case commuteToPrefix commonNameSet ps of + _ `Then` psUnique -> + case commuteToPrefix commonNameSet qs of + _ `Then` qsUnique -> + case sameStart psUnique qsUnique of + IsEqual -> + case commute (invert psUnique `Then` qsUnique) of + Nothing -> + Nothing + Just (qsUnique' `Then` _) -> + Just (Anonymous1 qsUnique') + hunk ./camp-core/Camp/Patch/Pretty.hs 32 + +instance Ppr a => Ppr (Set a) where + ppr s = braces $ vcat $ punctuate comma $ map ppr $ Set.toList s + pprAtomic = ppr + pprShow s = text "Set.fromList" <+> ppr (Set.toList s) hunk ./tests/conflict_causes_dependency/run_test.sh 39 -"$CAMP" get a b -"$CAMP" get a c +"$CAMP" -v0 get a b +"$CAMP" -v0 get a c hunk ./camp-core/Camp/Patch/Catch.hs 215 - qIdentity' <- commutePastOrAnnihilate p' qIdentity + qIdentity' <- commutePast p' qIdentity hunk ./camp-core/Camp/Patch/Catch.hs 223 - pIdentity' <- commutePastOrAnnihilate (invert q) pIdentity + pIdentity' <- commutePast (invert q) pIdentity hunk ./camp-core/Camp/Patch/Catch.hs 238 - pIdentity' <- allCommutePastOrAnnihilate (invert qEffect) pIdentity - qIdentity' <- allCommutePastOrAnnihilate pEffect' qIdentity + pIdentity' <- allCommutePast (invert qEffect) pIdentity + qIdentity' <- allCommutePast pEffect' qIdentity hunk 
./camp-core/Camp/Patch/ContextedPatch.hs 4 - allCommutePastOrAnnihilate, commutePastOrAnnihilate + allCommutePast, commutePast hunk ./camp-core/Camp/Patch/ContextedPatch.hs 43 +commutePast :: Patch from mid -> ContextedPatch mid + -> Maybe (ContextedPatch from) +commutePast p (ContextedPatch qs q) + = do (qs' `Then` p') <- commute (p `Then` qs) + (q' `Then` _) <- commute (p' `Then` q) + return (ContextedPatch qs' q') + +allCommutePast :: Seq Patch from mid -> ContextedPatch mid + -> Maybe (ContextedPatch from) +allCommutePast ps (ContextedPatch qs q) + = do (qs' `Then` ps') <- commute (ps `Then` qs) + (q' `Then` _) <- commute (ps' `Then` q) + return (ContextedPatch qs' q') + hunk ./camp-core/Camp/Patch/ContextedPatch.hs 88 -allCommutePastOrAnnihilate :: Seq Patch from mid -> ContextedPatch mid - -> Maybe (ContextedPatch from) -allCommutePastOrAnnihilate Nil cp = Just cp -allCommutePastOrAnnihilate (Cons p ps) cp - = do cp' <- allCommutePastOrAnnihilate ps cp - commutePastOrAnnihilate p cp' -
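
-----------------------------------------------------------------------------

Note (not part of the patches above): the Control.Exception.Extensible shim
added by these patches lets camp code such as Camp/Repository.hs be written
once against the extensible-exception API and still build on pre-6.10 GHCs.
When impl(ghc >= 6.9) holds, the module simply re-exports Control.Exception
(via -DUSE_NEW_EXCEPTIONS); otherwise the compatibility layer above is
compiled from the in-tree packages/extensible-exceptions copy, and setting
the have_extensible_exceptions Cabal flag makes camp-repository depend on an
installed extensible-exceptions package instead of that copy.

The following is a minimal usage sketch only. The module name Demo and the
literal values are made up for illustration; try, evaluate, bracket,
ArithException and SomeException are the names exported by the shim (and by
Control.Exception on newer GHCs), as shown in the patches above.

    module Demo (main) where

    -- With GHC >= 6.9 this import is just Control.Exception re-exported;
    -- with older GHCs it is the compatibility layer defined above, so the
    -- same client code compiles either way.
    import Control.Exception.Extensible

    main :: IO ()
    main = do
        -- 'try' selects exceptions by their type; anything that does not
        -- match keeps propagating.
        r <- try (evaluate (1 `div` 0 :: Integer))
        case r of
            Left DivideByZero -> putStrLn "caught a divide-by-zero"
            Left e            -> putStrLn ("caught: " ++ show (e :: ArithException))
            Right n           -> print n
        -- 'bracket' runs the release action whether or not the middle
        -- action throws, then re-raises any exception.
        bracket (return "resource")
                (\_ -> putStrLn "released")
                (\x -> putStrLn ("using " ++ x))
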