summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--README.md10
-rw-r--r--base64/Makefile56
-rw-r--r--base64/base64.hs102
-rw-r--r--hask-io/Makefile46
-rw-r--r--hask-io/mini.c14
-rw-r--r--hask-io/mini.hs3
6 files changed, 111 insertions, 120 deletions
diff --git a/README.md b/README.md
index e8f9124..0a2813f 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,11 @@
# Miguel's Haskell Collection
-A collection of some small haskell sources of mine.
+This repo holds a collection of some small Haskell sources.
+
+* base64 - encoder / decoder
+* calcGTK - Parsing with Parsec and GUI with GTK
+* freedomain - simple multithreaded domain checker based on DNS lookup
+* hulls - find convex and concave hulls for a set of points
+* nasa-rfid - Topcoder challenge (?Where)
+* shebang - simple demonstration of how to use a shebang with Haskell
+* simpleSVG - my tiny minimalistic SVG generator (see inside hulls, merge, use as "lib")
diff --git a/base64/Makefile b/base64/Makefile
index 611abcf..629c734 100644
--- a/base64/Makefile
+++ b/base64/Makefile
@@ -1,45 +1,11 @@
-#
-# Some GHC flags explained
-#
-# -v verbose mode
-# -O2 level 2 optimizations
-# -rtsopts allow +RTS flags
-# -prof enable basic time and allocation profiling
-# -auto-all cost centers on all top level functions
-# (you can also add them via the SCC pragma)
-# -caf-all generate data for CAFs (constant applicative forms)
-# t
-# -fforce-recomp force recompilation
-#
-# Notes: you will obtain the profiling versions of dependancies via:
-# stack install --profile [libraryname]
-#
-# -fprof-auto replaced -auto-all
-# -fprof-cafs replaced -caf-all
-#
-
-build-prof:
- stack ghc --profile -- -rtsopts -prof -fprof-auto -fprof-cafs -O2 base64.hs
-
-#
-# Some +RTS flags
-#
-# -p profiling
-# -K set stack limit
-# -hc extract heap profile
-# -hy allocation by type
-# -hd allocation by constructor
-# -iN sampling frequency in seconds. e.g. -i0.01
-# -ddump-simpl generate core version
-#
-# Note: render the heap profile as graph with: hp2ps -e8in -c file.hp
-#
-
-run-prof:
- cat random.bin | ./base64 +RTS -p -K100M > /dev/null
-
-
-test-mini-hask:
- stack ghc -- -O mini.hs
-test-mini-c:
- gcc -O3 mini.c -o mini.out
+run: base64
+ ./base64 10 100 +RTS -N2
+ #cat /tmp/random.bin | ./base64 +RTS -N6 > /dev/null
+run-prof: base64prof
+ cat /tmp/random.bin | ./base64prof +RTS -N6 -p -s > /dev/null
+base64: base64.hs
+ stack ghc -- -O2 base64.hs -threaded -o base64
+base64prof: base64.hs
+ stack ghc -- -threaded -rtsopts -prof -fprof-auto -fprof-cafs -O2 base64.hs -o base64prof
+genrandom:
+ dd if=/dev/urandom of=/tmp/random.bin bs=1M count=64
diff --git a/base64/base64.hs b/base64/base64.hs
index 2443d07..cd84c48 100644
--- a/base64/base64.hs
+++ b/base64/base64.hs
@@ -1,12 +1,67 @@
+{-# LANGUAGE BangPatterns #-}
+{-# LANGUAGE ViewPatterns #-}
import Data.Maybe (fromJust,isJust)
import Data.List.Split (chunksOf)
import Data.Tuple (swap)
import Data.Tuple.Extra (first,second,dupe)
import System.Environment (getArgs)
-import qualified Data.ByteString.Lazy as B
-import qualified Data.ByteString.Lazy.Char8 as C
-import qualified Data.Map as M
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.ByteString as B
+import qualified Data.Map.Strict as M
+import Data.Char
+import Data.Maybe
+import Data.Word
+import Data.Array
+import Data.Bits
+--import Data.ByteString.Base64.Lazy as B64
+import Data.ByteString.Base64 as B64
+import GHC.Int
+import Control.Parallel
+import Control.Parallel.Strategies
+main :: IO()
+main = BL.getContents >>= mapM_ BL.putStr . parMap rdeepseq (chunkBL 3 (fromNum 6 4.toNum 8)) . chunkB 100000
+
+table64 :: Array Int Word8
+table64 = array (0,63) $ zip [0..] $ map (fromIntegral.ord) $
+ ['A'..'Z']++['a'..'z']++['0'..'9']++['+','/']
+
+toNum :: Int->BL.ByteString->Int
+toNum s = BL.foldl' f 0 where f a v = a `shiftL` s + fromIntegral v
+
+fromNum :: Int->Int->Int->BL.ByteString
+fromNum s l x = BL.pack $ map f (reverse [0..l-1])
+ where f v = table64 ! (x `shiftR` (v*s) .&. (2^s-1))
+
+chunkBL :: Int64->(BL.ByteString->BL.ByteString)->BL.ByteString->BL.ByteString
+chunkBL n f b | BL.null b = b
+ | otherwise = let l = f . BL.take n $ b
+ r = chunkBL n f . BL.drop n $ b
+ in BL.append l r
+
+chunkB :: Int64->BL.ByteString->[BL.ByteString]
+chunkB n b | BL.null b = []
+ | otherwise = let l = BL.take n $ b
+ r = chunkB n . BL.drop n $ b
+ in l:r
+{-
+ let arr=reverse $ BL.foldr f (0,0:[]) b
+ in arr --BL.unfoldr uf (0,snd arr)
+ where uf (_,[]) = Nothing
+ uf (3,x:xs) = Just (table64 ! x,(0,xs))
+ uf (c,x:xs) = Just (table64 ! (x.&.63),(c+1,x `shiftR` 6:xs))
+ f :: (Int,[Int]) -> Word8 -> (Int,[Int])
+ f (_,[]) v = (0,[])
+ f (2,x:xs) v = (0,0:(x `shiftL` 8) + fromIntegral v:xs)
+ f (c,x:xs) v = (c+1,(x `shiftL` 8) + fromIntegral v:xs)
+
+ -- BL.unfoldr uf . (,)3 . snd . BL.foldl' f (0,0:[])
+ -- -}
+{-
+-}
+
+
+--main = B.getContents >>= B.putStr . B64.encode
-- Example Usage: echo "hello world" | ./base64 | ./base64 -d
-- TODO
@@ -16,32 +71,57 @@ import qualified Data.Map as M
-- (see man base64)
-- compare performance . named map? array?
-main = do arg<-getArgs
- dat<-B.getContents
- B.putStr.B.pack.encode.B.unpack $ dat
+-- arg<-getArgs
{-
+enc64 x = B.reverse $ B.unfoldr unf $ (4, (B.foldl' (\a b->a `shiftL` 8 + fromIntegral b) (0::Int) x))
+unf (c,v) = if c==0 then Nothing else let r = v .&. 63 in Just (table64 ! r,(c-1,v`shiftR` 6))
if length arg == 0
then putStr . (++"\n") . encode64 . map fromIntegral . B.unpack $ dat
else B.putStr. B.pack . map fromIntegral . decode64 . C.unpack $ dat
-}
-encode = map ((+65).(`mod` 24))
+--t1 = BL.getContents >>= BL.putStr . BL.fromChunks . map (B.map f) . BL.toChunks
+
+
+
+--enc=B.map (\x->table64 ! (x `shiftR` 4))
+
-table64 = zip [0..] (['A'..'Z']++['a'..'z']++['0'..'9']++['+','/'])
-enc64 k = M.lookup k mp where mp = M.fromList table64
-dec64 k = M.lookup k mp where mp = M.fromList $ map swap table64
+-- -- reChunkIn :: Int -> [B.ByteString] -> [B.ByteString]
+-- -- reChunkIn !n = go
+-- -- where
+-- -- go [] = []
+-- -- go (y : ys) = case B.length y `divMod` n of
+-- -- (_, 0) -> y : go ys
+-- -- (d, _) -> case B.splitAt (d * n) y of
+-- -- (prefix, suffix) -> prefix : fixup suffix ys
+-- -- fixup acc [] = [acc]
+-- -- fixup acc (z : zs) = case B.splitAt (n - B.length acc) z of
+-- -- (prefix, suffix) ->
+-- -- let acc' = acc `B.append` prefix
+-- -- in if B.length acc' == n
+-- -- then let zs' = if B.null suffix
+-- -- then zs
+-- -- else suffix : zs
+-- -- in acc' : go zs'
+-- -- else -- suffix must be null
+-- -- fixup acc' zs
+--
+--dec64 k = M.lookup k mp where mp = M.fromList $ map swap table64
+{-
encode64 :: [Int] -> [Char]
encode64 = map (fromJust.enc64.flip mod 64) -- concat . map (pad . first enc . second length . dupe) . chunksOf 3
where enc = map (fromJust . enc64) . sumC
- pad (v,l) =take 4 $ take (1+l) v ++ "==="
sumC = map fst . reverse . take 4 . drop 1 .iterate to64 . (,) 0 . sum . map (uncurry (*)) . zip mult
mult = map (256^) [2,1,0]
to64 (r,v) = let r' = v `mod` 64 in (r',(v-r')`div`64)
+ pad (v,l) =take 4 $ take (1+l) v ++ "==="
decode64 :: [Char]->[Int]
decode64 = map fst . concat . map (rem . first (reverse . take 3 . drop 1 . iterate to256 . (,) 0 . dec. map (fromJust) . filter (isJust) . map dec64).second (length.filter(=='=')). dupe) . chunksOf 4 . filter (/='\n')
where dec = sum . map (uncurry (*)) . zip (map (64^) [3,2..])
to256 (r,v) = let r' = v `mod` 256 in (r',(v-r')`div`256)
rem (v,l) = take (3-l) v
+-}
diff --git a/hask-io/Makefile b/hask-io/Makefile
deleted file mode 100644
index e6e1da6..0000000
--- a/hask-io/Makefile
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Some GHC flags explained
-#
-# -v verbose mode
-# -O2 level 2 optimizations
-# -rtsopts allow +RTS flags
-# -prof enable basic time and allocation profiling
-# -auto-all cost centers on all top level functions
-# (you can also add them via the SCC pragma)
-# -caf-all generate data for CAFs (constant applicative forms)
-# t
-# -fforce-recomp force recompilation
-#
-# Notes: you will obtain the profiling versions of dependancies via:
-# stack install --profile [libraryname]
-#
-# -fprof-auto replaced -auto-all
-# -fprof-cafs replaced -caf-all
-#
-
-build-prof:
- stack ghc --profile -- -rtsopts -prof -fprof-auto -fprof-cafs -O2 base64.hs
-
-#
-# Some +RTS flags
-#
-# -p profiling
-# -K set stack limit
-# -hc extract heap profile
-# -hy allocation by type
-# -hd allocation by constructor
-# -iN sampling frequency in seconds. e.g. -i0.01
-# -ddump-simpl generate core version
-#
-# Note: render the heap profile as graph with: hp2ps -e8in -c file.hp
-#
-
-run-prof:
- cat random.bin | ./base64 +RTS -p -K100M > /dev/null
-
-
-test-mini-hask:
- #stack ghc -- -rtsopts -prof -fprof-auto -fprof-cafs -O mini.hs -o mini_hask_exe
- stack ghc -- -O mini.hs -o mini_hask_exe
-test-mini-c:
- gcc -O3 mini.c -o mini_c_exe
diff --git a/hask-io/mini.c b/hask-io/mini.c
deleted file mode 100644
index eb5ba46..0000000
--- a/hask-io/mini.c
+++ /dev/null
@@ -1,14 +0,0 @@
-#include <stdio.h>
-int main(){
- char buf[2048];
- int a=0;
- while(1){
- size_t sz=fread(&buf,1,2048,stdin);
- if(!sz)break;
- for(size_t i=0;i<sz;i++){
- a+=buf[i];
- a%=256;
- }
- }
- printf("%i\n",a);
-}
diff --git a/hask-io/mini.hs b/hask-io/mini.hs
deleted file mode 100644
index 7d73bfe..0000000
--- a/hask-io/mini.hs
+++ /dev/null
@@ -1,3 +0,0 @@
-import qualified Data.ByteString.Lazy as B
-main = B.getContents>>=print.show.f
- where f = B.foldl1' (+)