Moved out high-level code from bench and tests · postgres-haskell/postgres-wire@85fb669

Commit 85fb669

Moved out high-level code from bench and tests
1 parent d4cb443 commit 85fb669

File tree: bench/Bench.hs, tests/Driver.hs

2 files changed: +7, -77 lines

bench/Bench.hs

Lines changed: 7 additions & 41 deletions
@@ -54,39 +54,13 @@ main = defaultMain
     -- bench "parser" $ nf parse bs
     -- ]
     -- ]
-    [ bgroup "Decoder"
-        [ env (pure dec) $ \p -> bench "datarow" $ nf (benchDataRowDecoder p) bs]
-    ]
--- main = benchMultiPw
-dec :: Decode (Maybe B.ByteString, Maybe Int32, Maybe Int32,
-              Maybe Int16, Maybe Bool, Maybe B.ByteString,
-              Maybe Bool, Maybe Bool, Maybe B.ByteString,
-              Maybe Int32, Maybe Int32, Maybe Int32)
-dec = rowDecoder
-
-parser = skipDataRowHeader *> p
-  where
-    p = (,,,,,,,,,,,)
-        <$> fn getByteString
-        <*> fn int4
-        <*> fn int4
-        <*> fn int2
-        <*> fn bool
-        <*> fn getByteString
-        <*> fn bool
-        <*> fn bool
-        <*> fn getByteString
-        <*> fn int4
-        <*> fn int4
-        <*> fn int4
-    fn = getNullable
-
-benchDataRowDecoder d bs = decodeManyRows d $
-    DataRows (DataChunk 380 bs) Empty
-  where
-    decodeDataRow = do
-        (Header _ len) <- decodeHeader
-        getByteString len
+
+-- benchDataRowDecoder d bs = decodeManyRows d $
+--     DataRows (DataChunk 380 bs) Empty
+--   where
+--     decodeDataRow = do
+--         (Header _ len) <- decodeHeader
+--         getByteString len
 
 {-# NOINLINE bs #-}
 bs :: B.ByteString
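
The block deleted above defined the 12-column row decoder and the helper that ran it over a single chunk of 380 raw data rows; only a commented-out copy of the helper remains. For reference, a minimal sketch of that benchmark as it could live outside this file follows. It reuses only names visible in the deleted lines (Decode, rowDecoder, decodeManyRows, DataRows, DataChunk, Empty) together with criterion; the import paths are assumptions about postgres-wire's module layout, not something this commit confirms.

-- Sketch only: import paths below are assumptions, not confirmed by this commit.
import Criterion.Main (bench, bgroup, defaultMain, env, nf)
import Data.Int (Int16, Int32)
import qualified Data.ByteString as B
import Database.PostgreSQL.Protocol.Store.Decode (Decode)               -- assumed path
import Database.PostgreSQL.Protocol.DataRows (decodeManyRows)           -- assumed path
import Database.PostgreSQL.Protocol.Types (DataRows(..), DataChunk(..)) -- assumed path
import Database.PostgreSQL.Protocol.Codecs.Decoders (rowDecoder)        -- assumed path

-- The 12-column decoder that used to be defined inline in Bench.hs.
dec :: Decode (Maybe B.ByteString, Maybe Int32, Maybe Int32,
               Maybe Int16, Maybe Bool, Maybe B.ByteString,
               Maybe Bool, Maybe Bool, Maybe B.ByteString,
               Maybe Int32, Maybe Int32, Maybe Int32)
dec = rowDecoder

-- Decode one chunk of 380 raw data rows, as the deleted helper did.
benchDataRowDecoder d rows = decodeManyRows d (DataRows (DataChunk 380 rows) Empty)

-- Wire it back into criterion, mirroring the deleted bgroup.
runDecoderBench :: B.ByteString -> IO ()
runDecoderBench bs = defaultMain
    [ bgroup "Decoder"
        [ env (pure dec) $ \p -> bench "datarow" $ nf (benchDataRowDecoder p) bs ]
    ]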
@@ -155,20 +129,12 @@ benchMultiPw = benchRequests createConnection $ \c -> do
     sendBatchAndSync c [q]
     d <- readNextData c
     waitReadyForQuery c
-    -- case d of
-    --     Left _ -> undefined
-    --     Right rows -> pure $ decodeManyRows dec rows
   where
     q = Query largeStmt V.empty Binary Binary AlwaysCache
     largeStmt = "SELECT * from _bytes_300_of_100"
     -- largeStmt = "select typname, typnamespace, typowner, typlen, typbyval,"
     --     <> "typcategory, typispreferred, typisdefined, typdelim,"
     --     <> "typrelid, typelem, typarray from pg_type"
-    dec :: Decode (Maybe B.ByteString, Maybe Int32, Maybe Int32,
-                  Maybe Int16, Maybe Bool, Maybe B.ByteString,
-                  Maybe Bool, Maybe Bool, Maybe B.ByteString,
-                  Maybe Int32, Maybe Int32, Maybe Int32)
-    dec = rowDecoder
 
 benchLibpq :: IO ()
 benchLibpq = benchRequests libpqConnection $ \c -> do
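
With the decoder gone, benchMultiPw now measures only the wire round-trip (sendBatchAndSync, readNextData, waitReadyForQuery) and discards the result. If the decoding step needs to be measured again, a small sketch is shown below; it reuses the names from the deleted comment and assumes, as the deleted "case d of" branches did, that readNextData returns an Either with the rows on the Right.

-- Sketch only: 'dec' can be any row decoder, e.g. the 12-column Decode above.
benchMultiPwDecoded dec = benchRequests createConnection $ \c -> do
    sendBatchAndSync c [q]
    d <- readNextData c
    waitReadyForQuery c
    case d of
        Left e     -> error (show e)                 -- the deleted code used 'undefined' here
        Right rows -> pure (decodeManyRows dec rows) -- decode instead of discarding
  where
    q = Query "SELECT * from _bytes_300_of_100" V.empty Binary Binary AlwaysCache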

tests/Driver.hs

Lines changed: 0 additions & 36 deletions
@@ -240,39 +240,3 @@ testCorrectDatarows = withConnection $ \c -> do
             decodeHeader
             getInt16BE
             getByteString . fromIntegral =<< getInt32BE
-
-testDecoder :: IO ()
-testDecoder = withConnection $ \c -> do
-    let stmt = "SELECT '{{1,2},{Null,4}}'::int[][]"
-    sendBatchAndSync c [Query stmt V.empty Binary Binary NeverCache]
-    r <- readNextData c
-    waitReadyForQuery c
-    case r of
-        Left e -> error $ show e
-        Right rows -> do
-            -- print rows
-            print $ decodeManyRows dec rows
-  where
-    -- dec :: Decode (Int32, (Maybe Int32, Int32, Int32), Int32)
-    -- dec = rowDecoder
-    largeStmt = "select typname, typnamespace, typowner, typlen, typbyval,"
-        <> "typcategory, typispreferred, typisdefined, typdelim,"
-        <> "typrelid, typelem, typarray from pg_type"
-    -- dec :: Decode (Maybe B.ByteString, Maybe Int32, Maybe Int32,
-    --               Maybe Int16, Maybe Bool, Maybe B.ByteString,
-    --               Maybe Bool, Maybe Bool, Maybe B.ByteString,
-    --               Maybe Int32, Maybe Int32, Maybe Int32)
-    dec :: Decode (V.Vector (V.Vector (Maybe Int32)))
-    dec = rowDecoder
-    -- <$> fn getByteString
-    -- <*> fn int4
-    -- <*> fn int4
-    -- <*> fn int2
-    -- <*> fn bool
-    -- <*> fn getByteString
-    -- <*> fn bool
-    -- <*> fn bool
-    -- <*> fn getByteString
-    -- <*> fn int4
-    -- <*> fn int4
-    -- <*> fn int4
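
The deleted testDecoder was the last user of the high-level decoder in the test suite: it fetched a two-dimensional int array and printed the decoded rows. For reference, the sketch below reconstructs that check from the names in the deleted lines only (withConnection, Query, sendBatchAndSync, readNextData, waitReadyForQuery, decodeManyRows, rowDecoder); like the deleted test, it prints the result rather than asserting on it.

-- Sketch only: reconstructed from the deleted test; not part of the suite after this commit.
testDecoder :: IO ()
testDecoder = withConnection $ \c -> do
    let stmt = "SELECT '{{1,2},{Null,4}}'::int[][]"
    sendBatchAndSync c [Query stmt V.empty Binary Binary NeverCache]
    r <- readNextData c
    waitReadyForQuery c
    case r of
        Left e     -> error (show e)
        -- Each row decodes to a vector of vectors of nullable Int32s,
        -- so the NULL element in the array literal comes back as Nothing.
        Right rows -> print (decodeManyRows dec rows)
  where
    dec :: Decode (V.Vector (V.Vector (Maybe Int32)))
    dec = rowDecoder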

0 commit comments
