@@ -5,6 +5,7 @@ var vows = require('vows')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, assert = require('assert')
+, zlib = require('zlib')
, EOL = require('os').EOL || '\n';

log4js.clearAppenders();
@@ -214,6 +215,79 @@ vows.describe('log4js fileAppender').addBatch({
        }
      }
    }
+  },
+  'with a max file size and 2 compressed backups': {
+    topic: function() {
+      var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-compressed-test.log')
+      , logger = log4js.getLogger('max-file-size-backups');
+      remove(testFile);
+      remove(testFile + '.1.gz');
+      remove(testFile + '.2.gz');
+
+      //log file of 50 bytes maximum, 2 backups
+      log4js.clearAppenders();
+      log4js.addAppender(
+        require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2, null, true),
+        'max-file-size-backups'
+      );
+      logger.info("This is the first log message.");
+      logger.info("This is the second log message.");
+      logger.info("This is the third log message.");
+      logger.info("This is the fourth log message.");
+      var that = this;
+      //give the system a chance to open the stream
+      setTimeout(function() {
+        fs.readdir(__dirname, function(err, files) {
+          if (files) {
+            that.callback(null, files.sort());
+          } else {
+            that.callback(err, files);
+          }
+        });
+      }, 200);
+    },
+    'the log files': {
+      topic: function(files) {
+        var logFiles = files.filter(
+          function(file) { return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1; }
+        );
+        return logFiles;
+      },
+      'should be 3': function(files) {
+        assert.equal(files.length, 3);
+      },
+      'should be named in sequence': function(files) {
+        assert.deepEqual(files, [
+          'fa-maxFileSize-with-backups-compressed-test.log',
+          'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
+          'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
+        ]);
+      },
+      'and the contents of the first file': {
+        topic: function(logFiles) {
+          fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
+        },
+        'should be the last log message': function(contents) {
+          assert.include(contents, 'This is the fourth log message.');
+        }
+      },
+      'and the contents of the second file': {
+        topic: function(logFiles) {
+          zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), this.callback);
+        },
+        'should be the third log message': function(contents) {
+          assert.include(contents.toString('utf8'), 'This is the third log message.');
+        }
+      },
+      'and the contents of the third file': {
+        topic: function(logFiles) {
+          zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), this.callback);
+        },
+        'should be the second log message': function(contents) {
+          assert.include(contents.toString('utf8'), 'This is the second log message.');
+        }
+      }
+    }
  }
}).addBatch({
  'configure': {
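
Note on the change under test: judging by the test name and the new `zlib` require, the final `true` passed to `appender(testFile, log4js.layouts.basicLayout, 50, 2, null, true)` enables gzip compression of rolled backups, which is why the test gunzips the `.1.gz` and `.2.gz` files before asserting on their contents. The snippet below is a minimal standalone sketch of the roll-and-compress technique being exercised, built only from Node's stream APIs; it is an illustration, not log4js's actual rolling code, and the `rollAndCompress` helper is a hypothetical name.

// Sketch: stream the current log through gzip into a '.1.gz' backup,
// then remove the original so the appender can start a fresh file.
var fs = require('fs')
  , zlib = require('zlib');

function rollAndCompress(logFile, cb) { // hypothetical helper
  fs.createReadStream(logFile)
    .pipe(zlib.createGzip())
    .pipe(fs.createWriteStream(logFile + '.1.gz'))
    .on('close', function() {
      // Real rolling would also shift older backups (.1.gz -> .2.gz, etc.)
      // before this step, and cap the total at the configured backup count.
      fs.unlink(logFile, cb);
    });
}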