Commit 5d0c99f0 authored by Daniel Kochmański

pathnames: use relative pathnames

parent 50fccb30
@@ -3,3 +3,4 @@
*.fasl
*.olisp
*.lafsl
+output/
\ No newline at end of file
@@ -3,7 +3,7 @@
SHELL = /bin/sh
clean-results:
-	-rm -f /var/tmp/CL-bench*
+	-rm -f output/CL-bench*
clean:
find . \( -name '*.abcl' -o -name '*.cls' -o -name '*.sparcf' -o -name "*.ppcf" -o -name '*.x86f' -o -name '*.lbytef' -o -name "*.err" -o -name '*.fas' -o -name '*.fasl' -o -name "*.faslmt" -o -name '*.lib' -o -name '*.o' -o -name '*.so' -o -name "*.pfsl" -o -name "*.ufsl" -o -name "*.dfsl" -o -name "*.olisp" -o -name "*.dfsl" -o -name "*.fsl" -o -name "*.nfasl" \) -print | xargs rm -f
......
@@ -96,9 +96,9 @@ following steps:
5. Load the file "do-execute-script.lisp", which should cause all
the tests to be executed.
-For each tested implementation, you should have a file in /var/tmp (or
-in the current directory on Windows) named "CL-benchmark-<date>".
-These files will have the following format:
+For each tested implementation, you should have a file in output/
+named "CL-benchmark-<date>". These files will have the following
+format:
,---- /var/tmp/CL-benchmark-20010821T2208 ---
| ;; -*- lisp -*- CMU Common Lisp CVS sources, level-1 built 2001-08-22 on maftia1
@@ -179,10 +179,10 @@ each test, you should see the elapsed user time, and possibly (if this
has been coded for your implementation) elapsed system time and the
number of bytes consed during the test execution.
-The data in the different /var/tmp/CL-benchmark-* files is analysed by the
-file "report.lisp", to generate a report comparing the performance of
-the different implementations. This file needs to be run in a Common
-Lisp implementation; the one you use will be considered the
+The data in the different output/CL-benchmark-* files is analysed by
+the file "report.lisp", to generate a report comparing the performance
+of the different implementations. This file needs to be run in a
+Common Lisp implementation; the one you use will be considered the
"reference" implementation. In the report which is generated, for each
test the timing for the reference implementation will be shown, as
well as the _relative times_ for each of the other tested
......
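(For illustration only, not part of this commit: a minimal sketch of the
report step under the new layout. It assumes the cl-bench ASDF system is
loadable and that report.lisp's BENCH-ANALYSIS is reachable from the
current package.)

    (asdf:load-system "cl-bench")  ; installs the "bench" logical host (see the translations below)
    (load "report.lisp")           ; defines BENCH-ANALYSIS
    (bench-analysis)               ; reads bench:result;CL-benchmark*.*, i.e. output/CL-benchmark-*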
@@ -2,7 +2,6 @@
(in-package #:cl-bench)
#+ (or)
(defun bench-gc ()
(trivial-garbage:gc :full t))
@@ -10,11 +9,11 @@
(defun bench-time ()
(error "use metering"))
-(setf (logical-pathname-translations "bench")
-      `(("root;*.*" ,(asdf:system-source-directory '#:cl-bench))
-        ("test;*.*" ,(merge-pathnames
-                      "files/"
-                      (asdf:system-source-directory '#:cl-bench)))))
+(let ((root-dir (asdf:system-source-directory '#:cl-bench)))
+  (setf (logical-pathname-translations "bench")
+        `(("root;*.*" ,root-dir)
+          ("test;*.*" ,(merge-pathnames "files/" root-dir))
+          ("result;*.*" ,(merge-pathnames "output/" root-dir)))))
;;; This is disabled after the consultation with the ABCL maintainer
......
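(For illustration only, not part of the diff: roughly how the new "bench"
translations resolve. <root-dir> stands for whatever
ASDF:SYSTEM-SOURCE-DIRECTORY returns for cl-bench; exact case handling is
implementation-dependent.)

    (translate-logical-pathname #p"bench:result;cl-bench.pdf")
    ;; => something like #P"<root-dir>/output/cl-bench.pdf"

    (directory "bench:result;CL-benchmark*.*")
    ;; => the result files that previously lived under /var/tmp/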
;;; graph-report.lisp
;;
;; Author: Johannes Grødem <johs@copyleft.no>
-;; Time-stamp: <2004-02-29 emarsden>
+;; Time-stamp: <2016-05-13 12:25:11 jack>
;;
;;
;; When loaded into CMUCL, this should generate a report comparing the
;; performance of the different CL implementations which have been
-;; tested. Reads the /var/tmp/CL-benchmark* files to obtain data from
+;; tested. Reads the output/CL-benchmark* files to obtain data from
;; previous runs.
(defparameter *screen-width* 80)
......
;;; pdf-report.lisp
;;
;; Author: Eric Marsden <emarsden@laas.fr>
-;; Time-stamp: <2004-03-09 emarsden>
+;; Time-stamp: <2016-05-13 12:24:31 jack>
;;
;;
;; When loaded into CMUCL, this should generate a report comparing the
;; performance of the different CL implementations which have been
-;; tested. Reads the /var/tmp/CL-benchmark* files to obtain data from
+;; tested. Reads the output/CL-benchmark* files to obtain data from
;; previous runs. Requires the cl-pdf library.
(in-package :cl-user)
@@ -45,9 +45,9 @@
;; FIXME annotate each benchmark with estimated allocation volume & peak storage requirement
-(defun bench-analysis (&optional (filename #p"/tmp/cl-bench.pdf"))
+(defun bench-analysis (&optional (filename #p"bench:result;cl-bench.pdf"))
(let (content data groups implementations benchmarks impl-scores impl-labels)
-(dolist (f (directory "/var/tmp/CL-benchmark*.*"))
+(dolist (f (directory "bench:result;CL-benchmark*.*"))
(ignore-errors
(with-open-file (f f :direction :input)
(let ((*read-eval* nil))
@@ -118,9 +118,9 @@
-;; (defun bench-analysis (&optional (filename #p"/tmp/cl-bench.pdf"))
+;; (defun bench-analysis (&optional (filename #p"bench:result;cl-bench.pdf"))
;; (let (data groups implementations benchmarks impl-scores impl-labels)
-;; (dolist (f (directory "/var/tmp/CL-benchmark*.*"))
+;; (dolist (f (directory "bench:result;CL-benchmark*.*"))
;; (ignore-errors
;; (with-open-file (f f :direction :input)
;; (let ((*read-eval* nil))
......
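(Usage sketch, not part of the diff; assumes cl-pdf is installed and
pdf-report.lisp has been loaded. The second filename is just an example.)

    (bench-analysis)  ; defaulted argument: writes bench:result;cl-bench.pdf, i.e. output/cl-bench.pdf
    (bench-analysis #p"bench:result;may-2016.pdf")  ; any other name under output/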
;;; report.lisp
;;
;; Author: Eric Marsden <emarsden@laas.fr>
-;; Time-stamp: <2004-02-29 emarsden>
+;; Time-stamp: <2016-05-13 12:25:00 jack>
;;
;;
;; When loaded into CMUCL, this should generate a report comparing the
;; performance of the different CL implementations which have been
-;; tested. Reads the /var/tmp/CL-benchmark* files to obtain data from
+;; tested. Reads the output/CL-benchmark* files to obtain data from
;; previous runs.
;;
;; FIXME could create graphical version using ploticus
@@ -21,7 +21,7 @@
(defun bench-analysis ()
(let (data implementations benchmarks)
-(dolist (f (directory "/var/tmp/CL-benchmark*.*"))
+(dolist (f (directory "bench:result;CL-benchmark*.*"))
(ignore-errors
(with-open-file (f f :direction :input)
(let ((*read-eval* nil))
......
;;; support.lisp --- performance benchmarks for Common Lisp implementations
;;
;; Author: Eric Marsden <emarsden@laas.fr>
-;; Time-stamp: <2016-05-13 11:10:21 jack>
+;; Time-stamp: <2016-05-13 12:26:30 jack>
;;
;;
;; The benchmarks consist of
@@ -118,7 +118,7 @@
(get-decoded-time)
(declare (ignore second))
(format nil "~aCL-benchmark-~d~2,'0d~2,'0dT~2,'0d~2,'0d"
-#+win32 "" #-win32 "/var/tmp/"
+"bench:result;"
year month date hour minute)))
;; grr, CLISP doesn't implement ~<..~:>
......
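(For illustration only, not part of the diff; the date components below are
made up. The new prefix yields names on the "bench" host, which the
result;*.* translation maps into output/.)

    (format nil "~aCL-benchmark-~d~2,'0d~2,'0dT~2,'0d~2,'0d"
            "bench:result;" 2016 5 13 12 26)
    ;; => "bench:result;CL-benchmark-20160513T1226"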
;;; all the performance benchmarks
;;;
-;;; Time-stamp: <2004-06-28 emarsden>
+;;; Time-stamp: <2016-05-13 12:20:33 jack>
(in-package :cl-bench)
......