* test/test-ooddl.lisp: Test universal-time slot in an object
* TODO: Remove need for universal-time test
* debian/rules, debian/control: Add cl-sql-oracle binary package
+ * doc/appendix.xml: Add Oracle backend information
+ * db-oracle/oracle-objects.lisp: Add database-get-type-specifier for
+ universal-time. Convert BIGINT CLSQL type to CHAR SQL type
25 May 2004 Kevin Rosenberg <kevin@rosenberg.net>
	* sql/oodml.lisp: (string n) now produces a CHAR field. Add new VARCHAR type.
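For context, a hypothetical pair of view-class slots showing the distinction this entry describes (the class and slot names are made up; only the :type keywords come from the entry):

(clsql:def-view-class customer ()
  ((code :type (string 4)   :initarg :code)    ; fixed width -> CHAR(4)
   (name :type (varchar 40) :initarg :name)))  ; new keyword -> VARCHAR(40)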
(defmethod database-get-type-specifier ((type (eql 'bigint)) args
database (db-type (eql :oracle)))
- (declare (ignore database))
- (if args
- (format nil "NUMBER(~A,~A)"
- (or (first args) 38) (or (second args) 0))
- "NUMBER(38,0)"))
+ (declare (ignore args database))
+ "CHAR(20)")
+
+(defmethod database-get-type-specifier ((type (eql 'universal-time)) args
+ database (db-type (eql :oracle)))
+ (declare (ignore args database))
+ "CHAR(20)")
(defmethod database-get-type-specifier ((type (eql 'string)) args
database (db-type (eql :oracle)))
(when (char-equal #\t (schar val 0))
t))
+(defmethod read-sql-value (val (type (eql 'bigint))
+ database (db-type (eql :oracle)))
+ (declare (ignore database))
+ (parse-integer val))
+
+(defmethod read-sql-value (val (type (eql 'universal-time))
+ database (db-type (eql :oracle)))
+ (declare (ignore database))
+ (parse-integer val))
+
+
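Reading the value back is the inverse step: Oracle returns a CHAR(20) column blank-padded, and PARSE-INTEGER skips surrounding whitespace, so the round trip is a straight parse. A minimal sketch with a made-up padded value:

;; padding from the CHAR column is harmless to PARSE-INTEGER
(parse-integer "3141592653          ")
;; => 3141592653 (and the parse-end index as a second value)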
(defmethod database-get-type-specifier ((type (eql 'wall-time)) args
database (db-type (eql :oracle)))
(declare (ignore args database))
:unsigned-char))))
(if (string-equal str "NULL") nil str)))
-(defun deref-oci-int64 (arrayptr index)
- (let ((low32 (uffi:deref-array arrayptr '(:array :unsigned-int)
- (+ index index)))
- (high32 (uffi:deref-array arrayptr '(:array :unsigned-int)
- (+ 1 index index))))
- (make-64-bit-integer high32 low32)))
-
-(defun deref-oci-int128 (arrayptr index)
- (let* ((base (* 4 index))
- (d (uffi:deref-array arrayptr '(:array :unsigned-int) (incf base)))
- (c (uffi:deref-array arrayptr '(:array :unsigned-int) (incf base)))
- (b (uffi:deref-array arrayptr '(:array :unsigned-int) (incf base)))
- (a (uffi:deref-array arrayptr '(:array :unsigned-int) (incf base))))
- (make-128-bit-integer a b c d)))
-
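For reference, the deleted helpers reassembled wide integers from consecutive 32-bit words in the OCI fetch buffer. A minimal sketch of that word-combining step, assuming unsigned halves (the real make-64-bit-integer definition is not part of this excerpt):

(defun combine-64-bit (high32 low32)
  ;; shift the high word up 32 bits and merge in the low word
  (logior (ash high32 32) low32))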
;; the OCI library, part Z: no-longer-used logic to convert from
;; Oracle's binary date representation to Common Lisp's native date
;; representation
(deref-vp errhp))
(let ((*scale (uffi:deref-pointer scale :byte))
(*precision (uffi:deref-pointer precision :byte)))
-
+
;;(format t "scale=~d, precision=~d~%" *scale *precision)
(cond
((or (and (minusp *scale) (zerop *precision))
- (and (zerop *scale) (< 0 *precision 9)))
+ (and (zerop *scale) (plusp *precision)))
(setf buffer (acquire-foreign-resource :int +n-buf-rows+)
sizeof 4 ;; sizeof(int)
dtype #.SQLT-INT))
- ((and (zerop *scale)
- (plusp *precision)
- #+ignore (< *precision 19))
- (setf buffer (acquire-foreign-resource :unsigned-int
- (* 2 +n-buf-rows+))
- sizeof 8 ;; sizeof(int64)
- dtype #.SQLT-INT))
- ;; Bug in OCI? But OCI won't take 16-byte buffer for 128-bit
- ;; integers
- #+ignore
- ((and (zerop *scale) (plusp *precision))
- (setf buffer (acquire-foreign-resource :unsigned-int
- (* 4 +n-buf-rows+))
- sizeof 8 ;; sizeof(int128)
- dtype #.SQLT-INT))
(t
(setf buffer (acquire-foreign-resource :double +n-buf-rows+)
sizeof 8 ;; sizeof(double)
t)
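Net effect of the hunk above: any NUMBER column with zero scale and a positive precision is now fetched through a plain 4-byte int buffer, and everything else falls back to a double; the dedicated 64-bit path is gone because big integers now travel as CHAR(20) strings. A hedged sketch of the selection rule (the function name and returned pairs are illustrative, not from the source):

(defun pick-number-buffer (scale precision)
  (if (or (and (minusp scale) (zerop precision))  ; unconstrained NUMBER
          (and (zerop scale) (plusp precision)))  ; integral NUMBER(p,0)
      (values :int 4)       ; SQLT-INT into a native int buffer
      (values :double 8)))  ; fractional NUMBERs come back as doubles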
(deftest :fddl/attributes/7
- (and (member (clsql:attribute-type [bd_utime] [employee]) '(:bigint :int8 :number)) t)
+ (and (member (clsql:attribute-type [bd_utime] [employee]) '(:bigint :int8 :char)) t)
t)
(let ((index (1+ i))
(int (first (car rest)))
(bigint (second (car rest))))
- (when (and (eq *test-database-type* :odbc)
- (eq *test-database-underlying-type* :postgresql)
+ (when (and (or (eq *test-database-type* :oracle)
+ (and (eq *test-database-type* :odbc)
+ (eq *test-database-underlying-type* :postgresql)))
(stringp bigint))
(setf bigint (parse-integer bigint)))
(unless (and (eql int index)