<dependency>
<groupId>org.jruby</groupId>
<artifactId>jruby-complete</artifactId>
- <version>1.6.3</version>
+ <version>1.6.1</version>
</dependency>
<dependency>
<groupId>geronimo-spec</groupId>
#sonar.ajp13.port: 8009
-#-----------------------------------------------------------------------
+#---------------------------------------------------------
# DATABASE
#
# IMPORTANT : the embedded database Derby is used by default.
-# It is recommended for tests only. Please use an external database
-# for production environment (MySQL, Oracle, Postgresql, SQLServer)
+# It is recommended for tests only. Please use another database
+# for a production environment (MySQL, Oracle, Postgresql,
+# SQLServer)
#
-#-----------------------------------------------------------------------
+#---------------------------------------------------------
#----- Credentials
# Permissions to create tables and indexes must be granted to the JDBC user.
#sonar.jdbc.url: jdbc:postgresql://localhost/sonar
#sonar.jdbc.driverClassName: org.postgresql.Driver
#sonar.jdbc.validationQuery: select 1
-#sonar.jdbc.postgreSearchPath: public
#----- Microsoft SQLServer
#----- JDBC Datasource bounded to JNDI
-# When sonar webapp is deployed into a JEE server, the JDBC datasource can be loaded from JNDI.
+# When the Sonar webapp is deployed into a JEE server, the JDBC datasource can be configured in the JEE server and registered in JNDI.
# In such a case Sonar uses this datasource to connect to the database.
-# If you activate this feature, then the properties starting with "sonar.jdbc." can be commented, except sonar.jdbc.dialect.
+# If you activate this feature, then the properties starting with "sonar.jdbc." can be commented out, except "sonar.jdbc.dialect".
# The JDBC driver must still be deployed into the directory /extensions/jdbc-driver.
#sonar.jdbc.jndiName: jdbc/sonar
-# Values are : mysql, mssql, oracle, postgresql
+# Values are : mysql, mssql, derby, oracle, postgresql
#sonar.jdbc.dialect=
adapter: jdbc
production:
- <% if ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.jndiName') %>
- adapter: jndi
- <% else %>
- adapter: jdbc
- username: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.username' ) || 'sonar' %>
- password: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.password') || 'sonar' %>
- url: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.url') %>
-
- # PostgreSQL
- schema_search_path: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.postgreSearchPath') %>
- connection_alive_sql: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.validationQuery') %>
- <% end %>
-
- dialect: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getDialect().getActiveRecordDialectCode() %>
- driver: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.driverClassName') %>
- pool: <%= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.maxActive') || 10 %>
\ No newline at end of file
+ adapter: jdbc
\ No newline at end of file
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
end
+
+module ActiveRecord
+ module ConnectionAdapters
+
+ # Patch to delegate configuration of JDBC datasource to Sonar.
+ # See vendor/gems/activerecord-jdbc-adapter/lib/active_record/connection_adapters/jdbc_adapter.rb
+ class JdbcConnection
+ def initialize(config)
+ @config = config.symbolize_keys!
+ @config[:retry_count] ||= 5
+ @config[:connection_alive_sql] ||= ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConfigurationValue('sonar.jdbc.validationQuery')
+
+ @jndi_connection = true # used in JndiConnectionPoolCallbacks to close this initial connection
+
+ @connection_factory = JdbcConnectionFactory.impl do
+ ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConnection()
+ end
+ @config[:dialect] = ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getDialect().getActiveRecordDialectCode()
+
+ connection # force the connection to load
+ set_native_database_types
+ @stmts = {}
+ rescue Exception => e
+ raise "Failed to connect to database: #{e}"
+ end
+ end
+ end
+end
+
+
class ActiveRecord::Migration
def self.alter_to_big_primary_key(tablename)
dialect = ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getDialect().getActiveRecordDialectCode()
end
end
+module JdbcSpec
+
+ #
+ # Ticket http://tools.assembla.com/sonar/ticket/200
+ # Problem with MySQL TEXT columns: the ActiveRecord :text type is mapped to the TEXT type (65535 characters).
+ # But we would like the bigger MEDIUMTEXT for the snapshot_sources table (16777215 characters).
+ # This hack works only for ActiveRecord-JDBC (JRuby use).
+ # See http://www.headius.com/jrubywiki/index.php/Adding_Datatypes_to_ActiveRecord-JDBC
+ # The following has been copied from WEB-INF\gems\gems\activerecord-jdbc-adapter-0.9\lib\jdbc_adapter\jdbc_mysql.rb
+ # Problem still in activerecord-jdbc-adapter 0.9
+ module MySQL
+ def modify_types(tp)
+ tp[:primary_key] = "int(11) DEFAULT NULL auto_increment PRIMARY KEY"
+ tp[:decimal] = { :name => "decimal" }
+ tp[:timestamp] = { :name => "datetime" }
+ tp[:datetime][:limit] = nil
+
+ # sonar
+ tp[:text] = { :name => "mediumtext" }
+ tp[:binary] = { :name => "longblob" }
+ tp[:big_integer] = { :name => "bigint"}
+
+ tp
+ end
+ end
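+ # Illustrative effect of the override above (hypothetical column name): a column
+ # declared in a migration as
+ #   t.column :data, :text
+ # is now created as MEDIUMTEXT (16777215 characters) on MySQL instead of
+ # TEXT (65535 characters), which is what the snapshot_sources table needs.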
+
+ # wrong column types on oracle 10g timestamp and datetimes
+ # Problem still in activerecord-jdbc-adapter 0.8
+ module Oracle
+ def modify_types(tp)
+ tp[:primary_key] = "NUMBER(38) NOT NULL PRIMARY KEY"
+ tp[:integer] = { :name => "NUMBER", :limit => 38 }
+ tp[:datetime] = { :name => "TIMESTAMP" } # updated for sonar
+ tp[:timestamp] = { :name => "TIMESTAMP" } # updated for sonar
+ tp[:time] = { :name => "DATE" }
+ tp[:date] = { :name => "DATE" }
+
+ #sonar
+ tp[:big_integer] = { :name => "NUMBER", :limit => 38 }
+
+ tp
+ end
+
+ end
+
+ module MsSQL
+ def modify_types(tp)
+ tp[:primary_key] = "int NOT NULL IDENTITY(1, 1) PRIMARY KEY"
+ tp[:integer][:limit] = nil
+ tp[:boolean] = {:name => "bit"}
+ tp[:binary] = { :name => "image"}
+
+ # sonar patch:
+ tp[:text] = { :name => "NVARCHAR(MAX)" }
+ tp[:big_integer] = { :name => "bigint"}
+ end
+
+ end
+
+ # activerecord-jdbc-adapter has a missing quote_table_name method
+ module Derby
+ def modify_types(tp)
+ tp[:primary_key] = "int generated by default as identity NOT NULL PRIMARY KEY"
+ tp[:integer][:limit] = nil
+ tp[:string][:limit] = 256
+ tp[:boolean] = {:name => "smallint"}
+
+ #sonar
+ tp[:big_integer] = {:name => "bigint"}
+
+ tp
+ end
+
+ def quote_table_name(name) #:nodoc:
+ quote_column_name(name).gsub('.', '`.`')
+ end
+ end
+
+ module PostgreSQL
+ def modify_types(tp)
+ tp[:primary_key] = "serial primary key"
+ tp[:integer][:limit] = nil
+ tp[:boolean][:limit] = nil
+
+ # sonar
+ # tp[:string][:limit] = 255
+ tp[:big_integer] = { :name => "int8", :limit => nil }
+
+ tp
+ end
+
+ # See SONAR-862 on the Postgres search_path setting.
+ # The issue is fixed in the next activerecord-jdbc-adapter version: http://github.com/nicksieger/activerecord-jdbc-adapter/commit/2575700d3aee2eb395cac3e7933bb4d129fa2f03
+ # More details on https://rails.lighthouseapp.com/projects/8994/tickets/918-postgresql-tables-not-generating-correct-schema-list
+ def columns(table_name, name=nil)
+ # schema_name must be nil instead of "public"
+ schema_name = nil
+ if table_name =~ /\./
+ parts = table_name.split(/\./)
+ table_name = parts.pop
+ schema_name = parts.join(".")
+ end
+ @connection.columns_internal(table_name, name, schema_name)
+ end
+ end
+end
+
# patch for SONAR-1182. GWT does not support ISO8601 dates that end with 'Z'
# http://google-web-toolkit.googlecode.com/svn/javadoc/1.6/com/google/gwt/i18n/client/DateTimeFormat.html
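# For example, a timestamp rendered as "2011-02-01T08:15:30Z" (trailing 'Z' as the UTC
# designator) cannot be parsed by GWT's DateTimeFormat; an explicit numeric offset such
# as "2011-02-01T08:15:30+0000" is the usual workaround (illustrative values).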
module ActiveSupport
require File.dirname(__FILE__) + '/../lib/sonar_webservice_plugins.rb'
require File.dirname(__FILE__) + '/../lib/database_version.rb'
DatabaseVersion.automatic_setup
-
-
-#
-#
-# IMPORTANT NOTE
-# Some changes have been done in activerecord-jdbc-adapter. Most of them relate to column types.
-# All these changes are prefixed by the comment #sonar
-#
-#
\ No newline at end of file
-#
-# Sonar, entreprise quality control tool.
-# Copyright (C) 2008-2011 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# Sonar is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# Sonar is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with Sonar; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
-#
+ #
+ # Sonar, enterprise quality control tool.
+ # Copyright (C) 2008-2011 SonarSource
+ # mailto:contact AT sonarsource DOT com
+ #
+ # Sonar is free software; you can redistribute it and/or
+ # modify it under the terms of the GNU Lesser General Public
+ # License as published by the Free Software Foundation; either
+ # version 3 of the License, or (at your option) any later version.
+ #
+ # Sonar is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ # Lesser General Public License for more details.
+ #
+ # You should have received a copy of the GNU Lesser General Public
+ # License along with Sonar; if not, write to the Free Software
+ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
+ #
class CreateUsers < ActiveRecord::Migration
def self.up
+ create_users
+ end
+
+ private
+
+ def self.create_users
create_table 'users' do |t|
- t.column :login, :string, :limit => 40
- t.column :name, :string, :limit => 200, :null => true
- t.column :email, :string, :limit => 100
- t.column :crypted_password, :string, :limit => 40
- t.column :salt, :string, :limit => 40
- t.column :created_at, :datetime
- t.column :updated_at, :datetime
- t.column :remember_token, :string, :limit => 500, :null => true
+ t.column :login, :string, :limit => 40
+ t.column :name, :string, :limit => 200, :null => true
+ t.column :email, :string, :limit => 100
+ t.column :crypted_password, :string, :limit => 40
+ t.column :salt, :string, :limit => 40
+ t.column :created_at, :datetime
+ t.column :updated_at, :datetime
+ t.column :remember_token, :string, :limit => 500, :null => true
t.column :remember_token_expires_at, :datetime
end
+
+ User.create(:login => 'admin', :name => 'Administrator', :email => '', :password => 'admin',
+ :password_confirmation => 'admin')
end
+
end
+++ /dev/null
- #
- # Sonar, entreprise quality control tool.
- # Copyright (C) 2008-2011 SonarSource
- # mailto:contact AT sonarsource DOT com
- #
- # Sonar is free software; you can redistribute it and/or
- # modify it under the terms of the GNU Lesser General Public
- # License as published by the Free Software Foundation; either
- # version 3 of the License, or (at your option) any later version.
- #
- # Sonar is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- # Lesser General Public License for more details.
- #
- # You should have received a copy of the GNU Lesser General Public
- # License along with Sonar; if not, write to the Free Software
- # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
- #
-class CreateAdministrator < ActiveRecord::Migration
-
- def self.up
- User.create(:login => 'admin', :name => 'Administrator', :email => '', :password => 'admin',
- :password_confirmation => 'admin')
- end
-
-end
--- /dev/null
+--- !ruby/object:Gem::Specification
+name: activerecord-jdbc-adapter
+version: !ruby/object:Gem::Version
+ prerelease: false
+ segments:
+ - 0
+ - 9
+ - 0
+ - 1
+ version: 0.9.0.1
+platform: ruby
+authors:
+ - Nick Sieger, Ola Bini and JRuby contributors
+autorequire:
+bindir: bin
+cert_chain: []
+
+date: 2009-10-26 00:00:00 +01:00
+default_executable:
+dependencies: []
+
+description: |-
+ activerecord-jdbc-adapter is a database adapter for Rails' ActiveRecord
+ component that can be used with JRuby[http://www.jruby.org/]. It allows use of
+ virtually any JDBC-compliant database with your JRuby on Rails application.
+email: nick@nicksieger.com, ola.bini@gmail.com
+executables: []
+
+extensions: []
+
+extra_rdoc_files:
+ - History.txt
+ - Manifest.txt
+ - README.txt
+ - LICENSE.txt
+files:
+ - History.txt
+ - Manifest.txt
+ - README.txt
+ - Rakefile
+ - LICENSE.txt
+ - lib/active_record/connection_adapters/cachedb_adapter.rb
+ - lib/active_record/connection_adapters/derby_adapter.rb
+ - lib/active_record/connection_adapters/h2_adapter.rb
+ - lib/active_record/connection_adapters/hsqldb_adapter.rb
+ - lib/active_record/connection_adapters/informix_adapter.rb
+ - lib/active_record/connection_adapters/jdbc_adapter.rb
+ - lib/active_record/connection_adapters/jdbc_adapter_spec.rb
+ - lib/active_record/connection_adapters/jndi_adapter.rb
+ - lib/active_record/connection_adapters/mysql_adapter.rb
+ - lib/active_record/connection_adapters/oracle_adapter.rb
+ - lib/active_record/connection_adapters/postgresql_adapter.rb
+ - lib/active_record/connection_adapters/sqlite3_adapter.rb
+ - lib/jdbc_adapter/jdbc_cachedb.rb
+ - lib/jdbc_adapter/jdbc_db2.rb
+ - lib/jdbc_adapter/jdbc_derby.rb
+ - lib/jdbc_adapter/jdbc_firebird.rb
+ - lib/jdbc_adapter/jdbc_hsqldb.rb
+ - lib/jdbc_adapter/jdbc_informix.rb
+ - lib/jdbc_adapter/jdbc_mimer.rb
+ - lib/jdbc_adapter/jdbc_mssql.rb
+ - lib/jdbc_adapter/jdbc_mysql.rb
+ - lib/jdbc_adapter/jdbc_oracle.rb
+ - lib/jdbc_adapter/jdbc_postgre.rb
+ - lib/jdbc_adapter/jdbc_sqlite3.rb
+ - lib/jdbc_adapter/jdbc_sybase.rb
+ - lib/jdbc_adapter/missing_functionality_helper.rb
+ - lib/jdbc_adapter/rake_tasks.rb
+ - lib/jdbc_adapter/tsql_helper.rb
+ - lib/jdbc_adapter/version.rb
+ - lib/jdbc_adapter.rb
+ - lib/jdbc_adapter/jdbc_adapter_internal.jar
+ - test/activerecord/connection_adapters/type_conversion_test.rb
+ - test/activerecord/connections/native_jdbc_mysql/connection.rb
+ - test/cachedb_simple_test.rb
+ - test/db/cachedb.rb
+ - test/db/db2.rb
+ - test/db/derby.rb
+ - test/db/h2.rb
+ - test/db/hsqldb.rb
+ - test/db/informix.rb
+ - test/db/jdbc.rb
+ - test/db/jndi_config.rb
+ - test/db/logger.rb
+ - test/db/mssql.rb
+ - test/db/mysql.rb
+ - test/db/oracle.rb
+ - test/db/postgres.rb
+ - test/db/sqlite3.rb
+ - test/db2_simple_test.rb
+ - test/derby_multibyte_test.rb
+ - test/derby_simple_test.rb
+ - test/generic_jdbc_connection_test.rb
+ - test/h2_simple_test.rb
+ - test/has_many_through.rb
+ - test/hsqldb_simple_test.rb
+ - test/informix_simple_test.rb
+ - test/jdbc_adapter/jdbc_db2_test.rb
+ - test/jdbc_adapter/jdbc_sybase_test.rb
+ - test/jdbc_common.rb
+ - test/jndi_callbacks_test.rb
+ - test/jndi_test.rb
+ - test/manualTestDatabase.rb
+ - test/minirunit/testConnect.rb
+ - test/minirunit/testH2.rb
+ - test/minirunit/testHsqldb.rb
+ - test/minirunit/testLoadActiveRecord.rb
+ - test/minirunit/testMysql.rb
+ - test/minirunit/testRawSelect.rb
+ - test/minirunit.rb
+ - test/models/add_not_null_column_to_table.rb
+ - test/models/auto_id.rb
+ - test/models/data_types.rb
+ - test/models/entry.rb
+ - test/models/reserved_word.rb
+ - test/mssql_simple_test.rb
+ - test/mysql_multibyte_test.rb
+ - test/mysql_simple_test.rb
+ - test/oracle_simple_test.rb
+ - test/postgres_reserved_test.rb
+ - test/postgres_simple_test.rb
+ - test/simple.rb
+ - test/sqlite3_simple_test.rb
+ - lib/jdbc_adapter/jdbc.rake
+ - src/java/jdbc_adapter/JdbcAdapterInternalService.java
+ - src/java/jdbc_adapter/JdbcConnectionFactory.java
+ - src/java/jdbc_adapter/JdbcDerbySpec.java
+ - src/java/jdbc_adapter/JdbcMySQLSpec.java
+ - src/java/jdbc_adapter/SQLBlock.java
+has_rdoc: true
+homepage: http://jruby-extras.rubyforge.org/activerecord-jdbc-adapter
+licenses: []
+
+post_install_message:
+rdoc_options:
+ - --main
+ - README.txt
+require_paths:
+ - lib
+required_ruby_version: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ version: "0"
+required_rubygems_version: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ version: "0"
+requirements: []
+
+rubyforge_project: jruby-extras
+rubygems_version: 1.3.6
+signing_key:
+specification_version: 3
+summary: JDBC adapter for ActiveRecord, for use within JRuby on Rails.
+test_files: []
+
--- /dev/null
+== 0.9
+
+- Now updated to support ActiveRecord 2.2. JNDI-based connections will
+ automatically connect/disconnect for every AR connection pool
+ checkout/checkin. For best results, set your pool: parameter >= the
+ actual maximum size of the JNDI connection pool. (We'll look at how
+ to eliminate the need to configure AR's pool in the future.)
+- NEW! Informix support courtesy of Javier Fernandez-Ivern.
+- Backport another Oracle CLOB issue, thanks Edson César.
+- Rubyforge #22018: chomp final trailing semicolon for oracle
+- JRUBY-2848: Fix NPE error in set_native_database_types
+- Rework oracle lob saving callback to be Rails 2.1 friendly (assist
+ from court3nay)
+- JRUBY-2715: Add create/drop database methods to Postgres (Peter Williams)
+- JRUBY-3183: Fix structure dump for Postgres (Ryan Bell)
+- JRUBY-3184: recreate_database for test database working for PG (Ryan Bell)
+- JRUBY-3186: disable referential integrity for PG (Ryan Bell)
+- Authoritative repository now hosted at
+ git://github.com/nicksieger/activerecord-jdbc-adapter.git; rubyforge
+ svn trunk cleaned out.
+
+== 0.8.2
+
+- Added an optional config key called :dialect. Using :dialect allows you to
+ override the default SQL dialect for the driver class being used. There are
+ a few cases for this:
+  - Using Sybase w/ the jTDS driver.
+ - Using rebranded drivers.
+  - It makes more sense to use :dialect, rather than :driver when using JNDI.
+- JRUBY-2619: Typo with :test config causing problems with dev database (Igor Minar)
+- 20524, JRUBY-2612: Since when did I think that there was a #true? method on Object?
+
+== 0.8.1
+
+- Now sporting a JDBC sqlite3 adapter! Thanks Joseph Athman.
+- Added support for InterSystems Cache database (Ryan Bell)
+- Fix for JRUBY-2256
+- JRUBY-1638, JRUBY-2404, JRUBY-2463: schema.table handling and Oracle NUMBER fixes (Darcy Schultz & Jesse Hu)
+- Add structure dump and other DDL-ish for DB2 (courtesy abedra and stuarthalloway)
+- Fix missing quote_table_name function under Rails 1.2.6 and earlier
+- Small tweaks to jdbc.rake to select proper config
+- JRUBY-2011: Fix MSSQL string un-quoting issue (Silvio Fonseca)
+- JRUBY-1977, 17427: Fix information_schema select issue with MSSQL (Matt Burke)
+- 20479: Improve get_table_name for MSSQL (Aslak Hellesøy)
+- 20243: numerics improvements for MSSQL (Aslak Hellesøy)
+- 20172: don't quote table names for MSSQL (Thor Marius Henrichsen)
+- 19729: check for primary key existence in postgres during insert (Martin Luder)
+- JRUBY-2297, 18846: retrying failing SQL statements is harmful when not autocommitting (Craig McMillan)
+- 10021: very preliminary sybase support. (Mark Atkinson) Not usable until collision w/ sqlserver driver is resolved.
+- JRUBY-2312, JRUBY-2319, JRUBY-2322: Oracle timestamping issues (Jesse Hu & Michael König)
+- JRUBY-2422: Fix MySQL referential integrity and rollback issues
+- JRUBY-2382: mysql string quoting fails with ArrayIndexOutofBoundsException
+
+== 0.8
+
+- NOTE: This release is only compatible with JRuby 1.1RC3 or later.
+- Because of recent API changes in trunk in preparation for JRuby 1.1, this release is not
+ backward compatible with previous JRuby releases. Hence the version bump.
+- Internal: convert Java methods to be defined with annotations
+- Fix problem with reserved words coming back pre-quoted from #indexes in postgres
+- JRUBY-2205: Fix N^2 allocation of bytelists for mysql quoting (taw)
+- Attempt a fix for Rubyforge 18059
+- Upgrade derby to 10.3.2.1
+- Fix db:create etc. in the case where JDBC is loaded in Rails' preinitializer.rb
+- Fix db:drop to actually work
+- Fix for Rubyforge #11567 (Matt Williams)
+
+== 0.7.2
+
+- JRUBY-1905: add_column for derby, hsqldb, and postgresql (Stephen Bannasch)
+- Fix db:create for JDBC
+- Support Rails 2 with the old "require 'jdbc_adapter'" approach
+- JRUBY-1966: Instead of searching for just tables, search for views and tables.
+- JRUBY-1583: DB2 numeric quoting (Ryan Shillington)
+- JRUBY-1634: Oracle DATE type mapping (Daniel Wintschel)
+- JRUBY-1543: rename_column issue with more recent MySQL drivers (Oliver Schmelzle)
+- Rubyforge #15074: ConnectionAdapters::JdbcAdapter.indexes is missing name and
+ schema_name parameters in the method signature (Igor Minar)
+- Rubyforge #13558: definition for the indexes method (T Meyarivan)
+- JRUBY-2051: handle schemaname and tablename more correctly for columns
+- JRUBY-2102: Postgres Adapter cannot handle datetime type (Rainer Hahnekamp)
+- JRUBY-2018: Oracle behind ActiveRecord-JDBC fails with "Invalid column index" (K Venkatasubramaniyan)
+- JRUBY-2012: jdbc_mysql structure dump fails for mysql views (Tyler Jennings)
+
+== 0.7.1
+
+- Add adapter and driver for H2 courtesy of Caleb Land
+- Fix "undefined method `last' for {}:Hash" error introduced with new Rake 0.8.1 (JRUBY-1859)
+
+== 0.7
+
+- PLEASE NOTE: This release is not compatible with JRuby releases earlier than
+ 1.0.3 or 1.1b2. If you must use JRuby 1.0.2 or earlier, please install the
+ 0.6 release.
+- Release coincides with JRuby 1.0.3 and JRuby 1.1b2 releases
+- Simultaneous support for JRuby trunk and 1.0 branch
+- Get rid of log_no_bench method, so we time SQL execution again.
+- Implement #select_rows
+- MySQL migration and quoting updates
+
+== 0.6
+
+- Gem is renamed to "activerecord-jdbc-adapter" to follow new conventions
+ introduced in Rails 2.0 for third-party adapters. Rails 2.0 compatibility is
+ introduced.
+- Add dependency on ActiveRecord >= 1.14 (from the Rails 1.1.x release)
+- New drivers (jdbc-XXX) and adapter (activerecord-jdbcXXX-adapter) gems
+ available separately. See the README.txt file for details.
+- Plain "jdbc" driver is still available if you want to use the full
+ driver/url way of specifying the driver.
+- More bugfixes to Oracle and SQLServer courtesy of Ola & ThoughtWorks
+
+== 0.5
+
+- Release coincides with JRuby 1.0.1 release
+- It is no longer necessary to specify :driver and :url configuration
+ parameters for the mysql, postgresql, oracle, derby, hsqldb, and h2
+ adapters. The previous configuration is still valid and compatible, but for
+ new applications, this makes it possible to use the exact same database.yml
+ configuration as Rails applications running under native Ruby.
+- JDBC drivers can now be dynamically loaded by Ruby code, without being on
+ the classpath prior to launching JRuby. Simply use "require
+ 'jdbc-driver.jar'" in JRuby code to add it to the runtime classpath.
+- Updates to HSQL, MS SQLServer, Postgres, Oracle and Derby adapters
+
+== 0.4
+
+- Release coincides with JRuby 1.0 release
+- Shoring up PostgreSQL (courtesy Dudley Flanders) and HSQL (courtesy Matthew
+ Williams)
+- Fix timestamps on Oracle to use DATE (as everything else)
+- Derby fixes: Fix for open result set issue, better structure dump, quoting,
+ column type changing
+- Sybase type recognition fix (courtesy Dean Mao)
+
+== 0.3.1
+
+- Derby critical fixes shortly after 0.3
+
+== 0.3
+
+- Release coincides with JRuby 1.0.0RC1 release
+- Improvements for Derby, Postgres, and Oracle, all of which are running
+ > 95% of AR tests
+
+== 0.2.4
+
+- Release coincides with JRuby 0.9.9 release
+- JRuby 0.9.9 is required
+- MySQL close to 100% working
+- Derby improvements
+- DECIMAL/NUMERIC/FLOAT/REAL bugs fixed with type recognition for Oracle,
+ Postgres, etc.
+- HSQLDB has regressed this release and may not be functioning; we'll get it
+ fixed for the next one
+
+== 0.2.3
+
+- Release coincides (and compatible) with JRuby 0.9.8 release
+- 8 bugs fixed: see http://rubyurl.com/0Da
+- Improvements and compatibility fixes for Rails 1.2.x
+
+== 0.2.1, 0.2.2
+
+- Early releases, added better support for multiple databases
+
+== 0.0.1
+
+- Initial, very alpha release
--- /dev/null
+Copyright (c) 2006-2008 Nick Sieger <nick@nicksieger.com>
+Copyright (c) 2006-2008 Ola Bini <ola.bini@gmail.com>
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--- /dev/null
+History.txt
+Manifest.txt
+README.txt
+Rakefile
+LICENSE.txt
+lib/active_record/connection_adapters/cachedb_adapter.rb
+lib/active_record/connection_adapters/derby_adapter.rb
+lib/active_record/connection_adapters/h2_adapter.rb
+lib/active_record/connection_adapters/hsqldb_adapter.rb
+lib/active_record/connection_adapters/informix_adapter.rb
+lib/active_record/connection_adapters/jdbc_adapter.rb
+lib/active_record/connection_adapters/jdbc_adapter_spec.rb
+lib/active_record/connection_adapters/jndi_adapter.rb
+lib/active_record/connection_adapters/mysql_adapter.rb
+lib/active_record/connection_adapters/oracle_adapter.rb
+lib/active_record/connection_adapters/postgresql_adapter.rb
+lib/active_record/connection_adapters/sqlite3_adapter.rb
+lib/jdbc_adapter/jdbc_cachedb.rb
+lib/jdbc_adapter/jdbc_db2.rb
+lib/jdbc_adapter/jdbc_derby.rb
+lib/jdbc_adapter/jdbc_firebird.rb
+lib/jdbc_adapter/jdbc_hsqldb.rb
+lib/jdbc_adapter/jdbc_informix.rb
+lib/jdbc_adapter/jdbc_mimer.rb
+lib/jdbc_adapter/jdbc_mssql.rb
+lib/jdbc_adapter/jdbc_mysql.rb
+lib/jdbc_adapter/jdbc_oracle.rb
+lib/jdbc_adapter/jdbc_postgre.rb
+lib/jdbc_adapter/jdbc_sqlite3.rb
+lib/jdbc_adapter/jdbc_sybase.rb
+lib/jdbc_adapter/missing_functionality_helper.rb
+lib/jdbc_adapter/rake_tasks.rb
+lib/jdbc_adapter/tsql_helper.rb
+lib/jdbc_adapter/version.rb
+lib/jdbc_adapter.rb
+lib/jdbc_adapter/jdbc_adapter_internal.jar
+test/activerecord/connection_adapters/type_conversion_test.rb
+test/activerecord/connections/native_jdbc_mysql/connection.rb
+test/cachedb_simple_test.rb
+test/db/cachedb.rb
+test/db/db2.rb
+test/db/derby.rb
+test/db/h2.rb
+test/db/hsqldb.rb
+test/db/informix.rb
+test/db/jdbc.rb
+test/db/jndi_config.rb
+test/db/logger.rb
+test/db/mssql.rb
+test/db/mysql.rb
+test/db/oracle.rb
+test/db/postgres.rb
+test/db/sqlite3.rb
+test/db2_simple_test.rb
+test/derby_multibyte_test.rb
+test/derby_simple_test.rb
+test/generic_jdbc_connection_test.rb
+test/h2_simple_test.rb
+test/has_many_through.rb
+test/hsqldb_simple_test.rb
+test/informix_simple_test.rb
+test/jdbc_adapter/jdbc_db2_test.rb
+test/jdbc_adapter/jdbc_sybase_test.rb
+test/jdbc_common.rb
+test/jndi_callbacks_test.rb
+test/jndi_test.rb
+test/manualTestDatabase.rb
+test/minirunit/testConnect.rb
+test/minirunit/testH2.rb
+test/minirunit/testHsqldb.rb
+test/minirunit/testLoadActiveRecord.rb
+test/minirunit/testMysql.rb
+test/minirunit/testRawSelect.rb
+test/minirunit.rb
+test/models/add_not_null_column_to_table.rb
+test/models/auto_id.rb
+test/models/data_types.rb
+test/models/entry.rb
+test/models/reserved_word.rb
+test/mssql_simple_test.rb
+test/mysql_multibyte_test.rb
+test/mysql_simple_test.rb
+test/oracle_simple_test.rb
+test/postgres_reserved_test.rb
+test/postgres_simple_test.rb
+test/simple.rb
+test/sqlite3_simple_test.rb
+lib/jdbc_adapter/jdbc.rake
+src/java/jdbc_adapter/JdbcAdapterInternalService.java
+src/java/jdbc_adapter/JdbcConnectionFactory.java
+src/java/jdbc_adapter/JdbcDerbySpec.java
+src/java/jdbc_adapter/JdbcMySQLSpec.java
+src/java/jdbc_adapter/SQLBlock.java
--- /dev/null
+activerecord-jdbc-adapter is a database adapter for Rails' ActiveRecord
+component that can be used with JRuby[http://www.jruby.org/]. It allows use of
+virtually any JDBC-compliant database with your JRuby on Rails application.
+
+== Databases
+
+What's there, and what is not there:
+
+* MySQL - Complete support
+* PostgreSQL - Complete support
+* Oracle - Complete support
+* Microsoft SQL Server - Complete support except for change_column_default
+* DB2 - Complete, except for the migrations:
+ * change_column
+ * change_column_default
+ * remove_column
+ * rename_column
+ * add_index
+ * remove_index
+ * rename_table
+* FireBird - Complete, except for change_column_default and rename_column
+* Derby - Complete, except for:
+ * change_column
+ * change_column_default
+ * remove_column
+ * rename_column
+* HSQLDB - Complete
+* H2 - Complete
+* SQLite3 - work in progress
+* Informix - Fairly complete support, all tests pass and migrations appear to work. Comments welcome.
+
+Other databases will require testing and likely a custom configuration module.
+Please join the jruby-extras
+mailing-list[http://rubyforge.org/mail/?group_id=2014] to help us discover
+support for more databases.
+
+== Using ActiveRecord JDBC
+
+=== Inside Rails
+
+To use activerecord-jdbc-adapter with JRuby on Rails:
+
+1. Choose the adapter you wish to gem install. The following pre-packaged
+ adapters are available:
+
+ * base jdbc (<tt>activerecord-jdbc-adapter</tt>). Supports all available databases via JDBC, but requires you to download and manually install the database vendor's JDBC driver .jar file.
+ * mysql (<tt>activerecord-jdbcmysql-adapter</tt>)
+ * postgresql (<tt>activerecord-jdbcpostgresql-adapter</tt>)
+ * derby (<tt>activerecord-jdbcderby-adapter</tt>)
+ * hsqldb (<tt>activerecord-jdbchsqldb-adapter</tt>)
+ * h2 (<tt>activerecord-jdbch2-adapter</tt>)
+
+2. If you're using Rails 2.0, you may skip to the next step. For Rails prior to
+ version 2.0, you'll need to add one-time setup to your config/environment.rb
+ file in your Rails application. Add the following lines just before the
+ <code>Rails::Initializer</code>. (If you're using activerecord-jdbc-adapter
+ under the old gem name used in versions 0.5 and earlier (ActiveRecord-JDBC),
+ replace 'activerecord-jdbc-adapter' with 'ActiveRecord-JDBC' below.)
+
+ if RUBY_PLATFORM =~ /java/
+ require 'rubygems'
+ gem 'activerecord-jdbc-adapter'
+ require 'jdbc_adapter'
+ end
+
+3. Configure your database.yml to use the <code>jdbc</code> adapter. For mysql,
+ postgres, derby, oracle, hsqldb, h2, and informix you can simply configure
+ the database in the normal Rails style. If you use one of the convenience
+ 'activerecord-jdbcXXX-adapter' adapters, be sure and put a 'jdbc' prefix in
+ front of the database adapter name as shown below.
+
+ development:
+ adapter: jdbcmysql
+ username: blog
+ password:
+ hostname: localhost
+ database: weblog_development
+
+For other databases, you'll need to know the database driver class and URL.
+Example:
+
+ development:
+ adapter: jdbc
+ username: blog
+ password:
+ driver: com.mysql.jdbc.Driver
+ url: jdbc:mysql://localhost:3306/weblog_development
+
+=== Standalone, with ActiveRecord
+
+1. Install the gem with JRuby:
+
+ jruby -S gem install activerecord-jdbc-adapter
+
+ If you wish to use the adapter for a specific database, you can install it
+ directly and a driver gem will be installed as well:
+
+ jruby -S gem install activerecord-jdbcderby-adapter
+
+2. If using ActiveRecord 2.0 (Rails 2.0) or greater, you can skip to the next
+ step. Otherwise, ensure the following code gets executed in your script:
+
+ require 'rubygems'
+ gem 'activerecord-jdbc-adapter'
+ require 'jdbc_adapter'
+ require 'active_record'
+
+3. After this you can establish a JDBC connection like this:
+
+ ActiveRecord::Base.establish_connection(
+ :adapter => 'jdbcderby',
+ :database => "db/my-database"
+ )
+
+ or like this (but requires that you manually put the driver jar on the classpath):
+
+ ActiveRecord::Base.establish_connection(
+ :adapter => 'jdbc',
+ :driver => 'org.apache.derby.jdbc.EmbeddedDriver',
+ :url => 'jdbc:derby:test_ar;create=true'
+ )
+
+== Getting the source
+
+The source for activerecord-jdbc-adapter is available using git.
+
+ git clone git://github.com/nicksieger/activerecord-jdbc-adapter.git
+
+== Running AR-JDBC's Tests
+
+Drivers for 4 open-source databases are included. Provided you have MySQL
+installed, you can simply type <tt>jruby -S rake</tt> to run the tests. A
+database named <tt>weblog_development</tt> is needed beforehand, with a
+connection user of "blog" and an empty password.
+
+== Authors
+
+This project was written by Nick Sieger <nick@nicksieger.com> and Ola Bini
+<olabini@gmail.com> with lots of help from the JRuby community.
+
+== License
+
+activerecord-jdbc-adapter is released under a BSD license. See the LICENSE file
+included with the distribution for details.
+
+Open-source driver gems for activerecord-jdbc-adapter are licensed under the
+same license as the databases' own drivers. See each driver gem's
+LICENSE.txt file for details.
--- /dev/null
+require 'rake'
+require 'rake/testtask'
+
+task :default => [:java_compile, :test]
+
+def java_classpath_arg # myriad of ways to discover JRuby classpath
+ begin
+ cpath = Java::java.lang.System.getProperty('java.class.path').split(File::PATH_SEPARATOR)
+ cpath += Java::java.lang.System.getProperty('sun.boot.class.path').split(File::PATH_SEPARATOR)
+ jruby_cpath = cpath.compact.join(File::PATH_SEPARATOR)
+ rescue => e
+ end
+ unless jruby_cpath
+ jruby_cpath = ENV['JRUBY_PARENT_CLASSPATH'] || ENV['JRUBY_HOME'] &&
+ FileList["#{ENV['JRUBY_HOME']}/lib/*.jar"].join(File::PATH_SEPARATOR)
+ end
+ jruby_cpath ? "-cp \"#{jruby_cpath}\"" : ""
+end
+
+desc "Compile the native Java code."
+task :java_compile do
+ pkg_classes = File.join(*%w(pkg classes))
+ jar_name = File.join(*%w(lib jdbc_adapter jdbc_adapter_internal.jar))
+ mkdir_p pkg_classes
+ sh "javac -target 1.5 -source 1.5 -d pkg/classes #{java_classpath_arg} #{FileList['src/java/**/*.java'].join(' ')}"
+ sh "jar cf #{jar_name} -C #{pkg_classes} ."
+end
+file "lib/jdbc_adapter/jdbc_adapter_internal.jar" => :java_compile
+
+task :filelist do
+ puts FileList['pkg/**/*'].inspect
+end
+
+if RUBY_PLATFORM =~ /java/
+ # TODO: add more databases into the standard tests here.
+ task :test => [:test_mysql, :test_jdbc, :test_derby, :test_hsqldb, :test_h2, :test_sqlite3]
+else
+ task :test => [:test_mysql]
+end
+
+FileList['drivers/*'].each do |d|
+ next unless File.directory?(d)
+ driver = File.basename(d)
+ Rake::TestTask.new("test_#{driver}") do |t|
+ files = FileList["test/#{driver}*test.rb"]
+ if driver == "derby"
+ files << 'test/activerecord/connection_adapters/type_conversion_test.rb'
+ end
+ t.ruby_opts << "-rjdbc/#{driver}"
+ t.test_files = files
+ t.libs << "test" << "#{d}/lib"
+ end
+end
+
+Rake::TestTask.new(:test_jdbc) do |t|
+ t.test_files = FileList['test/generic_jdbc_connection_test.rb', 'test/jndi_callbacks_test.rb']
+ t.libs << 'test' << 'drivers/mysql/lib'
+end
+
+Rake::TestTask.new(:test_jndi) do |t|
+ t.test_files = FileList['test/jndi_test.rb']
+ t.libs << 'test' << 'drivers/derby/lib'
+end
+
+task :test_postgresql => [:test_postgres]
+task :test_pgsql => [:test_postgres]
+
+# Ensure oracle driver is on your classpath before launching rake
+Rake::TestTask.new(:test_oracle) do |t|
+ t.test_files = FileList['test/oracle_simple_test.rb']
+ t.libs << 'test'
+end
+
+# Ensure DB2 driver is on your classpath before launching rake
+Rake::TestTask.new(:test_db2) do |t|
+ t.test_files = FileList['test/db2_simple_test.rb']
+ t.libs << 'test'
+end
+
+# Ensure InterSystems CacheDB driver is on your classpath before launching rake
+Rake::TestTask.new(:test_cachedb) do | t |
+ t.test_files = FileList[ 'test/cachedb_simple_test.rb' ]
+ t.libs << 'test'
+end
+
+# Ensure that the jTDS driver in on your classpath before launching rake
+Rake::TestTask.new(:test_mssql) do | t |
+ t.test_files = FileList[ 'test/mssql_simple_test.rb' ]
+ t.libs << 'test'
+end
+
+# Ensure that the Informix driver is on your classpath before launching rake
+Rake::TestTask.new(:test_informix) do |t|
+ t.test_files = FileList[ 'test/informix_simple_test.rb' ]
+ t.libs << 'test'
+end
+
+# Tests for JDBC adapters that don't require a database.
+Rake::TestTask.new(:test_jdbc_adapters) do | t |
+ t.test_files = FileList[ 'test/jdbc_adapter/jdbc_sybase_test.rb' ]
+ t.libs << 'test'
+end
+
+MANIFEST = FileList["History.txt", "Manifest.txt", "README.txt",
+ "Rakefile", "LICENSE.txt", "lib/**/*.rb", "lib/jdbc_adapter/jdbc_adapter_internal.jar", "test/**/*.rb",
+ "lib/**/*.rake", "src/**/*.java"]
+
+file "Manifest.txt" => :manifest
+task :manifest do
+ File.open("Manifest.txt", "w") {|f| MANIFEST.each {|n| f << "#{n}\n"} }
+end
+Rake::Task['manifest'].invoke # Always regen manifest, so Hoe has up-to-date list of files
+
+require File.dirname(__FILE__) + "/lib/jdbc_adapter/version"
+begin
+ require 'hoe'
+ Hoe.new("activerecord-jdbc-adapter", JdbcAdapter::Version::VERSION) do |p|
+ p.rubyforge_name = "jruby-extras"
+ p.url = "http://jruby-extras.rubyforge.org/activerecord-jdbc-adapter"
+ p.author = "Nick Sieger, Ola Bini and JRuby contributors"
+ p.email = "nick@nicksieger.com, ola.bini@gmail.com"
+ p.summary = "JDBC adapter for ActiveRecord, for use within JRuby on Rails."
+ p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
+ p.description = p.paragraphs_of('README.txt', 0...1).join("\n\n")
+ end.spec.dependencies.delete_if { |dep| dep.name == "hoe" }
+rescue LoadError
+ puts "You really need Hoe installed to be able to package this gem"
+rescue => e
+ puts "ignoring error while loading hoe: #{e.to_s}"
+end
+
+def rake(*args)
+ ruby "-S", "rake", *args
+end
+
+%w(test package install_gem release clean).each do |task|
+ desc "Run rake #{task} on all available adapters and drivers"
+ task "all:#{task}" => task
+end
+
+(Dir["drivers/*/Rakefile"] + Dir["adapters/*/Rakefile"]).each do |rakefile|
+ dir = File.dirname(rakefile)
+ prefix = dir.sub(%r{/}, ':')
+ tasks = %w(package install_gem debug_gem clean)
+ tasks << "test" if File.directory?(File.join(dir, "test"))
+ tasks.each do |task|
+ desc "Run rake #{task} on #{dir}"
+ task "#{prefix}:#{task}" do
+ Dir.chdir(dir) do
+ rake task
+ end
+ end
+ task "#{File.dirname(dir)}:#{task}" => "#{prefix}:#{task}"
+ task "all:#{task}" => "#{prefix}:#{task}"
+ end
+ desc "Run rake release on #{dir}"
+ task "#{prefix}:release" do
+ Dir.chdir(dir) do
+ version = nil
+ if dir =~ /adapters/
+ version = ENV['VERSION']
+ else
+ Dir["lib/**/*.rb"].each do |file|
+ version ||= File.open(file) {|f| f.read =~ /VERSION = "([^"]+)"/ && $1}
+ end
+ end
+ rake "release", "VERSION=#{version}"
+ end
+ end
+ # Only release adapters synchronously with main release. Drivers are versioned
+ # according to their JDBC driver versions.
+ if dir =~ /adapters/
+ task "adapters:release" => "#{prefix}:release"
+ task "all:release" => "#{prefix}:release"
+ end
+end
+
+require 'rake/clean'
+CLEAN.include 'derby*', 'test.db.*','test/reports', 'test.sqlite3','lib/**/*.jar','manifest.mf', '*.log'
--- /dev/null
+require 'active_record/connection_adapters/jdbc_adapter'
--- /dev/null
+tried_gem = false
+begin
+ require "jdbc/derby"
+rescue LoadError
+ unless tried_gem
+ require 'rubygems'
+ gem "jdbc-derby"
+ tried_gem = true
+ retry
+ end
+ # trust that the derby jar is already present
+end
+require 'active_record/connection_adapters/jdbc_adapter'
\ No newline at end of file
--- /dev/null
+require 'active_record/connection_adapters/jdbc_adapter'
\ No newline at end of file
--- /dev/null
+tried_gem = false
+begin
+ require "jdbc/hsqldb"
+rescue LoadError
+ unless tried_gem
+ require 'rubygems'
+ gem "jdbc-hsqldb"
+ tried_gem = true
+ retry
+ end
+ # trust that the hsqldb jar is already present
+end
+require 'active_record/connection_adapters/jdbc_adapter'
\ No newline at end of file
--- /dev/null
+require 'active_record/connection_adapters/jdbc_adapter'
--- /dev/null
+require 'active_record/connection_adapters/abstract_adapter'
+require 'java'
+require 'active_record/connection_adapters/jdbc_adapter_spec'
+require 'jdbc_adapter/jdbc_adapter_internal'
+require 'bigdecimal'
+
+begin
+ require 'jdbc_adapter/rake_tasks'
+rescue LoadError
+end if defined?(RAILS_ROOT)
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module SchemaStatements
+ # The original implementation of this had a bug that modified native_database_types.
+ # This version allows us to cache that value.
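+ # For example (illustrative values), if the cached native :string type is
+ # { :name => "varchar", :limit => 255 }:
+ #   type_to_sql(:string)      #=> "varchar(255)"
+ #   type_to_sql(:string, 40)  #=> "varchar(40)"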
+ def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
+ native = native_database_types[type.to_s.downcase.to_sym]
+ column_type_sql = native.is_a?(Hash) ? native[:name] : native
+ if type == :decimal # ignore limit, use precision and scale
+ precision ||= native[:precision]
+ scale ||= native[:scale]
+ if precision
+ if scale
+ column_type_sql += "(#{precision},#{scale})"
+ else
+ column_type_sql += "(#{precision})"
+ end
+ else
+ raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified" if scale
+ end
+ column_type_sql
+ else
+ limit ||= native[:limit]
+ column_type_sql += "(#{limit})" if limit
+ column_type_sql
+ end
+ end
+ end
+ end
+end
+
+module JdbcSpec
+ module ActiveRecordExtensions
+ def jdbc_connection(config)
+ connection = ::ActiveRecord::ConnectionAdapters::JdbcConnection.new(config)
+ ::ActiveRecord::ConnectionAdapters::JdbcAdapter.new(connection, logger, config)
+ end
+ alias jndi_connection jdbc_connection
+
+ def embedded_driver(config)
+ config[:username] ||= "sa"
+ config[:password] ||= ""
+ jdbc_connection(config)
+ end
+ end
+end
+
+module ActiveRecord
+ class Base
+ extend JdbcSpec::ActiveRecordExtensions
+
+ alias :attributes_with_quotes_pre_oracle :attributes_with_quotes
+ def attributes_with_quotes(include_primary_key = true, *args) #:nodoc:
+ aq = attributes_with_quotes_pre_oracle(include_primary_key, *args)
+ if connection.class == ConnectionAdapters::JdbcAdapter && (connection.is_a?(JdbcSpec::Oracle) || connection.is_a?(JdbcSpec::Mimer))
+ aq[self.class.primary_key] = "?" if include_primary_key && aq[self.class.primary_key].nil?
+ end
+ aq
+ end
+ end
+
+ module ConnectionAdapters
+ module Java
+ Class = java.lang.Class
+ URL = java.net.URL
+ URLClassLoader = java.net.URLClassLoader
+ end
+
+ module Jdbc
+ Mutex = java.lang.Object.new
+ DriverManager = java.sql.DriverManager
+ Statement = java.sql.Statement
+ Types = java.sql.Types
+
+ # some symbolic constants for the benefit of the JDBC-based
+ # JdbcConnection#indexes method
+ module IndexMetaData
+ INDEX_NAME = 6
+ NON_UNIQUE = 4
+ TABLE_NAME = 3
+ COLUMN_NAME = 9
+ end
+
+ module TableMetaData
+ TABLE_CAT = 1
+ TABLE_SCHEM = 2
+ TABLE_NAME = 3
+ TABLE_TYPE = 4
+ end
+
+ module PrimaryKeyMetaData
+ COLUMN_NAME = 4
+ end
+
+ end
+
+ # I want to use JDBC's DatabaseMetaData#getTypeInfo to choose the best native types to
+ # use for ActiveRecord's Adapter#native_database_types in a database-independent way,
+ # but apparently a database driver can return multiple types for a given
+ # java.sql.Types constant. So this type converter uses some heuristics to try to pick
+ # the best (most common) type to use. It's not great, it would be better to just
+ # delegate to each database's existing AR adapter's native_database_types method, but I
+ # wanted to try to do this in a way that didn't pull in all the other adapters as
+ # dependencies. Suggestions appreciated.
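+ # For instance (illustrative): if getTypeInfo reports rows for both "varchar" and
+ # "nvarchar" with data_type == java.sql.Types::VARCHAR, the :string selectors below
+ # first keep both rows (same data_type) and then keep only the row whose type_name
+ # matches /^varchar/i, leaving a single candidate.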
+ class JdbcTypeConverter
+ # The basic ActiveRecord types, mapped to an array of procs that are used to #select
+ # the best type. The procs are used as selectors in order until there is only one
+ # type left. If all the selectors are applied and there is still more than one
+ # type, an exception will be raised.
+ AR_TO_JDBC_TYPES = {
+ :string => [ lambda {|r| Jdbc::Types::VARCHAR == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^varchar/i},
+ lambda {|r| r['type_name'] =~ /^varchar$/i},
+ lambda {|r| r['type_name'] =~ /varying/i}],
+ :text => [ lambda {|r| [Jdbc::Types::LONGVARCHAR, Jdbc::Types::CLOB].include?(r['data_type'].to_i)},
+ lambda {|r| r['type_name'] =~ /^text$/i}, # For Informix
+ lambda {|r| r['type_name'] =~ /^(text|clob)$/i},
+ lambda {|r| r['type_name'] =~ /^character large object$/i},
+ lambda {|r| r['sql_data_type'] == 2005}],
+ :integer => [ lambda {|r| Jdbc::Types::INTEGER == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^integer$/i},
+ lambda {|r| r['type_name'] =~ /^int4$/i},
+ lambda {|r| r['type_name'] =~ /^int$/i}],
+ :decimal => [ lambda {|r| Jdbc::Types::DECIMAL == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^decimal$/i},
+ lambda {|r| r['type_name'] =~ /^numeric$/i},
+ lambda {|r| r['type_name'] =~ /^number$/i},
+ lambda {|r| r['type_name'] =~ /^real$/i},
+ lambda {|r| r['precision'] == '38'},
+ lambda {|r| r['data_type'] == '2'}],
+ :float => [ lambda {|r| [Jdbc::Types::FLOAT,Jdbc::Types::DOUBLE, Jdbc::Types::REAL].include?(r['data_type'].to_i)},
+ lambda {|r| r['data_type'].to_i == Jdbc::Types::REAL}, #Prefer REAL to DOUBLE for Postgresql
+ lambda {|r| r['type_name'] =~ /^float/i},
+ lambda {|r| r['type_name'] =~ /^double$/i},
+ lambda {|r| r['type_name'] =~ /^real$/i},
+ lambda {|r| r['precision'] == '15'}],
+ :datetime => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^datetime$/i},
+ lambda {|r| r['type_name'] =~ /^timestamp$/i},
+ lambda {|r| r['type_name'] =~ /^date/i},
+ lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
+ :timestamp => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^timestamp$/i},
+ lambda {|r| r['type_name'] =~ /^datetime/i},
+ lambda {|r| r['type_name'] =~ /^date/i},
+ lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
+ :time => [ lambda {|r| Jdbc::Types::TIME == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^time$/i},
+ lambda {|r| r['type_name'] =~ /^datetime/i}, # For Informix
+ lambda {|r| r['type_name'] =~ /^date/i},
+ lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
+ :date => [ lambda {|r| Jdbc::Types::DATE == r['data_type'].to_i},
+ lambda {|r| r['type_name'] =~ /^date$/i},
+ lambda {|r| r['type_name'] =~ /^date/i},
+ lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
+ :binary => [ lambda {|r| [Jdbc::Types::LONGVARBINARY,Jdbc::Types::BINARY,Jdbc::Types::BLOB].include?(r['data_type'].to_i)},
+ lambda {|r| r['type_name'] =~ /^blob/i},
+ lambda {|r| r['type_name'] =~ /sub_type 0$/i}, # For FireBird
+ lambda {|r| r['type_name'] =~ /^varbinary$/i}, # We want this sucker for Mimer
+ lambda {|r| r['type_name'] =~ /^binary$/i}, ],
+ :boolean => [ lambda {|r| [Jdbc::Types::TINYINT].include?(r['data_type'].to_i)},
+ lambda {|r| r['type_name'] =~ /^bool/i},
+ lambda {|r| r['data_type'] == '-7'},
+ lambda {|r| r['type_name'] =~ /^tinyint$/i},
+ lambda {|r| r['type_name'] =~ /^decimal$/i},
+ lambda {|r| r['type_name'] =~ /^integer$/i}]
+ }
+
+ def initialize(types)
+ @types = types
+ @types.each {|t| t['type_name'] ||= t['local_type_name']} # Sybase driver seems to want 'local_type_name'
+ end
+
+ def choose_best_types
+ type_map = {}
+ @types.each do |row|
+ name = row['type_name'].downcase
+ k = name.to_sym
+ type_map[k] = { :name => name }
+ type_map[k][:limit] = row['precision'].to_i if row['precision']
+ end
+
+ AR_TO_JDBC_TYPES.keys.each do |k|
+ typerow = choose_type(k)
+ type_map[k] = { :name => typerow['type_name'].downcase }
+ case k
+ when :integer, :string, :decimal
+ type_map[k][:limit] = typerow['precision'] && typerow['precision'].to_i
+ when :boolean
+ type_map[k][:limit] = 1
+ end
+ end
+ type_map
+ end
+
+ def choose_type(ar_type)
+ procs = AR_TO_JDBC_TYPES[ar_type]
+ types = @types
+ procs.each do |p|
+ new_types = types.select(&p)
+ new_types = new_types.inject([]) do |typs,t|
+ typs << t unless typs.detect {|el| el['type_name'] == t['type_name']}
+ typs
+ end
+ return new_types.first if new_types.length == 1
+ types = new_types if new_types.length > 0
+ end
+ raise "unable to choose type for #{ar_type} from:\n#{types.collect{|t| t['type_name']}.inspect}"
+ end
+ end
+
+ class JdbcDriver
+ def initialize(name)
+ @name = name
+ end
+
+ def driver_class
+ @driver_class ||= begin
+ driver_class_const = (@name[0...1].capitalize + @name[1..@name.length]).gsub(/\./, '_')
+ Jdbc::Mutex.synchronized do
+ unless Jdbc.const_defined?(driver_class_const)
+ driver_class_name = @name
+ Jdbc.module_eval do
+ include_class(driver_class_name) { driver_class_const }
+ end
+ end
+ end
+ driver_class = Jdbc.const_get(driver_class_const)
+ raise "You must specify a driver for your JDBC connection" unless driver_class
+ driver_class
+ end
+ end
+
+ def load
+ Jdbc::DriverManager.registerDriver(create)
+ end
+
+ def connection(url, user, pass)
+ Jdbc::DriverManager.getConnection(url, user, pass)
+ rescue
+ # bypass DriverManager to get around problem with dynamically loaded jdbc drivers
+ props = java.util.Properties.new
+ props.setProperty("user", user)
+ props.setProperty("password", pass)
+ create.connect(url, props)
+ end
+
+ def create
+ driver_class.new
+ end
+ end
+
+ class JdbcColumn < Column
+ attr_writer :limit, :precision
+
+ COLUMN_TYPES = ::JdbcSpec.constants.map{|c|
+ ::JdbcSpec.const_get c }.select{ |c|
+ c.respond_to? :column_selector }.map{|c|
+ c.column_selector }.inject({}) { |h,val|
+ h[val[0]] = val[1]; h }
+
+ def initialize(config, name, default, *args)
+ dialect = config[:dialect] || config[:driver]
+ for reg, func in COLUMN_TYPES
+ if reg === dialect.to_s
+ func.call(config,self)
+ end
+ end
+ super(name,default_value(default),*args)
+ init_column(name, default, *args)
+ end
+
+ def init_column(*args)
+ end
+
+ def default_value(val)
+ val
+ end
+ end
+
+ include_class "jdbc_adapter.JdbcConnectionFactory"
+
+ class JdbcConnection
+ attr_reader :adapter, :connection_factory
+
+ def initialize(config)
+ @config = config.symbolize_keys!
+ @config[:retry_count] ||= 5
+ @config[:connection_alive_sql] ||= "select 1"
+ if @config[:jndi]
+ begin
+ configure_jndi
+ rescue => e
+ warn "JNDI data source unavailable: #{e.message}; trying straight JDBC"
+ configure_jdbc
+ end
+ else
+ configure_jdbc
+ end
+ connection # force the connection to load
+ set_native_database_types
+ @stmts = {}
+ rescue Exception => e
+ raise "The driver encountered an error: #{e}"
+ end
+
+ def adapter=(adapt)
+ @adapter = adapt
+ @tps = {}
+ @native_types.each_pair {|k,v| @tps[k] = v.inject({}) {|memo,kv| memo.merge({kv.first => (kv.last.dup rescue kv.last)})}}
+ adapt.modify_types(@tps)
+ end
+
+ # Default JDBC introspection for index metadata on the JdbcConnection.
+ # This is currently used for migrations by JdbcSpec::HSQLDB and JdbcSpec::Derby
+ # indexes with a little filtering tacked on.
+ #
+ # JDBC index metadata is denormalized (multiple rows may be returned for
+ # one index, one row per column in the index), so a simple block-based
+ # filter like that used for tables doesn't really work here. Callers
+ # should filter the return from this method instead.
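+ # For example (illustrative): a two-column index "users_login_name" on
+ # (login, name) comes back from getIndexInfo as two rows; the loop below folds
+ # them into a single IndexDefinition whose columns array is ["login", "name"].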
+ def indexes(table_name, name = nil, schema_name = nil)
+ with_connection_retry_guard do |conn|
+ metadata = conn.getMetaData
+ begin
+ unless String === table_name
+ table_name = table_name.to_s
+ else
+ table_name = table_name.dup
+ end
+ table_name.upcase! if metadata.storesUpperCaseIdentifiers
+ table_name.downcase! if metadata.storesLowerCaseIdentifiers
+ resultset = metadata.getIndexInfo(nil, schema_name, table_name, false, false)
+ primary_keys = primary_keys(table_name)
+ indexes = []
+ current_index = nil
+ while resultset.next
+ index_name = resultset.get_string(Jdbc::IndexMetaData::INDEX_NAME)
+ next unless index_name
+ index_name.downcase!
+ column_name = resultset.get_string(Jdbc::IndexMetaData::COLUMN_NAME).downcase
+
+ next if primary_keys.include? column_name
+
+ # We are working on a new index
+ if current_index != index_name
+ current_index = index_name
+ table_name = resultset.get_string(Jdbc::IndexMetaData::TABLE_NAME).downcase
+ non_unique = resultset.get_boolean(Jdbc::IndexMetaData::NON_UNIQUE)
+
+ # empty list for column names, we'll add to that in just a bit
+ indexes << IndexDefinition.new(table_name, index_name, !non_unique, [])
+ end
+
+ # One or more columns can be associated with an index
+ indexes.last.columns << column_name
+ end
+ resultset.close
+ indexes
+ ensure
+ metadata.close rescue nil
+ end
+ end
+ end
+
+ def jndi_connection?
+ @jndi_connection
+ end
+
+ private
+ def configure_jndi
+ jndi = @config[:jndi].to_s
+ ctx = javax.naming.InitialContext.new
+ ds = ctx.lookup(jndi)
+ @connection_factory = JdbcConnectionFactory.impl do
+ ds.connection
+ end
+ unless @config[:driver]
+ @config[:driver] = connection.meta_data.connection.java_class.name
+ end
+ @jndi_connection = true
+ end
+
+ def configure_jdbc
+ driver = @config[:driver].to_s
+ user = @config[:username].to_s
+ pass = @config[:password].to_s
+ url = @config[:url].to_s
+
+ unless driver && url
+ raise ::ActiveRecord::ConnectionFailed, "jdbc adapter requires driver class and url"
+ end
+
+ if driver =~ /mysql/i && url !~ /#{Regexp.quote(JdbcSpec::MySQL::URL_OPTIONS)}/
+ div = url =~ /\?/ ? '&' : '?'
+ url = "#{url}#{div}#{JdbcSpec::MySQL::URL_OPTIONS}"
+ @config[:url] = url
+ end
+
+ jdbc_driver = JdbcDriver.new(driver)
+ jdbc_driver.load
+ @connection_factory = JdbcConnectionFactory.impl do
+ jdbc_driver.connection(url, user, pass)
+ end
+ end
+ end
+
+ class JdbcAdapter < AbstractAdapter
+ module ShadowCoreMethods
+ def alias_chained_method(meth, feature, target)
+ if instance_methods.include?("#{meth}_without_#{feature}")
+ alias_method "#{meth}_without_#{feature}".to_sym, target
+ else
+ alias_method meth, target
+ end
+ end
+ end
+
+ module CompatibilityMethods
+ def self.needed?(base)
+ !base.instance_methods.include?("quote_table_name")
+ end
+
+ def quote_table_name(name)
+ quote_column_name(name)
+ end
+ end
+
+ module ConnectionPoolCallbacks
+ def self.included(base)
+ base.checkin :on_checkin
+ base.checkout :on_checkout
+ end
+
+ def self.needed?
+ ActiveRecord::Base.respond_to?(:connection_pool)
+ end
+
+ def on_checkin
+ # default implementation does nothing
+ end
+
+ def on_checkout
+ # default implementation does nothing
+ end
+ end
+
+ module JndiConnectionPoolCallbacks
+ def self.prepare(adapter, conn)
+ if ActiveRecord::Base.respond_to?(:connection_pool) && conn.jndi_connection?
+ adapter.extend self
+ conn.disconnect! # disconnect initial connection in JdbcConnection#initialize
+ end
+ end
+
+ def on_checkin
+ disconnect!
+ end
+
+ def on_checkout
+ reconnect!
+ end
+ end
+
+ extend ShadowCoreMethods
+ include CompatibilityMethods if CompatibilityMethods.needed?(self)
+ include ConnectionPoolCallbacks if ConnectionPoolCallbacks.needed?
+
+ attr_reader :config
+
+ ADAPTER_TYPES = ::JdbcSpec.constants.map{|c|
+ ::JdbcSpec.const_get c }.select{ |c|
+ c.respond_to? :adapter_selector }.map{|c|
+ c.adapter_selector }.inject({}) { |h,val|
+ h[val[0]] = val[1]; h }
+
+ def initialize(connection, logger, config)
+ super(connection, logger)
+ @config = config
+ dialect = config[:dialect] || config[:driver]
+ for reg, func in ADAPTER_TYPES
+ if reg === dialect.to_s
+ func.call(@config,self)
+ end
+ end
+ connection.adapter = self
+ JndiConnectionPoolCallbacks.prepare(self, connection)
+ end
+
+ def modify_types(tp)
+ tp
+ end
+
+ def adapter_name #:nodoc:
+ 'JDBC'
+ end
+
+ def supports_migrations?
+ true
+ end
+
+ def native_database_types #:nodoc:
+ @connection.native_database_types
+ end
+
+ def database_name #:nodoc:
+ @connection.database_name
+ end
+
+ def native_sql_to_type(tp)
+ if /^(.*?)\(([0-9]+)\)/ =~ tp
+ tname = $1
+ limit = $2.to_i
+ ntype = native_database_types
+ if ntype[:primary_key] == tp
+ return :primary_key,nil
+ else
+ ntype.each do |name,val|
+ if name == :primary_key
+ next
+ end
+ if val[:name].downcase == tname.downcase && (val[:limit].nil? || val[:limit].to_i == limit)
+ return name,limit
+ end
+ end
+ end
+ elsif /^(.*?)/ =~ tp
+ tname = $1
+ ntype = native_database_types
+ if ntype[:primary_key] == tp
+ return :primary_key,nil
+ else
+ ntype.each do |name,val|
+ if val[:name].downcase == tname.downcase && val[:limit].nil?
+ return name,nil
+ end
+ end
+ end
+ else
+ return :string,255
+ end
+ return nil,nil
+ end
+
+ def reconnect!
+ @connection.reconnect!
+ @connection
+ end
+
+ def disconnect!
+ @connection.disconnect!
+ end
+
+ def jdbc_select_all(sql, name = nil)
+ select(sql, name)
+ end
+ alias_chained_method :select_all, :query_cache, :jdbc_select_all
+
+ def select_rows(sql, name = nil)
+ rows = []
+ select(sql, name).each {|row| rows << row.values }
+ rows
+ end
+
+ def select_one(sql, name = nil)
+ select(sql, name).first
+ end
+
+ def execute(sql, name = nil)
+ log(sql, name) do
+ _execute(sql,name)
+ end
+ end
+
+      # We need to do it this way to allow Rails' stupid tests to always work
+      # even if we define a new execute method. Instead of mixing in a new
+      # execute, an _execute should be mixed in.
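+      # (The HSQLDB and MsSQL specs below override _execute for exactly this reason.)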
+ def _execute(sql, name = nil)
+ if JdbcConnection::select?(sql)
+ @connection.execute_query(sql)
+ elsif JdbcConnection::insert?(sql)
+ @connection.execute_insert(sql)
+ else
+ @connection.execute_update(sql)
+ end
+ end
+
+ def jdbc_update(sql, name = nil) #:nodoc:
+ execute(sql, name)
+ end
+ alias_chained_method :update, :query_dirty, :jdbc_update
+
+ def jdbc_insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
+ id = execute(sql, name = nil)
+ id_value || id
+ end
+ alias_chained_method :insert, :query_dirty, :jdbc_insert
+
+ def jdbc_columns(table_name, name = nil)
+ @connection.columns(table_name.to_s)
+ end
+ alias_chained_method :columns, :query_cache, :jdbc_columns
+
+ def tables
+ @connection.tables
+ end
+
+ def indexes(table_name, name = nil, schema_name = nil)
+ @connection.indexes(table_name, name, schema_name)
+ end
+
+ def begin_db_transaction
+ @connection.begin
+ end
+
+ def commit_db_transaction
+ @connection.commit
+ end
+
+ def rollback_db_transaction
+ @connection.rollback
+ end
+
+ def write_large_object(*args)
+ @connection.write_large_object(*args)
+ end
+
+ private
+ def select(sql, name=nil)
+ execute(sql,name)
+ end
+ end
+ end
+end
--- /dev/null
+
+require 'jdbc_adapter/jdbc_mimer'
+require 'jdbc_adapter/jdbc_hsqldb'
+require 'jdbc_adapter/jdbc_oracle'
+require 'jdbc_adapter/jdbc_postgre'
+require 'jdbc_adapter/jdbc_mysql'
+require 'jdbc_adapter/jdbc_derby'
+require 'jdbc_adapter/jdbc_firebird'
+require 'jdbc_adapter/jdbc_db2'
+require 'jdbc_adapter/jdbc_mssql'
+require 'jdbc_adapter/jdbc_cachedb'
+require 'jdbc_adapter/jdbc_sqlite3'
+require 'jdbc_adapter/jdbc_sybase'
+require 'jdbc_adapter/jdbc_informix'
--- /dev/null
+require 'active_record/connection_adapters/jdbc_adapter'
\ No newline at end of file
--- /dev/null
+tried_gem = false
+begin
+ require "jdbc/mysql"
+rescue LoadError
+ unless tried_gem
+ require 'rubygems'
+ gem "jdbc-mysql"
+ tried_gem = true
+ retry
+ end
+ # trust that the mysql jar is already present
+end
+require 'active_record/connection_adapters/jdbc_adapter'
--- /dev/null
+require 'active_record/connection_adapters/jdbc_adapter'
\ No newline at end of file
--- /dev/null
+tried_gem = false
+begin
+ require "jdbc/postgres"
+rescue LoadError
+ unless tried_gem
+ require 'rubygems'
+ gem "jdbc-postgres"
+ tried_gem = true
+ retry
+ end
+ # trust that the postgres jar is already present
+end
+require 'active_record/connection_adapters/jdbc_adapter'
\ No newline at end of file
--- /dev/null
+tried_gem = false
+begin
+ require "jdbc/sqlite3"
+rescue LoadError
+ unless tried_gem
+ require 'rubygems'
+ gem "jdbc-sqlite3"
+ tried_gem = true
+ retry
+ end
+ # trust that the sqlite jar is already present
+end
+require 'active_record/connection_adapters/jdbc_adapter'
--- /dev/null
+if RUBY_PLATFORM =~ /java/
+ begin
+ tried_gem ||= false
+ require 'active_record/version'
+ rescue LoadError
+ raise if tried_gem
+ require 'rubygems'
+ gem 'activerecord'
+ tried_gem = true
+ retry
+ end
+ if ActiveRecord::VERSION::MAJOR < 2
+ if defined?(RAILS_CONNECTION_ADAPTERS)
+ RAILS_CONNECTION_ADAPTERS << %q(jdbc)
+ else
+ RAILS_CONNECTION_ADAPTERS = %w(jdbc)
+ end
+ if ActiveRecord::VERSION::MAJOR == 1 && ActiveRecord::VERSION::MINOR == 14
+ require 'active_record/connection_adapters/jdbc_adapter'
+ end
+ else
+ require 'active_record'
+ require 'active_record/connection_adapters/jdbc_adapter'
+ end
+else
+ warn "ActiveRecord-JDBC is for use with JRuby only"
+end
--- /dev/null
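+# Rake helper: when a task with the given name already exists (as the stock
+# Rails db:* tasks do), clear its prerequisites and actions so the block
+# passed here becomes its only implementation; otherwise define it normally.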
+def redefine_task(*args, &block)
+ task_name = Hash === args.first ? args.first.keys[0] : args.first
+ existing_task = Rake.application.lookup task_name
+ if existing_task
+ class << existing_task; public :instance_variable_set; end
+ existing_task.instance_variable_set "@prerequisites", FileList[]
+ existing_task.instance_variable_set "@actions", []
+ end
+ task(*args, &block)
+end
+
+namespace :db do
+ if Rake::Task["db:create"]
+ redefine_task :create => :environment do
+ create_database(ActiveRecord::Base.configurations[RAILS_ENV])
+ end
+
+ class << self; alias_method :previous_create_database, :create_database; end
+ def create_database(config)
+ begin
+ ActiveRecord::Base.establish_connection(config)
+ ActiveRecord::Base.connection
+ rescue
+ begin
+ url = config['url']
+ if url
+ if url =~ /^(.*\/)/
+ url = $1
+ end
+ end
+
+ ActiveRecord::Base.establish_connection(config.merge({'database' => nil, 'url' => url}))
+ ActiveRecord::Base.connection.create_database(config['database'])
+ ActiveRecord::Base.establish_connection(config)
+ rescue
+ previous_create_database(config)
+ end
+ end
+ end
+
+ redefine_task :drop => :environment do
+ config = ActiveRecord::Base.configurations[RAILS_ENV]
+ begin
+ ActiveRecord::Base.establish_connection(config)
+ db = ActiveRecord::Base.connection.database_name
+ ActiveRecord::Base.connection.drop_database(db)
+ rescue
+ drop_database(config)
+ end
+ end
+ end
+
+ namespace :structure do
+ redefine_task :dump => :environment do
+ abcs = ActiveRecord::Base.configurations
+ ActiveRecord::Base.establish_connection(abcs[RAILS_ENV])
+ File.open("db/#{RAILS_ENV}_structure.sql", "w+") { |f| f << ActiveRecord::Base.connection.structure_dump }
+ if ActiveRecord::Base.connection.supports_migrations?
+ File.open("db/#{RAILS_ENV}_structure.sql", "a") { |f| f << ActiveRecord::Base.connection.dump_schema_information }
+ end
+ end
+ end
+
+ namespace :test do
+ redefine_task :clone_structure => [ "db:structure:dump", "db:test:purge" ] do
+ abcs = ActiveRecord::Base.configurations
+ abcs['test']['pg_params'] = '?allowEncodingChanges=true' if abcs['test']['adapter'] =~ /postgresql/i
+ ActiveRecord::Base.establish_connection(abcs["test"])
+ ActiveRecord::Base.connection.execute('SET foreign_key_checks = 0') if abcs["test"]["adapter"] =~ /mysql/i
+ IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split(";\n\n").each do |ddl|
+ ActiveRecord::Base.connection.execute(ddl.chomp(';'))
+ end
+ end
+
+ redefine_task :purge => :environment do
+ abcs = ActiveRecord::Base.configurations
+ config = abcs['test'].dup
+ if config['adapter'] =~ /postgresql/i
+ if config['url']
+ db = config['url'][/\/([^\/]*)$/, 1]
+          config['url'][/\/([^\/]*)$/, 1] = 'postgres' if db
+ else
+ db = config['database']
+ config['database'] = 'postgres'
+ end
+ ActiveRecord::Base.establish_connection(config)
+ else
+ ActiveRecord::Base.establish_connection(config)
+ db = ActiveRecord::Base.connection.database_name
+ end
+ ActiveRecord::Base.connection.recreate_database(db)
+ end
+ end
+end
--- /dev/null
+require 'jdbc_adapter/tsql_helper'
+
+module ::JdbcSpec
+ module ActiveRecordExtensions
+ def cachedb_connection( config )
+ config[:port] ||= 1972
+ config[:url] ||= "jdbc:Cache://#{config[:host]}:#{config[:port]}/#{ config[:database]}"
+ config[:driver] ||= "com.intersys.jdbc.CacheDriver"
+ jdbc_connection( config )
+ end
+ end
+
+ module CacheDB
+ include TSqlMethods
+
+ def self.column_selector
+ [ /cache/i, lambda { | cfg, col | col.extend( ::JdbcSpec::CacheDB::Column ) } ]
+ end
+
+ def self.adapter_selector
+ [ /cache/i, lambda { | cfg, adapt | adapt.extend( ::JdbcSpec::CacheDB ) } ]
+ end
+
+ module Column
+ end
+
+ def create_table(name, options = { })
+ super(name, options)
+ primary_key = options[:primary_key] || "id"
+ execute "ALTER TABLE #{name} ADD CONSTRAINT #{name}_PK PRIMARY KEY(#{primary_key})" unless options[:id] == false
+ end
+ end
+end
--- /dev/null
+module JdbcSpec
+ module DB2
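+    # DB2 and the Derby network server share a driver, so the selectors route
+    # jdbc:derby:net: URLs to the Derby spec and handle everything else here.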
+ def self.column_selector
+ [/db2/i, lambda {|cfg,col|
+ if cfg[:url] =~ /^jdbc:derby:net:/
+ col.extend(::JdbcSpec::Derby::Column)
+ else
+ col.extend(::JdbcSpec::DB2::Column)
+ end }]
+ end
+
+ def self.adapter_selector
+ [/db2/i, lambda {|cfg,adapt|
+ if cfg[:url] =~ /^jdbc:derby:net:/
+ adapt.extend(::JdbcSpec::Derby)
+ else
+ adapt.extend(::JdbcSpec::DB2)
+ end }]
+ end
+
+ module Column
+ def type_cast(value)
+ return nil if value.nil? || value =~ /^\s*null\s*$/i
+ case type
+ when :string then value
+ when :integer then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :primary_key then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :float then value.to_f
+ when :datetime then cast_to_date_or_time(value)
+ when :timestamp then cast_to_time(value)
+ when :time then cast_to_time(value)
+ else value
+ end
+ end
+ def cast_to_date_or_time(value)
+ return value if value.is_a? Date
+ return nil if value.blank?
+ guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
+ end
+
+ def cast_to_time(value)
+ return value if value.is_a? Time
+ time_array = ParseDate.parsedate value
+ time_array[0] ||= 2000; time_array[1] ||= 1; time_array[2] ||= 1;
+ Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
+ end
+
+ def guess_date_or_time(value)
+ (value.hour == 0 and value.min == 0 and value.sec == 0) ?
+ Date.new(value.year, value.month, value.day) : value
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = 'int generated by default as identity (start with 42) primary key'
+ tp[:string][:limit] = 255
+ tp[:integer][:limit] = nil
+ tp[:boolean][:limit] = nil
+ tp
+ end
+
+ def add_limit_offset!(sql, options)
+ if limit = options[:limit]
+ offset = options[:offset] || 0
+ sql.gsub!(/SELECT/i, 'SELECT B.* FROM (SELECT A.*, row_number() over () AS internal$rownum FROM (SELECT')
+ sql << ") A ) B WHERE B.internal$rownum > #{offset} AND B.internal$rownum <= #{limit + offset}"
+ end
+ end
+
+ def quote_column_name(column_name)
+ column_name
+ end
+
+ def quote(value, column = nil) # :nodoc:
+ if column && column.type == :primary_key
+ return value.to_s
+ end
+ if column && (column.type == :decimal || column.type == :integer) && value
+ return value.to_s
+ end
+ case value
+ when String
+ if column && column.type == :binary
+ "BLOB('#{quote_string(value)}')"
+ else
+ "'#{quote_string(value)}'"
+ end
+ else super
+ end
+ end
+
+ def quote_string(string)
+ string.gsub(/'/, "''") # ' (for ruby-mode)
+ end
+
+ def quoted_true
+ '1'
+ end
+
+ def quoted_false
+ '0'
+ end
+
+ def recreate_database(name)
+ do_not_drop = ["stmg_dbsize_info","hmon_atm_info","hmon_collection","policy"]
+ tables.each do |table|
+ unless do_not_drop.include?(table)
+ drop_table(table)
+ end
+ end
+ end
+
+ def remove_index(table_name, options = { })
+ execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
+ end
+
+ # This method makes tests pass without understanding why.
+ # Don't use this in production.
+ def columns(table_name, name = nil)
+ super.select do |col|
+ # strip out "magic" columns from DB2 (?)
+ !/rolename|roleid|create_time|auditpolicyname|auditpolicyid|remarks/.match(col.name)
+ end
+ end
+
+ def add_quotes(name)
+ return name unless name
+ %Q{"#{name}"}
+ end
+
+ def strip_quotes(str)
+ return str unless str
+ return str unless /^(["']).*\1$/ =~ str
+ str[1..-2]
+ end
+
+ def expand_double_quotes(name)
+ return name unless name && name['"']
+ name.gsub(/"/,'""')
+ end
+
+
+ def structure_dump #:nodoc:
+ definition=""
+ rs = @connection.connection.meta_data.getTables(nil,nil,nil,["TABLE"].to_java(:string))
+ while rs.next
+ tname = rs.getString(3)
+ definition << "CREATE TABLE #{tname} (\n"
+ rs2 = @connection.connection.meta_data.getColumns(nil,nil,tname,nil)
+ first_col = true
+ while rs2.next
+ col_name = add_quotes(rs2.getString(4));
+ default = ""
+ d1 = rs2.getString(13)
+ default = d1 ? " DEFAULT #{d1}" : ""
+
+ type = rs2.getString(6)
+ col_size = rs2.getString(7)
+ nulling = (rs2.getString(18) == 'NO' ? " NOT NULL" : "")
+ create_col_string = add_quotes(expand_double_quotes(strip_quotes(col_name))) +
+ " " +
+ type +
+ "" +
+ nulling +
+ default
+ if !first_col
+ create_col_string = ",\n #{create_col_string}"
+ else
+ create_col_string = " #{create_col_string}"
+ end
+
+ definition << create_col_string
+
+ first_col = false
+ end
+ definition << ");\n\n"
+ end
+ definition
+ end
+
+ def dump_schema_information
+ begin
+ if (current_schema = ActiveRecord::Migrator.current_version) > 0
+          # TODO: Find a way to get the DB2 instance name to properly form the statement
+ return "INSERT INTO DB2INST2.SCHEMA_INFO (version) VALUES (#{current_schema})"
+ end
+ rescue ActiveRecord::StatementInvalid
+ # No Schema Info
+ end
+ end
+
+ end
+end
--- /dev/null
+require 'jdbc_adapter/missing_functionality_helper'
+
+module ::JdbcSpec
+ module ActiveRecordExtensions
+ def derby_connection(config)
+ config[:url] ||= "jdbc:derby:#{config[:database]};create=true"
+ config[:driver] ||= "org.apache.derby.jdbc.EmbeddedDriver"
+ embedded_driver(config)
+ end
+ end
+
+ module Derby
+ def self.column_selector
+ [/derby/i, lambda {|cfg,col| col.extend(::JdbcSpec::Derby::Column)}]
+ end
+
+ def self.adapter_selector
+ [/derby/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::Derby)}]
+ end
+
+ def self.monkey_rails
+ unless @already_monkeyd
+        # Needed because Rails is broken with respect to quoting of
+ # some values. Most databases are nice about it,
+ # but not Derby. The real issue is that you can't
+ # compare a CHAR value to a NUMBER column.
+ ::ActiveRecord::Associations::ClassMethods.module_eval do
+ private
+
+ def select_limited_ids_list(options, join_dependency)
+ connection.select_all(
+ construct_finder_sql_for_association_limiting(options, join_dependency),
+ "#{name} Load IDs For Limited Eager Loading"
+ ).collect { |row| connection.quote(row[primary_key], columns_hash[primary_key]) }.join(", ")
+ end
+ end
+
+ @already_monkeyd = true
+ end
+ end
+
+ def self.extended(*args)
+ monkey_rails
+ end
+
+ def self.included(*args)
+ monkey_rails
+ end
+
+ module Column
+ def value_to_binary(value)
+ value.scan(/[0-9A-Fa-f]{2}/).collect {|v| v.to_i(16)}.pack("C*")
+ end
+
+ def cast_to_date_or_time(value)
+ return value if value.is_a? Date
+ return nil if value.blank?
+ guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
+ end
+
+ def cast_to_time(value)
+ return value if value.is_a? Time
+ time_array = ParseDate.parsedate value
+ time_array[0] ||= 2000; time_array[1] ||= 1; time_array[2] ||= 1;
+ Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
+ end
+
+ def guess_date_or_time(value)
+ (value.hour == 0 and value.min == 0 and value.sec == 0) ?
+ Date.new(value.year, value.month, value.day) : value
+ end
+
+ def simplified_type(field_type)
+ return :boolean if field_type =~ /smallint/i
+ return :float if field_type =~ /real/i
+ super
+ end
+ end
+
+ include JdbcSpec::MissingFunctionalityHelper
+
+ def modify_types(tp)
+ tp[:primary_key] = "int generated by default as identity NOT NULL PRIMARY KEY"
+ tp[:integer][:limit] = nil
+ tp[:string][:limit] = 256
+ tp[:boolean] = {:name => "smallint"}
+ tp
+ end
+
+ # Override default -- fix case where ActiveRecord passes :default => nil, :null => true
+ def add_column_options!(sql, options)
+ options.delete(:default) if options.has_key?(:default) && options[:default].nil?
+ options.delete(:null) if options.has_key?(:null) && (options[:null].nil? || options[:null] == true)
+ super
+ end
+
+ def classes_for_table_name(table)
+ ActiveRecord::Base.send(:subclasses).select {|klass| klass.table_name == table}
+ end
+
+ # Set the sequence to the max value of the table's column.
+ def reset_sequence!(table, column, sequence = nil)
+ mpk = select_value("SELECT MAX(#{quote_column_name column}) FROM #{table}")
+ execute("ALTER TABLE #{table} ALTER COLUMN #{quote_column_name column} RESTART WITH #{mpk.to_i + 1}")
+ end
+
+ def reset_pk_sequence!(table, pk = nil, sequence = nil)
+ klasses = classes_for_table_name(table)
+ klass = klasses.nil? ? nil : klasses.first
+ pk = klass.primary_key unless klass.nil?
+ if pk && klass.columns_hash[pk].type == :integer
+ reset_sequence!(klass.table_name, pk)
+ end
+ end
+
+ def primary_key(table_name) #:nodoc:
+ primary_keys(table_name).first
+ end
+
+ def remove_index(table_name, options) #:nodoc:
+ execute "DROP INDEX #{index_name(table_name, options)}"
+ end
+
+ def rename_table(name, new_name)
+ execute "RENAME TABLE #{name} TO #{new_name}"
+ end
+
+ COLUMN_INFO_STMT = "SELECT C.COLUMNNAME, C.REFERENCEID, C.COLUMNNUMBER FROM SYS.SYSCOLUMNS C, SYS.SYSTABLES T WHERE T.TABLEID = '%s' AND T.TABLEID = C.REFERENCEID ORDER BY C.COLUMNNUMBER"
+
+ COLUMN_TYPE_STMT = "SELECT COLUMNDATATYPE, COLUMNDEFAULT FROM SYS.SYSCOLUMNS WHERE REFERENCEID = '%s' AND COLUMNNAME = '%s'"
+
+ AUTO_INC_STMT = "SELECT AUTOINCREMENTSTART, AUTOINCREMENTINC, COLUMNNAME, REFERENCEID, COLUMNDEFAULT FROM SYS.SYSCOLUMNS WHERE REFERENCEID = '%s' AND COLUMNNAME = '%s'"
+ AUTO_INC_STMT2 = "SELECT AUTOINCREMENTSTART, AUTOINCREMENTINC, COLUMNNAME, REFERENCEID, COLUMNDEFAULT FROM SYS.SYSCOLUMNS WHERE REFERENCEID = (SELECT T.TABLEID FROM SYS.SYSTABLES T WHERE T.TABLENAME = '%s') AND COLUMNNAME = '%s'"
+
+ def add_quotes(name)
+ return name unless name
+ %Q{"#{name}"}
+ end
+
+ def strip_quotes(str)
+ return str unless str
+ return str unless /^(["']).*\1$/ =~ str
+ str[1..-2]
+ end
+
+ def expand_double_quotes(name)
+ return name unless name && name['"']
+ name.gsub(/"/,'""')
+ end
+
+ def reinstate_auto_increment(name, refid, coldef)
+ stmt = AUTO_INC_STMT % [refid, strip_quotes(name)]
+ data = execute(stmt).first
+ if data
+ start = data['autoincrementstart']
+ if start
+ coldef << " GENERATED " << (data['columndefault'].nil? ? "ALWAYS" : "BY DEFAULT ")
+ coldef << "AS IDENTITY (START WITH "
+ coldef << start
+ coldef << ", INCREMENT BY "
+ coldef << data['autoincrementinc']
+ coldef << ")"
+ return true
+ end
+ end
+ false
+ end
+
+ def auto_increment_stmt(tname, cname)
+ stmt = AUTO_INC_STMT2 % [tname, strip_quotes(cname)]
+ data = execute(stmt).first
+ if data
+ start = data['autoincrementstart']
+ if start
+ coldef = ""
+ coldef << " GENERATED " << (data['columndefault'].nil? ? "ALWAYS" : "BY DEFAULT ")
+ coldef << "AS IDENTITY (START WITH "
+ coldef << start
+ coldef << ", INCREMENT BY "
+ coldef << data['autoincrementinc']
+ coldef << ")"
+ return coldef
+ end
+ end
+ ""
+ end
+
+
+ def add_column(table_name, column_name, type, options = {})
+ if option_not_null = options[:null] == false
+ option_not_null = options.delete(:null)
+ end
+ add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
+ add_column_options!(add_column_sql, options)
+ execute(add_column_sql)
+ if option_not_null
+ alter_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} NOT NULL"
+ end
+ end
+
+ # I don't think this method is ever called ??? (stepheneb)
+ def create_column(name, refid, colno)
+ stmt = COLUMN_TYPE_STMT % [refid, strip_quotes(name)]
+ coldef = ""
+ data = execute(stmt).first
+ if data
+ coldef << add_quotes(expand_double_quotes(strip_quotes(name)))
+ coldef << " "
+ coldef << data['columndatatype']
+ if !reinstate_auto_increment(name, refid, coldef) && data['columndefault']
+ coldef << " DEFAULT " << data['columndefault']
+ end
+ end
+ coldef
+ end
+
+ SIZEABLE = %w(VARCHAR CLOB BLOB)
+
+ def structure_dump #:nodoc:
+ definition=""
+ rs = @connection.connection.meta_data.getTables(nil,nil,nil,["TABLE"].to_java(:string))
+ while rs.next
+ tname = rs.getString(3)
+ definition << "CREATE TABLE #{tname} (\n"
+ rs2 = @connection.connection.meta_data.getColumns(nil,nil,tname,nil)
+ first_col = true
+ while rs2.next
+ col_name = add_quotes(rs2.getString(4));
+ default = ""
+ d1 = rs2.getString(13)
+ if d1 =~ /^GENERATED_/
+ default = auto_increment_stmt(tname, col_name)
+ elsif d1
+ default = " DEFAULT #{d1}"
+ end
+
+ type = rs2.getString(6)
+ col_size = rs2.getString(7)
+ nulling = (rs2.getString(18) == 'NO' ? " NOT NULL" : "")
+ create_col_string = add_quotes(expand_double_quotes(strip_quotes(col_name))) +
+ " " +
+ type +
+ (SIZEABLE.include?(type) ? "(#{col_size})" : "") +
+ nulling +
+ default
+ if !first_col
+ create_col_string = ",\n #{create_col_string}"
+ else
+ create_col_string = " #{create_col_string}"
+ end
+
+ definition << create_col_string
+
+ first_col = false
+ end
+ definition << ");\n\n"
+ end
+ definition
+ end
+
+    # Support for removing columns, added to Derby via this issue:
+    # https://issues.apache.org/jira/browse/DERBY-1489
+    #
+    # This feature has not made it into a formal release and is not in Java 6.
+    # If the normal strategy fails we fall back on creating a new table without
+    # the column and then moving the data over to it.
+    #
+ def remove_column(table_name, column_name)
+ begin
+ execute "ALTER TABLE #{table_name} DROP COLUMN #{column_name} RESTRICT"
+ rescue
+ alter_table(table_name) do |definition|
+ definition.columns.delete(definition[column_name])
+ end
+ end
+ end
+
+    # Notes about changing columns in Derby:
+    # http://db.apache.org/derby/docs/10.2/ref/rrefsqlj81859.html#rrefsqlj81859__rrefsqlj37860
+ #
+ # We support changing columns using the strategy outlined in:
+ # https://issues.apache.org/jira/browse/DERBY-1515
+ #
+ # This feature has not made it into a formal release and is not in Java 6. We will
+ # need to conditionally support this somehow (supposed to arrive for 10.3.0.0)
+ def change_column(table_name, column_name, type, options = {})
+ # null/not nulling is easy, handle that separately
+ if options.include?(:null)
+ # This seems to only work with 10.2 of Derby
+ if options.delete(:null) == false
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} NOT NULL"
+ else
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} NULL"
+ end
+ end
+
+ # anything left to do?
+ unless options.empty?
+ begin
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DATA TYPE #{type_to_sql(type, options[:limit])}"
+ rescue
+ transaction do
+ temp_new_column_name = "#{column_name}_newtype"
+ # 1) ALTER TABLE t ADD COLUMN c1_newtype NEWTYPE;
+ add_column table_name, temp_new_column_name, type, options
+ # 2) UPDATE t SET c1_newtype = c1;
+ execute "UPDATE #{table_name} SET #{temp_new_column_name} = CAST(#{column_name} AS #{type_to_sql(type, options[:limit])})"
+ # 3) ALTER TABLE t DROP COLUMN c1;
+ remove_column table_name, column_name
+ # 4) ALTER TABLE t RENAME COLUMN c1_newtype to c1;
+ rename_column table_name, temp_new_column_name, column_name
+ end
+ end
+ end
+ end
+
+ # Support for renaming columns:
+ # https://issues.apache.org/jira/browse/DERBY-1490
+ #
+    # This feature is expected to arrive in version 10.3.0.0:
+    # http://wiki.apache.org/db-derby/DerbyTenThreeRelease
+ #
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ begin
+ execute "ALTER TABLE #{table_name} ALTER RENAME COLUMN #{column_name} TO #{new_column_name}"
+ rescue
+ alter_table(table_name, :rename => {column_name => new_column_name})
+ end
+ end
+
+ def primary_keys(table_name)
+ @connection.primary_keys table_name.to_s.upcase
+ end
+
+ def recreate_database(db_name)
+ tables.each do |t|
+ drop_table t
+ end
+ end
+
+    # For DDL it appears you can quote column names with "", but in queries (like INSERT) it seems to error out.
+ def quote_column_name(name) #:nodoc:
+ name = name.to_s
+ if /^references$/i =~ name
+ %Q{"#{name.upcase}"}
+ elsif /[A-Z]/ =~ name && /[a-z]/ =~ name
+ %Q{"#{name}"}
+ elsif name =~ /\s/
+ %Q{"#{name.upcase}"}
+ elsif name =~ /^[_\d]/
+ %Q{"#{name.upcase}"}
+ else
+ name
+ end
+ end
+
+ def quoted_true
+ '1'
+ end
+
+ def quoted_false
+ '0'
+ end
+ end
+end
+
--- /dev/null
+module ::JdbcSpec
+ module FireBird
+ def self.adapter_selector
+ [/firebird/i, lambda{|cfg,adapt| adapt.extend(::JdbcSpec::FireBird)}]
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = 'INTEGER NOT NULL PRIMARY KEY'
+ tp[:string][:limit] = 252
+ tp[:integer][:limit] = nil
+ tp
+ end
+
+ def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) # :nodoc:
+ execute(sql, name)
+ id_value
+ end
+
+ def add_limit_offset!(sql, options) # :nodoc:
+ if options[:limit]
+ limit_string = "FIRST #{options[:limit]}"
+ limit_string << " SKIP #{options[:offset]}" if options[:offset]
+ sql.sub!(/\A(\s*SELECT\s)/i, '\&' + limit_string + ' ')
+ end
+ end
+
+ def prefetch_primary_key?(table_name = nil)
+ true
+ end
+
+ def default_sequence_name(table_name, primary_key) # :nodoc:
+ "#{table_name}_seq"
+ end
+
+ def next_sequence_value(sequence_name)
+ select_one("SELECT GEN_ID(#{sequence_name}, 1 ) FROM RDB$DATABASE;")["gen_id"]
+ end
+
+ def create_table(name, options = {}) #:nodoc:
+ super(name, options)
+ execute "CREATE GENERATOR #{name}_seq"
+ end
+
+ def rename_table(name, new_name) #:nodoc:
+ execute "RENAME #{name} TO #{new_name}"
+ execute "UPDATE RDB$GENERATORS SET RDB$GENERATOR_NAME='#{new_name}_seq' WHERE RDB$GENERATOR_NAME='#{name}_seq'" rescue nil
+ end
+
+ def drop_table(name, options = {}) #:nodoc:
+ super(name)
+ execute "DROP GENERATOR #{name}_seq" rescue nil
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER #{column_name} TYPE #{type_to_sql(type, options[:limit])}"
+ end
+
+ def rename_column(table_name, column_name, new_column_name)
+ execute "ALTER TABLE #{table_name} ALTER #{column_name} TO #{new_column_name}"
+ end
+
+ def remove_index(table_name, options) #:nodoc:
+ execute "DROP INDEX #{index_name(table_name, options)}"
+ end
+
+ def quote(value, column = nil) # :nodoc:
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ if [Time, DateTime].include?(value.class)
+ "CAST('#{value.strftime("%Y-%m-%d %H:%M:%S")}' AS TIMESTAMP)"
+ else
+ if column && column.type == :primary_key
+ return value.to_s
+ end
+ super
+ end
+ end
+
+ def quote_string(string) # :nodoc:
+ string.gsub(/'/, "''")
+ end
+
+ def quote_column_name(column_name) # :nodoc:
+ %Q("#{ar_to_fb_case(column_name)}")
+ end
+
+ def quoted_true # :nodoc:
+ quote(1)
+ end
+
+ def quoted_false # :nodoc:
+ quote(0)
+ end
+
+ private
+
+ # Maps uppercase Firebird column names to lowercase for ActiveRecord;
+ # mixed-case columns retain their original case.
+ def fb_to_ar_case(column_name)
+ column_name =~ /[[:lower:]]/ ? column_name : column_name.to_s.downcase
+ end
+
+      # Maps lowercase ActiveRecord column names to uppercase for Firebird;
+ # mixed-case columns retain their original case.
+ def ar_to_fb_case(column_name)
+ column_name =~ /[[:upper:]]/ ? column_name : column_name.to_s.upcase
+ end
+ end
+end
--- /dev/null
+module ::JdbcSpec
+ module ActiveRecordExtensions
+ def hsqldb_connection(config)
+ config[:url] ||= "jdbc:hsqldb:#{config[:database]}"
+ config[:driver] ||= "org.hsqldb.jdbcDriver"
+ embedded_driver(config)
+ end
+
+ def h2_connection(config)
+ config[:url] ||= "jdbc:h2:#{config[:database]}"
+ config[:driver] ||= "org.h2.Driver"
+ embedded_driver(config)
+ end
+ end
+
+ module HSQLDB
+ def self.column_selector
+ [/hsqldb|\.h2\./i, lambda {|cfg,col| col.extend(::JdbcSpec::HSQLDB::Column)}]
+ end
+
+ def self.adapter_selector
+ [/hsqldb|\.h2\./i, lambda do |cfg,adapt|
+ adapt.extend(::JdbcSpec::HSQLDB)
+ def adapt.h2_adapter; true; end if cfg[:driver] =~ /\.h2\./
+ end]
+ end
+
+ module Column
+ def type_cast(value)
+ return nil if value.nil? || value =~ /^\s*null\s*$/i
+ case type
+ when :string then value
+ when :integer then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :primary_key then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :float then value.to_f
+ when :datetime then cast_to_date_or_time(value)
+ when :timestamp then cast_to_time(value)
+ when :binary then value.scan(/[0-9A-Fa-f]{2}/).collect {|v| v.to_i(16)}.pack("C*")
+ when :time then cast_to_time(value)
+ else value
+ end
+ end
+ def cast_to_date_or_time(value)
+ return value if value.is_a? Date
+ return nil if value.blank?
+ guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
+ end
+
+ def cast_to_time(value)
+ return value if value.is_a? Time
+ time_array = ParseDate.parsedate value
+ time_array[0] ||= 2000; time_array[1] ||= 1; time_array[2] ||= 1;
+ Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
+ end
+
+ def guess_date_or_time(value)
+ (value.hour == 0 and value.min == 0 and value.sec == 0) ?
+ Date.new(value.year, value.month, value.day) : value
+ end
+
+
+ private
+ def simplified_type(field_type)
+ case field_type
+ when /longvarchar/i
+ :text
+ else
+ super(field_type)
+ end
+ end
+
+ # Override of ActiveRecord::ConnectionAdapters::Column
+ def extract_limit(sql_type)
+ # HSQLDB appears to return "LONGVARCHAR(0)" for :text columns, which
+ # for AR purposes should be interpreted as "no limit"
+ return nil if sql_type =~ /\(0\)/
+ super
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "INTEGER GENERATED BY DEFAULT AS IDENTITY(START WITH 0) PRIMARY KEY"
+ tp[:integer][:limit] = nil
+ tp[:boolean][:limit] = nil
+ # set text and float limits so we don't see odd scales tacked on
+ # in migrations
+ tp[:text][:limit] = nil
+ tp[:float][:limit] = 17
+ tp[:string][:limit] = 255
+ tp[:datetime] = { :name => "DATETIME" }
+ tp[:timestamp] = { :name => "DATETIME" }
+ tp[:time] = { :name => "DATETIME" }
+ tp[:date] = { :name => "DATETIME" }
+ tp
+ end
+
+ def quote(value, column = nil) # :nodoc:
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ case value
+ when String
+ if respond_to?(:h2_adapter) && value.empty?
+ "NULL"
+ elsif column && column.type == :binary
+ "'#{quote_string(value).unpack("C*").collect {|v| v.to_s(16)}.join}'"
+ else
+ "'#{quote_string(value)}'"
+ end
+ else super
+ end
+ end
+
+ def quote_string(str)
+ str.gsub(/'/, "''")
+ end
+
+ def quoted_true
+ '1'
+ end
+
+ def quoted_false
+ '0'
+ end
+
+ def add_column(table_name, column_name, type, options = {})
+ if option_not_null = options[:null] == false
+ option_not_null = options.delete(:null)
+ end
+ add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
+ add_column_options!(add_column_sql, options)
+ execute(add_column_sql)
+ if option_not_null
+ alter_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} NOT NULL"
+ end
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit])}"
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{quote(default)}"
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} RENAME TO #{new_column_name}"
+ end
+
+ def rename_table(name, new_name)
+ execute "ALTER TABLE #{name} RENAME TO #{new_name}"
+ end
+
+ def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ log(sql,name) do
+ @connection.execute_update(sql)
+ end
+ table = sql.split(" ", 4)[2]
+ id_value || last_insert_id(table, nil)
+ end
+
+ def last_insert_id(table, sequence_name)
+ Integer(select_value("SELECT IDENTITY() FROM #{table}"))
+ end
+
+ # Override normal #_execute: See Rubyforge #11567
+ def _execute(sql, name = nil)
+ if ::ActiveRecord::ConnectionAdapters::JdbcConnection::select?(sql)
+ @connection.execute_query(sql)
+ elsif ::ActiveRecord::ConnectionAdapters::JdbcConnection::insert?(sql)
+ insert(sql, name)
+ else
+ @connection.execute_update(sql)
+ end
+ end
+
+ def add_limit_offset!(sql, options) #:nodoc:
+ offset = options[:offset] || 0
+ bef = sql[7..-1]
+ if limit = options[:limit]
+ sql.replace "select limit #{offset} #{limit} #{bef}"
+ elsif offset > 0
+ sql.replace "select limit #{offset} 0 #{bef}"
+ end
+ end
+
+ # override to filter out system tables that otherwise end
+ # up in db/schema.rb during migrations. JdbcConnection#tables
+ # now takes an optional block filter so we can screen out
+ # rows corresponding to system tables. HSQLDB names its
+ # system tables SYSTEM.*, but H2 seems to name them without
+ # any kind of convention
+ def tables
+ @connection.tables.select {|row| row.to_s !~ /^system_/i }
+ end
+
+ def remove_index(table_name, options = {})
+ execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
+ end
+ end
+end
--- /dev/null
+module ::ActiveRecord
+ class Base
+ after_save :write_lobs
+
+ private
+ def write_lobs
+ if connection.is_a?(JdbcSpec::Informix)
+ self.class.columns.each do |c|
+ if [:text, :binary].include? c.type
+ value = self[c.name]
+ value = value.to_yaml if unserializable_attribute?(c.name, c)
+
+ unless value.nil? || (value == '')
+ connection.write_large_object(c.type == :binary,
+ c.name,
+ self.class.table_name,
+ self.class.primary_key,
+ quote_value(id),
+ value)
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+module ::JdbcSpec
+ module ActiveRecordExtensions
+ def informix_connection(config)
+ config[:port] ||= 9088
+ config[:url] ||= "jdbc:informix-sqli://#{config[:host]}:#{config[:port]}/#{config[:database]}:INFORMIXSERVER=#{config[:servername]}"
+ config[:driver] = 'com.informix.jdbc.IfxDriver'
+ jdbc_connection(config)
+ end
+ end
+
+ module Informix
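+    # Caches the IDS major version when the adapter is extended; SKIP in
+    # add_limit_offset! is only emitted for version 10 and later.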
+ def self.extended(base)
+ @@db_major_version = base.select_one("SELECT dbinfo('version', 'major') version FROM systables WHERE tabid = 1")['version'].to_i
+ end
+
+ def self.column_selector
+ [ /informix/i,
+ lambda { |cfg, column| column.extend(::JdbcSpec::Informix::Column) } ]
+ end
+
+ def self.adapter_selector
+ [ /informix/i,
+ lambda { |cfg, adapter| adapter.extend(::JdbcSpec::Informix) } ]
+ end
+
+ module Column
+ private
+ # TODO: Test all Informix column types.
+ def simplified_type(field_type)
+ if field_type =~ /serial/i
+ :primary_key
+ else
+ super
+ end
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "SERIAL PRIMARY KEY"
+ tp[:string] = { :name => "VARCHAR", :limit => 255 }
+ tp[:integer] = { :name => "INTEGER" }
+ tp[:float] = { :name => "FLOAT" }
+ tp[:decimal] = { :name => "DECIMAL" }
+ tp[:datetime] = { :name => "DATETIME YEAR TO FRACTION(5)" }
+ tp[:timestamp] = { :name => "DATETIME YEAR TO FRACTION(5)" }
+ tp[:time] = { :name => "DATETIME HOUR TO FRACTION(5)" }
+ tp[:date] = { :name => "DATE" }
+ tp[:binary] = { :name => "BYTE" }
+ tp[:boolean] = { :name => "BOOLEAN" }
+ tp
+ end
+
+ def prefetch_primary_key?(table_name = nil)
+ true
+ end
+
+ def supports_migrations?
+ true
+ end
+
+ def default_sequence_name(table, column)
+ "#{table}_seq"
+ end
+
+ def add_limit_offset!(sql, options)
+ if options[:limit]
+ limit = "FIRST #{options[:limit]}"
+ # SKIP available only in IDS >= 10
+        offset = (@@db_major_version >= 10 && options[:offset] ?
+                  "SKIP #{options[:offset]}" : "")
+ sql.sub!(/^select /i, "SELECT #{offset} #{limit} ")
+ end
+ sql
+ end
+
+ def next_sequence_value(sequence_name)
+ select_one("SELECT #{sequence_name}.nextval id FROM systables WHERE tabid=1")['id']
+ end
+
+ # TODO: Add some smart quoting for newlines in string and text fields.
+ def quote_string(string)
+ string.gsub(/\'/, "''")
+ end
+
+ def quote(value, column = nil)
+ if column && [:binary, :text].include?(column.type)
+ # LOBs are updated separately by an after_save trigger.
+ "NULL"
+ elsif column && column.type == :date
+ "'#{value.mon}/#{value.day}/#{value.year}'"
+ else
+ super
+ end
+ end
+
+ def create_table(name, options = {})
+ super(name, options)
+ execute("CREATE SEQUENCE #{name}_seq")
+ end
+
+ def rename_table(name, new_name)
+ execute("RENAME TABLE #{name} TO #{new_name}")
+ execute("RENAME SEQUENCE #{name}_seq TO #{new_name}_seq")
+ end
+
+ def drop_table(name)
+ super(name)
+ execute("DROP SEQUENCE #{name}_seq")
+ end
+
+ def remove_index(table_name, options = {})
+ @connection.execute_update("DROP INDEX #{index_name(table_name, options)}")
+ end
+
+ private
+ def select(sql, name = nil)
+ # Informix does not like "= NULL", "!= NULL", or "<> NULL".
+ execute(sql.gsub(/(!=|<>)\s*null/i, "IS NOT NULL").gsub(/=\s*null/i, "IS NULL"), name)
+ end
+ end # module Informix
+end # module ::JdbcSpec
--- /dev/null
+module JdbcSpec
+ module Mimer
+ def self.adapter_selector
+ [/mimer/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::Mimer)}]
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "INTEGER NOT NULL PRIMARY KEY"
+ tp[:boolean][:limit] = nil
+ tp[:string][:limit] = 255
+ tp[:binary] = {:name => "BINARY VARYING", :limit => 4096}
+ tp[:text] = {:name => "VARCHAR", :limit => 4096}
+ tp[:datetime] = { :name => "TIMESTAMP" }
+ tp[:timestamp] = { :name => "TIMESTAMP" }
+ tp[:time] = { :name => "TIMESTAMP" }
+ tp[:date] = { :name => "TIMESTAMP" }
+ tp
+ end
+
+ def default_sequence_name(table, column) #:nodoc:
+ "#{table}_seq"
+ end
+
+ def create_table(name, options = {}) #:nodoc:
+ super(name, options)
+ execute "CREATE SEQUENCE #{name}_seq" unless options[:id] == false
+ end
+
+ def drop_table(name, options = {}) #:nodoc:
+ super(name) rescue nil
+ execute "DROP SEQUENCE #{name}_seq" rescue nil
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit])}"
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{quote(default)}"
+ end
+
+ def remove_index(table_name, options = {}) #:nodoc:
+ execute "DROP INDEX #{index_name(table_name, options)}"
+ end
+
+ def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ if pk.nil? # Who called us? What does the sql look like? No idea!
+ execute sql, name
+ elsif id_value # Pre-assigned id
+ log(sql, name) { @connection.execute_insert sql,pk }
+ else # Assume the sql contains a bind-variable for the id
+ id_value = select_one("SELECT NEXT_VALUE OF #{sequence_name} AS val FROM MIMER.ONEROW")['val']
+ log(sql, name) {
+ execute_prepared_insert(sql,id_value)
+ }
+ end
+ id_value
+ end
+
+ def execute_prepared_insert(sql, id)
+ @stmts ||= {}
+ @stmts[sql] ||= @connection.ps(sql)
+ stmt = @stmts[sql]
+ stmt.setLong(1,id)
+ stmt.executeUpdate
+ id
+ end
+
+ def quote(value, column = nil) #:nodoc:
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ if String === value && column && column.type == :binary
+ return "X'#{quote_string(value.unpack("C*").collect {|v| v.to_s(16)}.join)}'"
+ end
+ case value
+ when String : %Q{'#{quote_string(value)}'}
+ when NilClass : 'NULL'
+ when TrueClass : '1'
+ when FalseClass : '0'
+ when Numeric : value.to_s
+ when Date, Time : %Q{TIMESTAMP '#{value.strftime("%Y-%m-%d %H:%M:%S")}'}
+ else %Q{'#{quote_string(value.to_yaml)}'}
+ end
+ end
+
+ def quoted_true
+ '1'
+ end
+
+ def quoted_false
+ '0'
+ end
+
+ def add_limit_offset!(sql, options) # :nodoc:
+ @limit = options[:limit]
+ @offset = options[:offset]
+ end
+
+ def select_all(sql, name = nil)
+ @offset ||= 0
+ if !@limit || @limit == -1
+ range = @offset..-1
+ else
+ range = @offset...(@offset+@limit)
+ end
+ select(sql, name)[range]
+ ensure
+ @limit = @offset = nil
+ end
+
+ def select_one(sql, name = nil)
+ @offset ||= 0
+ select(sql, name)[@offset]
+ ensure
+ @limit = @offset = nil
+ end
+
+ def _execute(sql, name = nil)
+ if sql =~ /^select/i
+ @offset ||= 0
+ if !@limit || @limit == -1
+ range = @offset..-1
+ else
+ range = @offset...(@offset+@limit)
+ end
+ @connection.execute_query(sql)[range]
+ else
+ @connection.execute_update(sql)
+ end
+ ensure
+ @limit = @offset = nil
+ end
+ end
+end
--- /dev/null
+require 'jdbc_adapter/tsql_helper'
+
+module ::ActiveRecord
+ class Base
+ # After setting large objects to empty, write data back with a helper method
+ after_save :write_lobs
+ def write_lobs() #:nodoc:
+ if connection.is_a?(JdbcSpec::MsSQL)
+ self.class.columns.select { |c| c.sql_type =~ /image/i }.each { |c|
+ value = self[c.name]
+ value = value.to_yaml if unserializable_attribute?(c.name, c)
+ next if value.nil? || (value == '')
+
+ connection.write_large_object(c.type == :binary, c.name, self.class.table_name, self.class.primary_key, quote_value(id), value)
+ }
+ end
+ end
+ private :write_lobs
+ end
+end
+
+module JdbcSpec
+ module MsSQL
+ include TSqlMethods
+
+ def self.column_selector
+ [/sqlserver|tds/i, lambda {|cfg,col| col.extend(::JdbcSpec::MsSQL::Column)}]
+ end
+
+ def self.adapter_selector
+ [/sqlserver|tds/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::MsSQL)}]
+ end
+
+ module Column
+ attr_accessor :identity, :is_special
+
+ def simplified_type(field_type)
+ case field_type
+ when /int|bigint|smallint|tinyint/i then :integer
+ when /numeric/i then (@scale.nil? || @scale == 0) ? :integer : :decimal
+ when /float|double|decimal|money|real|smallmoney/i then :decimal
+ when /datetime|smalldatetime/i then :datetime
+ when /timestamp/i then :timestamp
+ when /time/i then :time
+ when /text|ntext/i then :text
+ when /binary|image|varbinary/i then :binary
+ when /char|nchar|nvarchar|string|varchar/i then :string
+ when /bit/i then :boolean
+ when /uniqueidentifier/i then :string
+ end
+ end
+
+ def type_cast(value)
+ return nil if value.nil? || value == "(null)" || value == "(NULL)"
+ case type
+ when :string then unquote_string value
+ when :integer then unquote(value).to_i rescue value ? 1 : 0
+ when :primary_key then value == true || value == false ? value == true ? 1 : 0 : value.to_i
+ when :decimal then self.class.value_to_decimal(unquote(value))
+ when :datetime then cast_to_datetime(value)
+ when :timestamp then cast_to_time(value)
+ when :time then cast_to_time(value)
+ when :date then cast_to_datetime(value)
+ when :boolean then value == true or (value =~ /^t(rue)?$/i) == 0 or unquote(value)=="1"
+ when :binary then unquote value
+ else value
+ end
+ end
+
+ # JRUBY-2011: Match balanced quotes and parenthesis - 'text',('text') or (text)
+ def unquote_string(value)
+ value.sub(/^\((.*)\)$/,'\1').sub(/^'(.*)'$/,'\1')
+ end
+
+ def unquote(value)
+ value.to_s.sub(/\A\([\(\']?/, "").sub(/[\'\)]?\)\Z/, "")
+ end
+
+ def cast_to_time(value)
+ return value if value.is_a?(Time)
+ time_array = ParseDate.parsedate(value)
+ time_array[0] ||= 2000
+ time_array[1] ||= 1
+ time_array[2] ||= 1
+ Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
+ end
+
+ def cast_to_datetime(value)
+ if value.is_a?(Time)
+ if value.year != 0 and value.month != 0 and value.day != 0
+ return value
+ else
+ return Time.mktime(2000, 1, 1, value.hour, value.min, value.sec) rescue nil
+ end
+ end
+ return cast_to_time(value) if value.is_a?(Date) or value.is_a?(String) rescue nil
+ value
+ end
+
+ # These methods will only allow the adapter to insert binary data with a length of 7K or less
+ # because of a SQL Server statement length policy.
+ def self.string_to_binary(value)
+ ''
+ end
+ end
+
+ def quote(value, column = nil)
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ case value
+ when String, ActiveSupport::Multibyte::Chars
+ value = value.to_s
+ if column && column.type == :binary
+ "'#{quote_string(JdbcSpec::MsSQL::Column.string_to_binary(value))}'" # ' (for ruby-mode)
+ elsif column && [:integer, :float].include?(column.type)
+ value = column.type == :integer ? value.to_i : value.to_f
+ value.to_s
+ else
+ "'#{quote_string(value)}'" # ' (for ruby-mode)
+ end
+ when TrueClass then '1'
+ when FalseClass then '0'
+ when Time, DateTime then "'#{value.strftime("%Y%m%d %H:%M:%S")}'"
+ when Date then "'#{value.strftime("%Y%m%d")}'"
+ else super
+ end
+ end
+
+ def quote_string(string)
+ string.gsub(/\'/, "''")
+ end
+
+ def quote_table_name(name)
+ name
+ end
+
+ def quote_column_name(name)
+ "[#{name}]"
+ end
+
+ def change_order_direction(order)
+ order.split(",").collect {|fragment|
+ case fragment
+ when /\bDESC\b/i then fragment.gsub(/\bDESC\b/i, "ASC")
+ when /\bASC\b/i then fragment.gsub(/\bASC\b/i, "DESC")
+ else String.new(fragment).split(',').join(' DESC,') + ' DESC'
+ end
+ }.join(",")
+ end
+
+ def recreate_database(name)
+ drop_database(name)
+ create_database(name)
+ end
+
+ def drop_database(name)
+ execute "DROP DATABASE #{name}"
+ end
+
+ def create_database(name)
+ execute "CREATE DATABASE #{name}"
+ end
+
+ def rename_table(name, new_name)
+ execute "EXEC sp_rename '#{name}', '#{new_name}'"
+ end
+
+ # Adds a new column to the named table.
+ # See TableDefinition#column for details of the options you can use.
+ def add_column(table_name, column_name, type, options = {})
+ add_column_sql = "ALTER TABLE #{table_name} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
+ add_column_options!(add_column_sql, options)
+ # TODO: Add support to mimic date columns, using constraints to mark them as such in the database
+ # add_column_sql << " CONSTRAINT ck__#{table_name}__#{column_name}__date_only CHECK ( CONVERT(CHAR(12), #{quote_column_name(column_name)}, 14)='00:00:00:000' )" if type == :date
+ execute(add_column_sql)
+ end
+
+ def rename_column(table, column, new_column_name)
+ execute "EXEC sp_rename '#{table}.#{column}', '#{new_column_name}'"
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ sql_commands = ["ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"]
+ if options_include_default?(options)
+ remove_default_constraint(table_name, column_name)
+ sql_commands << "ALTER TABLE #{table_name} ADD CONSTRAINT DF_#{table_name}_#{column_name} DEFAULT #{quote(options[:default], options[:column])} FOR #{column_name}"
+ end
+ sql_commands.each {|c|
+ execute(c)
+ }
+ end
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ remove_default_constraint(table_name, column_name)
+ execute "ALTER TABLE #{table_name} ADD CONSTRAINT DF_#{table_name}_#{column_name} DEFAULT #{quote(default, column_name)} FOR #{column_name}"
+ end
+ def remove_column(table_name, column_name)
+ remove_check_constraints(table_name, column_name)
+ remove_default_constraint(table_name, column_name)
+ execute "ALTER TABLE #{table_name} DROP COLUMN [#{column_name}]"
+ end
+
+ def remove_default_constraint(table_name, column_name)
+ defaults = select "select def.name from sysobjects def, syscolumns col, sysobjects tab where col.cdefault = def.id and col.name = '#{column_name}' and tab.name = '#{table_name}' and col.id = tab.id"
+ defaults.each {|constraint|
+ execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["name"]}"
+ }
+ end
+
+ def remove_check_constraints(table_name, column_name)
+ # TODO remove all constraints in single method
+ constraints = select "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE where TABLE_NAME = '#{table_name}' and COLUMN_NAME = '#{column_name}'"
+ constraints.each do |constraint|
+ execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["CONSTRAINT_NAME"]}"
+ end
+ end
+
+ def remove_index(table_name, options = {})
+ execute "DROP INDEX #{table_name}.#{index_name(table_name, options)}"
+ end
+
+
+ def columns(table_name, name = nil)
+ return [] if table_name =~ /^information_schema\./i
+ cc = super
+ cc.each do |col|
+ col.identity = true if col.sql_type =~ /identity/i
+ col.is_special = true if col.sql_type =~ /text|ntext|image/i
+ end
+ cc
+ end
+
+ def _execute(sql, name = nil)
+ if sql.lstrip =~ /^insert/i
+ if query_requires_identity_insert?(sql)
+ table_name = get_table_name(sql)
+ with_identity_insert_enabled(table_name) do
+ id = @connection.execute_insert(sql)
+ end
+ else
+ @connection.execute_insert(sql)
+ end
+ elsif sql.lstrip =~ /^\(?\s*(select|show)/i
+ repair_special_columns(sql)
+ @connection.execute_query(sql)
+ else
+ @connection.execute_update(sql)
+ end
+ end
+
+
+ private
+ # Turns IDENTITY_INSERT ON for table during execution of the block
+ # N.B. This sets the state of IDENTITY_INSERT to OFF after the
+ # block has been executed without regard to its previous state
+
+ def with_identity_insert_enabled(table_name, &block)
+ set_identity_insert(table_name, true)
+ yield
+ ensure
+ set_identity_insert(table_name, false)
+ end
+
+ def set_identity_insert(table_name, enable = true)
+ execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
+ rescue Exception => e
+ raise ActiveRecord::ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}"
+ end
+
+ def get_table_name(sql)
+ if sql =~ /^\s*insert\s+into\s+([^\(\s,]+)\s*|^\s*update\s+([^\(\s,]+)\s*/i
+ $1
+ elsif sql =~ /from\s+([^\(\s,]+)\s*/i
+ $1
+ else
+ nil
+ end
+ end
+
+ def identity_column(table_name)
+ @table_columns = {} unless @table_columns
+ @table_columns[table_name] = columns(table_name) if @table_columns[table_name] == nil
+ @table_columns[table_name].each do |col|
+ return col.name if col.identity
+ end
+
+ return nil
+ end
+
+ def query_requires_identity_insert?(sql)
+ table_name = get_table_name(sql)
+ id_column = identity_column(table_name)
+ sql =~ /\[#{id_column}\]/ ? table_name : nil
+ end
+
+ def get_special_columns(table_name)
+ special = []
+ @table_columns ||= {}
+ @table_columns[table_name] ||= columns(table_name)
+ @table_columns[table_name].each do |col|
+ special << col.name if col.is_special
+ end
+ special
+ end
+
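+      # text/ntext/image columns cannot be compared with "=", so rewrite those
+      # comparisons to LIKE and drop ORDER BY clauses that reference them.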
+ def repair_special_columns(sql)
+ special_cols = get_special_columns(get_table_name(sql))
+ for col in special_cols.to_a
+ sql.gsub!(Regexp.new(" #{col.to_s} = "), " #{col.to_s} LIKE ")
+ sql.gsub!(/ORDER BY #{col.to_s}/i, '')
+ end
+ sql
+ end
+ end
+ end
+
--- /dev/null
+require 'active_record/connection_adapters/abstract/schema_definitions'
+
+module ::JdbcSpec
+ # Don't need to load native mysql adapter
+ $LOADED_FEATURES << "active_record/connection_adapters/mysql_adapter.rb"
+
+ module ActiveRecordExtensions
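+    # Builds the JDBC URL when none is configured; for illustration
+    # (hypothetical values) :host => "localhost", :database => "sonar" becomes
+    # "jdbc:mysql://localhost:3306/sonar?" followed by MySQL::URL_OPTIONS.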
+ def mysql_connection(config)
+ config[:port] ||= 3306
+ if config[:url]
+ config[:url] = config[:url]['?'] ? "#{config[:url]}&#{MySQL::URL_OPTIONS}" : "#{config[:url]}?#{MySQL::URL_OPTIONS}"
+ else
+ config[:url] = "jdbc:mysql://#{config[:host]}:#{config[:port]}/#{config[:database]}?#{MySQL::URL_OPTIONS}"
+ end
+ config[:driver] = "com.mysql.jdbc.Driver"
+ jdbc_connection(config)
+ end
+ end
+
+ module MySQL
+ URL_OPTIONS = "zeroDateTimeBehavior=convertToNull&jdbcCompliantTruncation=false&useUnicode=true&characterEncoding=utf8"
+ def self.column_selector
+ [/mysql/i, lambda {|cfg,col| col.extend(::JdbcSpec::MySQL::Column)}]
+ end
+
+ def self.adapter_selector
+ [/mysql/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::MySQL)}]
+ end
+
+ def self.extended(adapter)
+ adapter.execute("SET SQL_AUTO_IS_NULL=0")
+ end
+
+ module Column
+ TYPES_ALLOWING_EMPTY_STRING_DEFAULT = Set.new([:binary, :string, :text])
+
+ def simplified_type(field_type)
+ return :boolean if field_type =~ /tinyint\(1\)|bit/i
+ return :string if field_type =~ /enum/i
+ super
+ end
+
+ def init_column(name, default, *args)
+ @original_default = default
+ @default = nil if missing_default_forged_as_empty_string?
+ end
+
+ # MySQL misreports NOT NULL column default when none is given.
+ # We can't detect this for columns which may have a legitimate ''
+ # default (string, text, binary) but we can for others (integer,
+ # datetime, boolean, and the rest).
+ #
+ # Test whether the column has default '', is not null, and is not
+ # a type allowing default ''.
+ def missing_default_forged_as_empty_string?
+ !null && @original_default == '' && !TYPES_ALLOWING_EMPTY_STRING_DEFAULT.include?(type)
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "int(11) DEFAULT NULL auto_increment PRIMARY KEY"
+ tp[:decimal] = { :name => "decimal" }
+ tp[:timestamp] = { :name => "datetime" }
+ tp[:datetime][:limit] = nil
+ tp
+ end
+
+ # QUOTING ==================================================
+
+ def quote(value, column = nil)
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ if column && column.type == :primary_key
+ value.to_s
+ elsif column && String === value && column.type == :binary && column.class.respond_to?(:string_to_binary)
+ s = column.class.string_to_binary(value).unpack("H*")[0]
+ "x'#{s}'"
+ elsif BigDecimal === value
+ "'#{value.to_s("F")}'"
+ else
+ super
+ end
+ end
+
+ def quote_column_name(name) #:nodoc:
+ "`#{name}`"
+ end
+
+ def quote_table_name(name) #:nodoc:
+ quote_column_name(name).gsub('.', '`.`')
+ end
+
+ def quoted_true
+ "1"
+ end
+
+ def quoted_false
+ "0"
+ end
+
+ def begin_db_transaction #:nodoc:
+ @connection.begin
+ rescue Exception
+ # Transactions aren't supported
+ end
+
+ def commit_db_transaction #:nodoc:
+ @connection.commit
+ rescue Exception
+ # Transactions aren't supported
+ end
+
+ def rollback_db_transaction #:nodoc:
+ @connection.rollback
+ rescue Exception
+ # Transactions aren't supported
+ end
+
+ def disable_referential_integrity(&block) #:nodoc:
+ old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
+ begin
+ update("SET FOREIGN_KEY_CHECKS = 0")
+ yield
+ ensure
+ update("SET FOREIGN_KEY_CHECKS = #{old}")
+ end
+ end
+
+ # SCHEMA STATEMENTS ========================================
+
+ def structure_dump #:nodoc:
+ if supports_views?
+ sql = "SHOW FULL TABLES WHERE Table_type = 'BASE TABLE'"
+ else
+ sql = "SHOW TABLES"
+ end
+
+ select_all(sql).inject("") do |structure, table|
+ table.delete('Table_type')
+
+ hash = select_one("SHOW CREATE TABLE #{quote_table_name(table.to_a.first.last)}")
+
+ if(table = hash["Create Table"])
+ structure += table + ";\n\n"
+ elsif(view = hash["Create View"])
+ structure += view + ";\n\n"
+ end
+ end
+ end
+
+ def recreate_database(name) #:nodoc:
+ drop_database(name)
+ create_database(name)
+ end
+
+ def create_database(name, options = {}) #:nodoc:
+ if options[:collation]
+ execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
+ else
+ execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
+ end
+ end
+
+ def drop_database(name) #:nodoc:
+ execute "DROP DATABASE IF EXISTS `#{name}`"
+ end
+
+ def current_database
+ select_one("SELECT DATABASE() as db")["db"]
+ end
+
+ def create_table(name, options = {}) #:nodoc:
+ super(name, {:options => "ENGINE=InnoDB CHARACTER SET utf8 COLLATE utf8_bin"}.merge(options))
+ end
+
+ def rename_table(name, new_name)
+ execute "RENAME TABLE #{quote_table_name(name)} TO #{quote_table_name(new_name)}"
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'")["Type"]
+
+ execute("ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(column_name)} #{current_type} DEFAULT #{quote(default)}")
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ unless options_include_default?(options)
+ if column = columns(table_name).find { |c| c.name == column_name.to_s }
+ options[:default] = column.default
+ else
+ raise "No such column: #{table_name}.#{column_name}"
+ end
+ end
+
+ change_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
+ add_column_options!(change_column_sql, options)
+ execute(change_column_sql)
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ cols = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'")
+ current_type = cols["Type"] || cols["COLUMN_TYPE"]
+ execute "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_table_name(column_name)} #{quote_column_name(new_column_name)} #{current_type}"
+ end
+
+ def add_limit_offset!(sql, options) #:nodoc:
+ if limit = options[:limit]
+ unless offset = options[:offset]
+ sql << " LIMIT #{limit}"
+ else
+ sql << " LIMIT #{offset}, #{limit}"
+ end
+ end
+ end
+
+ def show_variable(var)
+ res = execute("show variables like '#{var}'")
+ row = res.detect {|row| row["Variable_name"] == var }
+ row && row["Value"]
+ end
+
+ def charset
+ show_variable("character_set_database")
+ end
+
+ def collation
+ show_variable("collation_database")
+ end
+
+ private
+ def supports_views?
+ false
+ end
+ end
+end
--- /dev/null
+module ::ActiveRecord
+ class Base
+ def after_save_with_oracle_lob() #:nodoc:
+ if connection.is_a?(JdbcSpec::Oracle)
+ self.class.columns.select { |c| c.sql_type =~ /LOB\(|LOB$/i }.each { |c|
+ value = self[c.name]
+ value = value.to_yaml if unserializable_attribute?(c.name, c)
+ next if value.nil? || (value == '')
+
+ connection.write_large_object(c.type == :binary, c.name, self.class.table_name, self.class.primary_key, quote_value(id), value)
+ }
+ end
+ end
+ end
+end
+
+module ::JdbcSpec
+ module ActiveRecordExtensions
+ def oracle_connection(config)
+ config[:port] ||= 1521
+ config[:url] ||= "jdbc:oracle:thin:@#{config[:host]}:#{config[:port]}:#{config[:database]}"
+ config[:driver] ||= "oracle.jdbc.driver.OracleDriver"
+ jdbc_connection(config)
+ end
+ end
+
+ module Oracle
+ def self.extended(mod)
+ ActiveRecord::Base.after_save :after_save_with_oracle_lob unless @lob_callback_added
+ @lob_callback_added = true
+ end
+
+ def self.column_selector
+ [/oracle/i, lambda {|cfg,col| col.extend(::JdbcSpec::Oracle::Column)}]
+ end
+
+ def self.adapter_selector
+ [/oracle/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::Oracle)
+=begin
+ (adapt.methods - %w(send __send__ id class methods is_a? kind_of? verify! active?)).each do |name|
+ new_name = "__#{name}"
+ (class << adapt; self; end).send :alias_method, new_name, name
+ (class << adapt; self; end).send :define_method, name do |*args|
+ puts "#{name}(#{args.inspect})"
+ adapt.send new_name, *args
+ end
+ end
+=end
+ }]
+ end
+
+ module Column
+ def type_cast(value)
+ return nil if value.nil?
+ case type
+ when :string then value
+ when :integer then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :primary_key then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :float then value.to_f
+ when :datetime then JdbcSpec::Oracle::Column.cast_to_date_or_time(value)
+ when :time then JdbcSpec::Oracle::Column.cast_to_time(value)
+ when :decimal then self.class.value_to_decimal(value)
+ when :boolean then self.class.value_to_boolean(value)
+ else value
+ end
+ end
+
+ def type_cast_code(var_name)
+ case type
+ when :string then nil
+ when :integer then "(#{var_name}.to_i rescue #{var_name} ? 1 : 0)"
+ when :primary_key then "(#{var_name}.to_i rescue #{var_name} ? 1 : 0)"
+ when :float then "#{var_name}.to_f"
+ when :datetime then "JdbcSpec::Oracle::Column.cast_to_date_or_time(#{var_name})"
+ when :time then "JdbcSpec::Oracle::Column.cast_to_time(#{var_name})"
+ when :decimal then "#{self.class.name}.value_to_decimal(#{var_name})"
+ when :boolean then "#{self.class.name}.value_to_boolean(#{var_name})"
+ else nil
+ end
+ end
+
+ private
+ def simplified_type(field_type)
+ case field_type
+ when /^number\(1\)$/i : :boolean
+ when /char/i : :string
+ when /float|double/i : :float
+ when /int/i : :integer
+ when /num|dec|real/i : @scale == 0 ? :integer : :decimal
+ when /date|time/i : :datetime
+ when /clob/i : :text
+ when /blob/i : :binary
+ end
+ end
+
+ def self.cast_to_date_or_time(value)
+ return value if value.is_a? Date
+ return nil if value.blank?
+ guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
+ end
+
+ def self.cast_to_time(value)
+ return value if value.is_a? Time
+ time_array = ParseDate.parsedate value
+ time_array[0] ||= 2000; time_array[1] ||= 1; time_array[2] ||= 1;
+ Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
+ end
+
+ def self.guess_date_or_time(value)
+ (value.hour == 0 and value.min == 0 and value.sec == 0) ?
+ Date.new(value.year, value.month, value.day) : value
+ end
+ end
+
+ def table_alias_length
+ 30
+ end
+
+ def default_sequence_name(table, column) #:nodoc:
+ "#{table}_seq"
+ end
+
+ def create_table(name, options = {}) #:nodoc:
+ super(name, options)
+ seq_name = options[:sequence_name] || "#{name}_seq"
+ raise ActiveRecord::StatementInvalid.new("name #{seq_name} too long") if seq_name.length > table_alias_length
+ execute "CREATE SEQUENCE #{seq_name} START WITH 10000" unless options[:id] == false
+ end
+
+ def rename_table(name, new_name) #:nodoc:
+ execute "RENAME #{name} TO #{new_name}"
+ execute "RENAME #{name}_seq TO #{new_name}_seq" rescue nil
+ end
+
+ def drop_table(name, options = {}) #:nodoc:
+ super(name)
+ seq_name = options[:sequence_name] || "#{name}_seq"
+ execute "DROP SEQUENCE #{seq_name}" rescue nil
+ end
+
+ def recreate_database(name)
+ tables.each{ |table| drop_table(table) }
+ end
+
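+    # INSERT handling: when a primary key is known and no id was pre-assigned,
+    # the next value is fetched from the table's sequence and bound into the
+    # statement; otherwise the SQL is executed as-is.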
+ def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ if pk.nil? # Who called us? What does the sql look like? No idea!
+ execute sql, name
+ elsif id_value # Pre-assigned id
+ execute sql, name
+ else # Assume the sql contains a bind-variable for the id
+ id_value = select_one("select #{sequence_name}.nextval id from dual")['id'].to_i
+ log(sql, name) {
+ @connection.execute_id_insert(sql,id_value)
+ }
+ end
+ id_value
+ end
+
+ def indexes(table, name = nil)
+ @connection.indexes(table, name, @connection.connection.meta_data.user_name)
+ end
+
+ def _execute(sql, name = nil)
+ case sql.strip
+ when /\A\(?\s*(select|show)/i:
+ @connection.execute_query(sql)
+ else
+ @connection.execute_update(sql)
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "NUMBER(38) NOT NULL PRIMARY KEY"
+ tp[:integer] = { :name => "NUMBER", :limit => 38 }
+ tp[:datetime] = { :name => "DATE" }
+ tp[:timestamp] = { :name => "DATE" }
+ tp[:time] = { :name => "DATE" }
+ tp[:date] = { :name => "DATE" }
+ tp
+ end
+
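+    # Emulates LIMIT/OFFSET with ROWNUM; for illustration, :limit => 10, :offset => 20
+    # rewrites <sql> to:
+    #   select * from (select raw_sql_.*, rownum raw_rnum_ from (<sql>) raw_sql_
+    #                  where rownum <= 30) where raw_rnum_ > 20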
+ def add_limit_offset!(sql, options) #:nodoc:
+ offset = options[:offset] || 0
+
+ if limit = options[:limit]
+ sql.replace "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_ where rownum <= #{offset+limit}) where raw_rnum_ > #{offset}"
+ elsif offset > 0
+ sql.replace "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_) where raw_rnum_ > #{offset}"
+ end
+ end
+
+ def current_database #:nodoc:
+ select_one("select sys_context('userenv','db_name') db from dual")["db"]
+ end
+
+ def remove_index(table_name, options = {}) #:nodoc:
+ execute "DROP INDEX #{index_name(table_name, options)}"
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ execute "ALTER TABLE #{table_name} MODIFY #{column_name} DEFAULT #{quote(default)}"
+ end
+
+ def add_column_options!(sql, options) #:nodoc:
+ # handle case of defaults for CLOB columns, which would otherwise get "quoted" incorrectly
+ if options_include_default?(options) && (column = options[:column]) && column.type == :text
+ sql << " DEFAULT #{quote(options.delete(:default))}"
+ end
+ super
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ change_column_sql = "ALTER TABLE #{table_name} MODIFY #{column_name} #{type_to_sql(type, options[:limit])}"
+ add_column_options!(change_column_sql, options)
+ execute(change_column_sql)
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ execute "ALTER TABLE #{table_name} RENAME COLUMN #{column_name} to #{new_column_name}"
+ end
+
+ def remove_column(table_name, column_name) #:nodoc:
+ execute "ALTER TABLE #{table_name} DROP COLUMN #{column_name}"
+ end
+
+ def structure_dump #:nodoc:
+ s = select_all("select sequence_name from user_sequences").inject("") do |structure, seq|
+ structure << "create sequence #{seq.to_a.first.last};\n\n"
+ end
+
+ select_all("select table_name from user_tables").inject(s) do |structure, table|
+ ddl = "create table #{table.to_a.first.last} (\n "
+ cols = select_all(%Q{
+ select column_name, data_type, data_length, data_precision, data_scale, data_default, nullable
+ from user_tab_columns
+ where table_name = '#{table.to_a.first.last}'
+ order by column_id
+ }).map do |row|
+ row = row.inject({}) do |h,args|
+ h[args[0].downcase] = args[1]
+ h
+ end
+ col = "#{row['column_name'].downcase} #{row['data_type'].downcase}"
+ if row['data_type'] =='NUMBER' and !row['data_precision'].nil?
+ col << "(#{row['data_precision'].to_i}"
+ col << ",#{row['data_scale'].to_i}" if !row['data_scale'].nil?
+ col << ')'
+ elsif row['data_type'].include?('CHAR')
+ col << "(#{row['data_length'].to_i})"
+ end
+ col << " default #{row['data_default']}" if !row['data_default'].nil?
+ col << ' not null' if row['nullable'] == 'N'
+ col
+ end
+ ddl << cols.join(",\n ")
+ ddl << ");\n\n"
+ structure << ddl
+ end
+ end
+
+ def structure_drop #:nodoc:
+ s = select_all("select sequence_name from user_sequences").inject("") do |drop, seq|
+ drop << "drop sequence #{seq.to_a.first.last};\n\n"
+ end
+
+ select_all("select table_name from user_tables").inject(s) do |drop, table|
+ drop << "drop table #{table.to_a.first.last} cascade constraints;\n\n"
+ end
+ end
+
+ # SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
+ #
+ # Oracle requires the ORDER BY columns to be in the SELECT list for DISTINCT
+ # queries. However, with those columns included in the SELECT DISTINCT list, you
+ # won't actually get a distinct list of the column you want (presuming the column
+    # has duplicates with multiple values for the ordered-by columns). So we use the
+ # FIRST_VALUE function to get a single (first) value for each column, effectively
+ # making every row the same.
+ #
+ # distinct("posts.id", "posts.created_at desc")
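+    #
+    # which, for illustration, expands to roughly:
+    #
+    #   "DISTINCT posts.id, FIRST_VALUE(posts.created_at) OVER
+    #    (PARTITION BY posts.id ORDER BY posts.created_at desc) AS alias_0__"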
+ def distinct(columns, order_by)
+ return "DISTINCT #{columns}" if order_by.blank?
+
+ # construct a valid DISTINCT clause, ie. one that includes the ORDER BY columns, using
+ # FIRST_VALUE such that the inclusion of these columns doesn't invalidate the DISTINCT
+ order_columns = order_by.split(',').map { |s| s.strip }.reject(&:blank?)
+ order_columns = order_columns.zip((0...order_columns.size).to_a).map do |c, i|
+ "FIRST_VALUE(#{c.split.first}) OVER (PARTITION BY #{columns} ORDER BY #{c}) AS alias_#{i}__"
+ end
+ sql = "DISTINCT #{columns}, "
+ sql << order_columns * ", "
+ end
+
+ # ORDER BY clause for the passed order option.
+ #
+ # Uses column aliases as defined by #distinct.
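+    #
+    # e.g. options[:order] = "posts.created_at desc" (illustrative) results in the
+    # aliased clause "ORDER BY alias_0__ desc" being appended to the sql.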
+ def add_order_by_for_association_limiting!(sql, options)
+ return sql if options[:order].blank?
+
+ order = options[:order].split(',').collect { |s| s.strip }.reject(&:blank?)
+ order.map! {|s| $1 if s =~ / (.*)/}
+ order = order.zip((0...order.size).to_a).map { |s,i| "alias_#{i}__ #{s}" }.join(', ')
+
+      sql << " ORDER BY #{order}"
+ end
+
+
+ # QUOTING ==================================================
+ #
+ # see: abstract/quoting.rb
+
+ # camelCase column names need to be quoted; not that anyone using Oracle
+ # would really do this, but handling this case means we pass the test...
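+    # e.g. (with illustrative column names):
+    #   quote_column_name("firstName")  # => "\"firstName\""
+    #   quote_column_name("first_name") # => "first_name"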
+ def quote_column_name(name) #:nodoc:
+ name.to_s =~ /[A-Z]/ ? "\"#{name}\"" : name.to_s
+ end
+
+ def quote_string(string) #:nodoc:
+ string.gsub(/'/, "''")
+ end
+
+ def quote(value, column = nil) #:nodoc:
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ if column && [:text, :binary].include?(column.type)
+ if /(.*?)\([0-9]+\)/ =~ column.sql_type
+ %Q{empty_#{ $1.downcase }()}
+ else
+ %Q{empty_#{ column.sql_type.downcase rescue 'blob' }()}
+ end
+ else
+ if column && column.type == :primary_key
+ return value.to_s
+ end
+ case value
+ when String, ActiveSupport::Multibyte::Chars
+ if column.type == :datetime
+ %Q{TIMESTAMP'#{value}'}
+ else
+ %Q{'#{quote_string(value)}'}
+ end
+ when NilClass : 'null'
+ when TrueClass : '1'
+ when FalseClass : '0'
+ when Numeric : value.to_s
+ when Date, Time : %Q{TIMESTAMP'#{value.strftime("%Y-%m-%d %H:%M:%S")}'}
+ else %Q{'#{quote_string(value.to_yaml)}'}
+ end
+ end
+ end
+
+ def quoted_true #:nodoc:
+ '1'
+ end
+
+ def quoted_false #:nodoc:
+ '0'
+ end
+
+ private
+ def select(sql, name=nil)
+ records = execute(sql,name)
+ records.each do |col|
+ col.delete('raw_rnum_')
+ end
+ records
+ end
+ end
+end
--- /dev/null
+module ::JdbcSpec
+ # Don't need to load native postgres adapter
+ $LOADED_FEATURES << "active_record/connection_adapters/postgresql_adapter.rb"
+
+ module ActiveRecordExtensions
+ def postgresql_connection(config)
+ config[:host] ||= "localhost"
+ config[:port] ||= 5432
+ config[:url] ||= "jdbc:postgresql://#{config[:host]}:#{config[:port]}/#{config[:database]}"
+ config[:url] << config[:pg_params] if config[:pg_params]
+ config[:driver] ||= "org.postgresql.Driver"
+ jdbc_connection(config)
+ end
+ end
+
+ module PostgreSQL
+ def self.column_selector
+ [/postgre/i, lambda {|cfg,col| col.extend(::JdbcSpec::PostgreSQL::Column)}]
+ end
+
+ def self.adapter_selector
+ [/postgre/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::PostgreSQL)}]
+ end
+
+ module Column
+ def type_cast(value)
+ case type
+ when :boolean then cast_to_boolean(value)
+ else super
+ end
+ end
+
+ def simplified_type(field_type)
+ return :integer if field_type =~ /^serial/i
+ return :string if field_type =~ /\[\]$/i || field_type =~ /^interval/i
+ return :string if field_type =~ /^(?:point|lseg|box|"?path"?|polygon|circle)/i
+ return :datetime if field_type =~ /^timestamp/i
+ return :float if field_type =~ /^real|^money/i
+ return :binary if field_type =~ /^bytea/i
+ return :boolean if field_type =~ /^bool/i
+ super
+ end
+
+ def cast_to_boolean(value)
+ if value == true || value == false
+ value
+ else
+ %w(true t 1).include?(value.to_s.downcase)
+ end
+ end
+
+ def cast_to_date_or_time(value)
+ return value if value.is_a? Date
+ return nil if value.blank?
+ guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
+ end
+
+ def cast_to_time(value)
+ return value if value.is_a? Time
+ time_array = ParseDate.parsedate value
+ time_array[0] ||= 2000; time_array[1] ||= 1; time_array[2] ||= 1;
+ Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
+ end
+
+ def guess_date_or_time(value)
+ (value.hour == 0 and value.min == 0 and value.sec == 0) ?
+ Date.new(value.year, value.month, value.day) : value
+ end
+
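+      # Parses a PostgreSQL column default expression into a plain value, e.g.
+      # (illustrative defaults):
+      #   default_value("'sonar'::character varying")  # => "sonar"
+      #   default_value("true")                        # => "t"
+      #   default_value("nextval('users_id_seq')")     # => nil (function call, unknown)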
+ def default_value(value)
+ # Boolean types
+ return "t" if value =~ /true/i
+ return "f" if value =~ /false/i
+
+ # Char/String/Bytea type values
+ return $1 if value =~ /^'(.*)'::(bpchar|text|character varying|bytea)$/
+
+ # Numeric values
+ return value if value =~ /^-?[0-9]+(\.[0-9]*)?/
+
+ # Fixed dates / timestamp
+ return $1 if value =~ /^'(.+)'::(date|timestamp)/
+
+ # Anything else is blank, some user type, or some function
+ # and we can't know the value of that, so return nil.
+ return nil
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "serial primary key"
+ tp[:string][:limit] = 255
+ tp[:integer][:limit] = nil
+ tp[:boolean][:limit] = nil
+ tp
+ end
+
+ def default_sequence_name(table_name, pk = nil)
+ default_pk, default_seq = pk_and_sequence_for(table_name)
+ default_seq || "#{table_name}_#{pk || default_pk || 'id'}_seq"
+ end
+
+ # Resets sequence to the max value of the table's pk if present.
+ def reset_pk_sequence!(table, pk = nil, sequence = nil)
+ unless pk and sequence
+ default_pk, default_sequence = pk_and_sequence_for(table)
+ pk ||= default_pk
+ sequence ||= default_sequence
+ end
+ if pk
+ if sequence
+ select_value <<-end_sql, 'Reset sequence'
+ SELECT setval('#{sequence}', (SELECT COALESCE(MAX(#{pk})+(SELECT increment_by FROM #{sequence}), (SELECT min_value FROM #{sequence})) FROM #{table}), false)
+ end_sql
+ else
+ @logger.warn "#{table} has primary key #{pk} with no default sequence" if @logger
+ end
+ end
+ end
+
+ # Find a table's primary key and sequence.
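+    # For an illustrative table "projects" with a serial id this returns something
+    # like ["id", "public.projects_id_seq"]; nil is returned when neither lookup succeeds.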
+ def pk_and_sequence_for(table)
+ # First try looking for a sequence with a dependency on the
+ # given table's primary key.
+ result = select(<<-end_sql, 'PK and serial sequence')[0]
+ SELECT attr.attname AS nm, name.nspname AS nsp, seq.relname AS rel
+ FROM pg_class seq,
+ pg_attribute attr,
+ pg_depend dep,
+ pg_namespace name,
+ pg_constraint cons
+ WHERE seq.oid = dep.objid
+ AND seq.relnamespace = name.oid
+ AND seq.relkind = 'S'
+ AND attr.attrelid = dep.refobjid
+ AND attr.attnum = dep.refobjsubid
+ AND attr.attrelid = cons.conrelid
+ AND attr.attnum = cons.conkey[1]
+ AND cons.contype = 'p'
+ AND dep.refobjid = '#{table}'::regclass
+ end_sql
+
+ if result.nil? or result.empty?
+ # If that fails, try parsing the primary key's default value.
+ # Support the 7.x and 8.0 nextval('foo'::text) as well as
+ # the 8.1+ nextval('foo'::regclass).
+ # TODO: assumes sequence is in same schema as table.
+ result = select(<<-end_sql, 'PK and custom sequence')[0]
+ SELECT attr.attname AS nm, name.nspname AS nsp, split_part(def.adsrc, '\\\'', 2) AS rel
+ FROM pg_class t
+ JOIN pg_namespace name ON (t.relnamespace = name.oid)
+ JOIN pg_attribute attr ON (t.oid = attrelid)
+ JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
+ JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
+ WHERE t.oid = '#{table}'::regclass
+ AND cons.contype = 'p'
+ AND def.adsrc ~* 'nextval'
+ end_sql
+ end
+      # If the sequence name already includes a schema (e.g. public.foo_sequence), use it as-is; otherwise prefix the current namespace.
+ result['rel']['.'] ? [result['nm'], result['rel']] : [result['nm'], "#{result['nsp']}.#{result['rel']}"]
+ rescue
+ nil
+ end
+
+ def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ execute(sql, name)
+ table = sql.split(" ", 4)[2]
+ id_value || pk && last_insert_id(table, sequence_name || default_sequence_name(table, pk))
+ end
+
+ def columns(table_name, name=nil)
+ schema_name = "public"
+ if table_name =~ /\./
+ parts = table_name.split(/\./)
+ table_name = parts.pop
+ schema_name = parts.join(".")
+ end
+ @connection.columns_internal(table_name, name, schema_name)
+ end
+
+ # From postgresql_adapter.rb
+ def indexes(table_name, name = nil)
+ result = select_rows(<<-SQL, name)
+ SELECT i.relname, d.indisunique, a.attname
+ FROM pg_class t, pg_class i, pg_index d, pg_attribute a
+ WHERE i.relkind = 'i'
+ AND d.indexrelid = i.oid
+ AND d.indisprimary = 'f'
+ AND t.oid = d.indrelid
+ AND t.relname = '#{table_name}'
+ AND a.attrelid = t.oid
+ AND ( d.indkey[0]=a.attnum OR d.indkey[1]=a.attnum
+ OR d.indkey[2]=a.attnum OR d.indkey[3]=a.attnum
+ OR d.indkey[4]=a.attnum OR d.indkey[5]=a.attnum
+ OR d.indkey[6]=a.attnum OR d.indkey[7]=a.attnum
+ OR d.indkey[8]=a.attnum OR d.indkey[9]=a.attnum )
+ ORDER BY i.relname
+ SQL
+
+ current_index = nil
+ indexes = []
+
+ result.each do |row|
+ if current_index != row[0]
+ indexes << ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, row[0], row[1] == "t", [])
+ current_index = row[0]
+ end
+
+ indexes.last.columns << row[2]
+ end
+
+ indexes
+ end
+
+ def last_insert_id(table, sequence_name)
+ Integer(select_value("SELECT currval('#{sequence_name}')"))
+ end
+
+ def recreate_database(name)
+ drop_database(name)
+ create_database(name)
+ end
+
+ def create_database(name, options = {})
+ execute "CREATE DATABASE \"#{name}\" ENCODING='#{options[:encoding] || 'utf8'}'"
+ end
+
+ def drop_database(name)
+ execute "DROP DATABASE \"#{name}\""
+ end
+
+ def structure_dump
+ database = @config[:database]
+ if database.nil?
+ if @config[:url] =~ /\/([^\/]*)$/
+ database = $1
+ else
+          raise "Could not determine the database name from the url #{@config[:url]}"
+ end
+ end
+
+ ENV['PGHOST'] = @config[:host] if @config[:host]
+ ENV['PGPORT'] = @config[:port].to_s if @config[:port]
+ ENV['PGPASSWORD'] = @config[:password].to_s if @config[:password]
+ search_path = @config[:schema_search_path]
+ search_path = "--schema=#{search_path}" if search_path
+
+ @connection.connection.close
+ begin
+ file = "db/#{RAILS_ENV}_structure.sql"
+ `pg_dump -i -U "#{@config[:username]}" -s -x -O -f #{file} #{search_path} #{database}`
+ raise "Error dumping database" if $?.exitstatus == 1
+
+ # need to patch away any references to SQL_ASCII as it breaks the JDBC driver
+ lines = File.readlines(file)
+ File.open(file, "w") do |io|
+ lines.each do |line|
+ line.gsub!(/SQL_ASCII/, 'UNICODE')
+ io.write(line)
+ end
+ end
+ ensure
+ reconnect!
+ end
+ end
+
+ def _execute(sql, name = nil)
+ case sql.strip
+ when /\A\(?\s*(select|show)/i:
+ @connection.execute_query(sql)
+ else
+ @connection.execute_update(sql)
+ end
+ end
+
+ # SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
+ #
+ # PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
+ # requires that the ORDER BY include the distinct column.
+ #
+ # distinct("posts.id", "posts.created_at desc")
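+    #
+    # which, for illustration, expands to:
+    #
+    #   "DISTINCT ON (posts.id) posts.id, posts.created_at AS alias_0"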
+ def distinct(columns, order_by)
+ return "DISTINCT #{columns}" if order_by.blank?
+
+ # construct a clean list of column names from the ORDER BY clause, removing
+ # any asc/desc modifiers
+ order_columns = order_by.split(',').collect { |s| s.split.first }
+ order_columns.delete_if(&:blank?)
+ order_columns = order_columns.zip((0...order_columns.size).to_a).map { |s,i| "#{s} AS alias_#{i}" }
+
+ # return a DISTINCT ON() clause that's distinct on the columns we want but includes
+ # all the required columns for the ORDER BY to work properly
+ sql = "DISTINCT ON (#{columns}) #{columns}, "
+ sql << order_columns * ', '
+ end
+
+ # ORDER BY clause for the passed order option.
+ #
+ # PostgreSQL does not allow arbitrary ordering when using DISTINCT ON, so we work around this
+ # by wrapping the sql as a sub-select and ordering in that query.
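+    #
+    # e.g. with options[:order] = "posts.created_at desc" (illustrative), the sql is
+    # wrapped as:
+    #
+    #   "SELECT * FROM (<sql>) AS id_list ORDER BY id_list.alias_0 DESC"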
+ def add_order_by_for_association_limiting!(sql, options)
+ return sql if options[:order].blank?
+
+ order = options[:order].split(',').collect { |s| s.strip }.reject(&:blank?)
+ order.map! { |s| 'DESC' if s =~ /\bdesc$/i }
+ order = order.zip((0...order.size).to_a).map { |s,i| "id_list.alias_#{i} #{s}" }.join(', ')
+
+ sql.replace "SELECT * FROM (#{sql}) AS id_list ORDER BY #{order}"
+ end
+
+ def quote(value, column = nil)
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ if value.kind_of?(String) && column && column.type == :binary
+ "'#{escape_bytea(value)}'"
+ elsif column && column.type == :primary_key
+ return value.to_s
+ else
+ super
+ end
+ end
+
+ def escape_bytea(s)
+ if s
+ result = ''
+ s.each_byte { |c| result << sprintf('\\\\%03o', c) }
+ result
+ end
+ end
+
+ def quote_column_name(name)
+ %("#{name}")
+ end
+
+ def quoted_date(value)
+ value.strftime("%Y-%m-%d %H:%M:%S")
+ end
+
+ def disable_referential_integrity(&block) #:nodoc:
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
+ yield
+ ensure
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
+ end
+
+ def rename_table(name, new_name)
+ execute "ALTER TABLE #{name} RENAME TO #{new_name}"
+ end
+
+ def add_column(table_name, column_name, type, options = {})
+ execute("ALTER TABLE #{table_name} ADD #{column_name} #{type_to_sql(type, options[:limit])}")
+ change_column_default(table_name, column_name, options[:default]) unless options[:default].nil?
+ if options[:null] == false
+ execute("UPDATE #{table_name} SET #{column_name} = '#{options[:default]}'") if options[:default]
+ execute("ALTER TABLE #{table_name} ALTER #{column_name} SET NOT NULL")
+ end
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ begin
+ execute "ALTER TABLE #{table_name} ALTER #{column_name} TYPE #{type_to_sql(type, options[:limit])}"
+ rescue ActiveRecord::StatementInvalid
+ # This is PG7, so we use a more arcane way of doing it.
+ begin_db_transaction
+ add_column(table_name, "#{column_name}_ar_tmp", type, options)
+ execute "UPDATE #{table_name} SET #{column_name}_ar_tmp = CAST(#{column_name} AS #{type_to_sql(type, options[:limit])})"
+ remove_column(table_name, column_name)
+ rename_column(table_name, "#{column_name}_ar_tmp", column_name)
+ commit_db_transaction
+ end
+ change_column_default(table_name, column_name, options[:default]) unless options[:default].nil?
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT '#{default}'"
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ execute "ALTER TABLE #{table_name} RENAME COLUMN #{column_name} TO #{new_column_name}"
+ end
+
+ def remove_index(table_name, options) #:nodoc:
+ execute "DROP INDEX #{index_name(table_name, options)}"
+ end
+
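+    # Maps integer limits to PostgreSQL types, e.g. :limit => 2 -> "smallint",
+    # nil or 4 -> "integer", 8 -> "bigint".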
+ def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
+ return super unless type.to_s == 'integer'
+
+ if limit.nil? || limit == 4
+ 'integer'
+ elsif limit < 4
+ 'smallint'
+ else
+ 'bigint'
+ end
+ end
+
+ def tables
+ @connection.tables(database_name, nil, nil, ["TABLE"])
+ end
+ end
+end
--- /dev/null
+module ::JdbcSpec
+ module ActiveRecordExtensions
+ def sqlite3_connection(config)
+ config[:url] ||= "jdbc:sqlite:#{config[:database]}"
+ config[:driver] ||= "org.sqlite.JDBC"
+ jdbc_connection(config)
+ end
+ end
+
+ module SQLite3
+ def self.column_selector
+ [/sqlite/i, lambda {|cfg,col| col.extend(::JdbcSpec::SQLite3::Column)}]
+ end
+
+ def self.adapter_selector
+ [/sqlite/i, lambda {|cfg,adapt| adapt.extend(::JdbcSpec::SQLite3)}]
+ end
+
+ module Column
+
+ private
+ def simplified_type(field_type)
+ case field_type
+ when /^integer\(1\)$/i then :boolean
+ when /text/i then :string
+ when /int/i then :integer
+ when /real/i then @scale == 0 ? :integer : :decimal
+ when /date|time/i then :datetime
+ when /blob/i then :binary
+ end
+ end
+
+ def self.cast_to_date_or_time(value)
+ return value if value.is_a? Date
+ return nil if value.blank?
+ guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
+ end
+
+ def self.cast_to_time(value)
+ return value if value.is_a? Time
+ Time.at(value) rescue nil
+ end
+
+ def self.guess_date_or_time(value)
+ (value.hour == 0 and value.min == 0 and value.sec == 0) ?
+ Date.new(value.year, value.month, value.day) : value
+ end
+ end
+
+ def type_cast(value)
+ return nil if value.nil?
+ case type
+ when :string then value
+ when :integer then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :primary_key then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
+ when :float then value.to_f
+ when :datetime then JdbcSpec::SQLite3::Column.cast_to_date_or_time(value)
+ when :time then JdbcSpec::SQLite3::Column.cast_to_time(value)
+ when :decimal then self.class.value_to_decimal(value)
+ when :boolean then self.class.value_to_boolean(value)
+ else value
+ end
+ end
+
+ def modify_types(tp)
+ tp[:primary_key] = "INTEGER PRIMARY KEY AUTOINCREMENT"
+ tp[:float] = { :name => "REAL" }
+ tp[:decimal] = { :name => "REAL" }
+ tp[:datetime] = { :name => "INTEGER" }
+ tp[:timestamp] = { :name => "INTEGER" }
+ tp[:time] = { :name => "INTEGER" }
+ tp[:date] = { :name => "INTEGER" }
+ tp[:boolean] = { :name => "INTEGER", :limit => 1}
+ tp
+ end
+
+ def quote(value, column = nil) # :nodoc:
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ case value
+ when String
+ if column && column.type == :binary
+ "'#{quote_string(value).unpack("C*").collect {|v| v.to_s(16)}.join}'"
+ else
+ "'#{quote_string(value)}'"
+ end
+ else super
+ end
+ end
+
+ def quote_string(str)
+ str.gsub(/'/, "''")
+ end
+
+ def quoted_true
+ '1'
+ end
+
+ def quoted_false
+ '0'
+ end
+
+ def add_column(table_name, column_name, type, options = {})
+ if option_not_null = options[:null] == false
+ option_not_null = options.delete(:null)
+ end
+ add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
+ add_column_options!(add_column_sql, options)
+ execute(add_column_sql)
+ if option_not_null
+ alter_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} NOT NULL"
+ end
+ end
+
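+    # SQLite cannot DROP COLUMN directly, so the table is rebuilt: the remaining
+    # columns are copied to a temporary backup table, the table is recreated
+    # without the column, and the rows are copied back.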
+ def remove_column(table_name, column_name) #:nodoc:
+ cols = columns(table_name).collect {|col| col.name}
+ cols.delete(column_name)
+ cols = cols.join(', ')
+ table_backup = table_name + "_backup"
+
+ @connection.begin
+
+ execute "CREATE TEMPORARY TABLE #{table_backup}(#{cols})"
+ insert "INSERT INTO #{table_backup} SELECT #{cols} FROM #{table_name}"
+ execute "DROP TABLE #{table_name}"
+ execute "CREATE TABLE #{table_name}(#{cols})"
+ insert "INSERT INTO #{table_name} SELECT #{cols} FROM #{table_backup}"
+ execute "DROP TABLE #{table_backup}"
+
+ @connection.commit
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit])}"
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{quote(default)}"
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} RENAME TO #{new_column_name}"
+ end
+
+ def rename_table(name, new_name)
+ execute "ALTER TABLE #{name} RENAME TO #{new_name}"
+ end
+
+ def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ log(sql,name) do
+ @connection.execute_update(sql)
+ end
+ table = sql.split(" ", 4)[2]
+ id_value || last_insert_id(table, nil)
+ end
+
+ def last_insert_id(table, sequence_name)
+ Integer(select_value("SELECT SEQ FROM SQLITE_SEQUENCE WHERE NAME = '#{table}'"))
+ end
+
+ def add_limit_offset!(sql, options) #:nodoc:
+ if options[:limit]
+ sql << " LIMIT #{options[:limit]}"
+ sql << " OFFSET #{options[:offset]}" if options[:offset]
+ end
+ end
+
+ def tables
+ @connection.tables.select {|row| row.to_s !~ /^sqlite_/i }
+ end
+
+ def remove_index(table_name, options = {})
+ execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
+ end
+
+ def indexes(table_name, name = nil)
+ result = select_rows("SELECT name, sql FROM sqlite_master WHERE tbl_name = '#{table_name}' AND type = 'index'", name)
+
+ result.collect do |row|
+ name = row[0]
+ index_sql = row[1]
+ unique = (index_sql =~ /unique/i)
+ cols = index_sql.match(/\((.*)\)/)[1].gsub(/,/,' ').split
+ ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, name, unique, cols)
+ end
+ end
+ end
+end
--- /dev/null
+module JdbcSpec
+ module Sybase
+ def self.adapter_selector
+ [/sybase/i, lambda{|cfg,adapt| adapt.extend(JdbcSpec::Sybase)}]
+ end
+
+ def add_limit_offset!(sql, options) # :nodoc:
+ @limit = options[:limit]
+ @offset = options[:offset]
+ if use_temp_table?
+ # Use temp table to hack offset with Sybase
+ sql.sub!(/ FROM /i, ' INTO #artemp FROM ')
+ elsif zero_limit?
+        # "SET ROWCOUNT 0" turns off limits, so we have
+ # to use a cheap trick.
+ if sql =~ /WHERE/i
+ sql.sub!(/WHERE/i, 'WHERE 1 = 2 AND ')
+ elsif sql =~ /ORDER\s+BY/i
+ sql.sub!(/ORDER\s+BY/i, 'WHERE 1 = 2 ORDER BY')
+ else
+          sql << ' WHERE 1 = 2'
+ end
+ end
+ end
+
+ # If limit is not set at all, we can ignore offset;
+ # if limit *is* set but offset is zero, use normal select
+ # with simple SET ROWCOUNT. Thus, only use the temp table
+ # if limit is set and offset > 0.
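+    #
+    # For illustration:
+    #   :limit => 10, :offset => 0   -> plain SET ROWCOUNT select
+    #   :limit => 10, :offset => 20  -> #artemp temp-table workaround
+    #   :limit => 0                  -> impossible WHERE clause (1 = 2)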
+ def use_temp_table?
+ !@limit.nil? && !@offset.nil? && @offset > 0
+ end
+
+ def zero_limit?
+ !@limit.nil? && @limit == 0
+ end
+
+ end
+end
--- /dev/null
+module JdbcSpec
+ module MissingFunctionalityHelper
+    # Taken from the SQLite adapter
+
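+    # Emulates ALTER TABLE for databases lacking full support: the table is copied
+    # to "altered_<name>" and then back, giving the optional block a chance to adjust
+    # the new table definition in between.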
+ def alter_table(table_name, options = {}) #:nodoc:
+ table_name = table_name.to_s.downcase
+ altered_table_name = "altered_#{table_name}"
+ caller = lambda {|definition| yield definition if block_given?}
+
+ transaction do
+ move_table(table_name, altered_table_name, options)
+ move_table(altered_table_name, table_name, &caller)
+ end
+ end
+
+ def move_table(from, to, options = {}, &block) #:nodoc:
+ copy_table(from, to, options, &block)
+ drop_table(from)
+ end
+
+ def copy_table(from, to, options = {}) #:nodoc:
+ create_table(to, options) do |@definition|
+ columns(from).each do |column|
+ column_name = options[:rename] ?
+ (options[:rename][column.name] ||
+ options[:rename][column.name.to_sym] ||
+ column.name) : column.name
+ column_name = column_name.to_s
+ @definition.column(column_name, column.type,
+ :limit => column.limit, :default => column.default,
+ :null => column.null)
+ end
+ @definition.primary_key(primary_key(from))
+ yield @definition if block_given?
+ end
+
+ copy_table_indexes(from, to)
+ copy_table_contents(from, to,
+ @definition.columns,
+ options[:rename] || {})
+ end
+
+ def copy_table_indexes(from, to) #:nodoc:
+ indexes(from).each do |index|
+ name = index.name.downcase
+ if to == "altered_#{from}"
+ name = "temp_#{name}"
+ elsif from == "altered_#{to}"
+ name = name[5..-1]
+ end
+
+ # index name can't be the same
+ opts = { :name => name.gsub(/_(#{from})_/, "_#{to}_") }
+ opts[:unique] = true if index.unique
+ add_index(to, index.columns, opts)
+ end
+ end
+
+ def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
+ column_mappings = Hash[*columns.map {|col| [col.name, col.name]}.flatten]
+ rename.inject(column_mappings) {|map, a| map[a.last] = a.first; map}
+ from_columns = columns(from).collect {|col| col.name}
+ columns = columns.find_all{|col| from_columns.include?(column_mappings[col.name])}
+ execute("SELECT * FROM #{from}").each do |row|
+ sql = "INSERT INTO #{to} ("+columns.map(&:name)*','+") VALUES ("
+ sql << columns.map {|col| quote(row[column_mappings[col.name]],col)} * ', '
+ sql << ')'
+ execute sql
+ end
+ end
+ end
+end
--- /dev/null
+if defined?(Rake.application) && Rake.application && ENV["SKIP_AR_JDBC_RAKE_REDEFINES"].nil?
+ jdbc_rakefile = File.dirname(__FILE__) + "/jdbc.rake"
+ if Rake.application.lookup("environment")
+ # rails tasks already defined; load the override tasks now
+ load jdbc_rakefile
+ else
+ # rails tasks not loaded yet; load as an import
+ Rake.application.add_import(jdbc_rakefile)
+ end
+end
--- /dev/null
+# Common methods for handling TSQL databases.
+module TSqlMethods
+
+ def modify_types(tp) #:nodoc:
+ tp[:primary_key] = "int NOT NULL IDENTITY(1, 1) PRIMARY KEY"
+ tp[:integer][:limit] = nil
+ tp[:boolean] = {:name => "bit"}
+ tp[:binary] = { :name => "image"}
+ tp
+ end
+
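+  # Maps integer limits to T-SQL types, e.g. :limit => 1 -> "tinyint", 2 -> "smallint",
+  # nil or 4 -> "int", 8 -> "bigint".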
+ def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
+ return super unless type.to_s == 'integer'
+
+ if limit.nil? || limit == 4
+ 'int'
+ elsif limit == 2
+ 'smallint'
+ elsif limit == 1
+ 'tinyint'
+ else
+ 'bigint'
+ end
+ end
+
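+  # Emulates LIMIT/OFFSET on SQL Server with nested TOP queries; a rough sketch of
+  # the rewrite (aliasing and ORDER BY fix-ups omitted):
+  #
+  #   SELECT * FROM (
+  #     SELECT TOP <limit> * FROM (SELECT TOP <limit + offset> ...) AS tmp1
+  #     ORDER BY <reversed order>
+  #   ) AS tmp2 ORDER BY <original order>
+  #
+  # The limit is first clamped against a COUNT(*) of the full result set.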
+ def add_limit_offset!(sql, options)
+ if options[:limit] and options[:offset]
+ total_rows = select_all("SELECT count(*) as TotalRows from (#{sql.gsub(/\bSELECT(\s+DISTINCT)?\b/i, "SELECT\\1 TOP 1000000000")}) tally")[0]["TotalRows"].to_i
+ if (options[:limit] + options[:offset]) >= total_rows
+ options[:limit] = (total_rows - options[:offset] >= 0) ? (total_rows - options[:offset]) : 0
+ end
+ sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i, "SELECT * FROM (SELECT TOP #{options[:limit]} * FROM (SELECT\\1 TOP #{options[:limit] + options[:offset]} ")
+ sql << ") AS tmp1"
+ if options[:order]
+ options[:order] = options[:order].split(',').map do |field|
+ parts = field.split(" ")
+ tc = parts[0]
+ if sql =~ /\.\[/ and tc =~ /\./ # if column quoting used in query
+ tc.gsub!(/\./, '\\.\\[')
+ tc << '\\]'
+ end
+ if sql =~ /#{tc} AS (t\d_r\d\d?)/
+ parts[0] = $1
+ elsif parts[0] =~ /\w+\.(\w+)/
+ parts[0] = $1
+ end
+ parts.join(' ')
+ end.join(', ')
+ sql << " ORDER BY #{change_order_direction(options[:order])}) AS tmp2 ORDER BY #{options[:order]}"
+ else
+ sql << " ) AS tmp2"
+ end
+ elsif sql !~ /^\s*SELECT (@@|COUNT\()/i
+ sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i) do
+ "SELECT#{$1} TOP #{options[:limit]}"
+ end unless options[:limit].nil?
+ end
+ end
+end
--- /dev/null
+module JdbcAdapter
+ module Version
+ VERSION = "0.9.0.1"
+ end
+end
+++ /dev/null
---- !ruby/object:Gem::Specification
-name: activerecord-jdbc-adapter
-version: !ruby/object:Gem::Version
- prerelease: false
- segments:
- - 1
- - 1
- - 3
- version: 1.1.3
-platform: ruby
-authors:
- - Nick Sieger, Ola Bini and JRuby contributors
-autorequire:
-bindir: bin
-cert_chain: []
-
-date: 2011-07-26 00:00:00 +02:00
-default_executable:
-dependencies:
- - !ruby/object:Gem::Dependency
- name: rubyforge
- prerelease: false
- requirement: &id001 !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 2
- - 0
- - 4
- version: 2.0.4
- type: :development
- version_requirements: *id001
- - !ruby/object:Gem::Dependency
- name: hoe
- prerelease: false
- requirement: &id002 !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 2
- - 9
- - 4
- version: 2.9.4
- type: :development
- version_requirements: *id002
-description: |-
- activerecord-jdbc-adapter is a database adapter for Rails' ActiveRecord
- component that can be used with JRuby[http://www.jruby.org/]. It allows use of
- virtually any JDBC-compliant database with your JRuby on Rails application.
-email: nick@nicksieger.com, ola.bini@gmail.com
-executables: []
-
-extensions: []
-
-extra_rdoc_files:
- - History.txt
- - Manifest.txt
- - README.txt
- - LICENSE.txt
-files:
- - History.txt
- - Manifest.txt
- - README.txt
- - Rakefile
- - LICENSE.txt
- - lib/activerecord-jdbc-adapter.rb
- - lib/arjdbc.rb
- - lib/jdbc_adapter.rb
- - lib/pg.rb
- - lib/active_record/connection_adapters/derby_adapter.rb
- - lib/active_record/connection_adapters/h2_adapter.rb
- - lib/active_record/connection_adapters/hsqldb_adapter.rb
- - lib/active_record/connection_adapters/informix_adapter.rb
- - lib/active_record/connection_adapters/jdbc_adapter.rb
- - lib/active_record/connection_adapters/jndi_adapter.rb
- - lib/active_record/connection_adapters/mssql_adapter.rb
- - lib/active_record/connection_adapters/mysql2_adapter.rb
- - lib/active_record/connection_adapters/mysql_adapter.rb
- - lib/active_record/connection_adapters/oracle_adapter.rb
- - lib/active_record/connection_adapters/postgresql_adapter.rb
- - lib/active_record/connection_adapters/sqlite3_adapter.rb
- - lib/arel/engines/sql/compilers/db2_compiler.rb
- - lib/arel/engines/sql/compilers/derby_compiler.rb
- - lib/arel/engines/sql/compilers/h2_compiler.rb
- - lib/arel/engines/sql/compilers/hsqldb_compiler.rb
- - lib/arel/engines/sql/compilers/jdbc_compiler.rb
- - lib/arel/engines/sql/compilers/mssql_compiler.rb
- - lib/arel/visitors/compat.rb
- - lib/arel/visitors/db2.rb
- - lib/arel/visitors/derby.rb
- - lib/arel/visitors/firebird.rb
- - lib/arel/visitors/hsqldb.rb
- - lib/arel/visitors/sql_server.rb
- - lib/arjdbc/db2.rb
- - lib/arjdbc/derby.rb
- - lib/arjdbc/discover.rb
- - lib/arjdbc/firebird.rb
- - lib/arjdbc/h2.rb
- - lib/arjdbc/hsqldb.rb
- - lib/arjdbc/informix.rb
- - lib/arjdbc/jdbc.rb
- - lib/arjdbc/mimer.rb
- - lib/arjdbc/mssql.rb
- - lib/arjdbc/mysql.rb
- - lib/arjdbc/oracle.rb
- - lib/arjdbc/postgresql.rb
- - lib/arjdbc/sqlite3.rb
- - lib/arjdbc/sybase.rb
- - lib/arjdbc/version.rb
- - lib/arjdbc/db2/adapter.rb
- - lib/arjdbc/derby/adapter.rb
- - lib/arjdbc/derby/connection_methods.rb
- - lib/arjdbc/firebird/adapter.rb
- - lib/arjdbc/h2/adapter.rb
- - lib/arjdbc/h2/connection_methods.rb
- - lib/arjdbc/hsqldb/adapter.rb
- - lib/arjdbc/hsqldb/connection_methods.rb
- - lib/arjdbc/informix/adapter.rb
- - lib/arjdbc/informix/connection_methods.rb
- - lib/arjdbc/jdbc/adapter.rb
- - lib/arjdbc/jdbc/callbacks.rb
- - lib/arjdbc/jdbc/column.rb
- - lib/arjdbc/jdbc/compatibility.rb
- - lib/arjdbc/jdbc/connection.rb
- - lib/arjdbc/jdbc/connection_methods.rb
- - lib/arjdbc/jdbc/core_ext.rb
- - lib/arjdbc/jdbc/discover.rb
- - lib/arjdbc/jdbc/driver.rb
- - lib/arjdbc/jdbc/extension.rb
- - lib/arjdbc/jdbc/java.rb
- - lib/arjdbc/jdbc/missing_functionality_helper.rb
- - lib/arjdbc/jdbc/quoted_primary_key.rb
- - lib/arjdbc/jdbc/railtie.rb
- - lib/arjdbc/jdbc/rake_tasks.rb
- - lib/arjdbc/jdbc/require_driver.rb
- - lib/arjdbc/jdbc/type_converter.rb
- - lib/arjdbc/mimer/adapter.rb
- - lib/arjdbc/mssql/adapter.rb
- - lib/arjdbc/mssql/connection_methods.rb
- - lib/arjdbc/mssql/limit_helpers.rb
- - lib/arjdbc/mssql/tsql_helper.rb
- - lib/arjdbc/mysql/adapter.rb
- - lib/arjdbc/mysql/connection_methods.rb
- - lib/arjdbc/oracle/adapter.rb
- - lib/arjdbc/oracle/connection_methods.rb
- - lib/arjdbc/postgresql/adapter.rb
- - lib/arjdbc/postgresql/connection_methods.rb
- - lib/arjdbc/sqlite3/adapter.rb
- - lib/arjdbc/sqlite3/connection_methods.rb
- - lib/arjdbc/sybase/adapter.rb
- - lib/generators/jdbc/jdbc_generator.rb
- - lib/jdbc_adapter/rake_tasks.rb
- - lib/jdbc_adapter/version.rb
- - lib/arjdbc/jdbc/adapter_java.jar
- - test/abstract_db_create.rb
- - test/db2_simple_test.rb
- - test/derby_migration_test.rb
- - test/derby_multibyte_test.rb
- - test/derby_simple_test.rb
- - test/generic_jdbc_connection_test.rb
- - test/h2_simple_test.rb
- - test/has_many_through.rb
- - test/helper.rb
- - test/hsqldb_simple_test.rb
- - test/informix_simple_test.rb
- - test/jdbc_common.rb
- - test/jndi_callbacks_test.rb
- - test/jndi_test.rb
- - test/manualTestDatabase.rb
- - test/mssql_db_create_test.rb
- - test/mssql_identity_insert_test.rb
- - test/mssql_legacy_types_test.rb
- - test/mssql_limit_offset_test.rb
- - test/mssql_multibyte_test.rb
- - test/mssql_simple_test.rb
- - test/mysql_db_create_test.rb
- - test/mysql_info_test.rb
- - test/mysql_multibyte_test.rb
- - test/mysql_nonstandard_primary_key_test.rb
- - test/mysql_simple_test.rb
- - test/oracle_simple_test.rb
- - test/oracle_specific_test.rb
- - test/pick_rails_version.rb
- - test/postgres_db_create_test.rb
- - test/postgres_drop_db_test.rb
- - test/postgres_information_schema_leak_test.rb
- - test/postgres_mixed_case_test.rb
- - test/postgres_native_type_mapping_test.rb
- - test/postgres_nonseq_pkey_test.rb
- - test/postgres_reserved_test.rb
- - test/postgres_schema_search_path_test.rb
- - test/postgres_simple_test.rb
- - test/postgres_table_alias_length_test.rb
- - test/simple.rb
- - test/sqlite3_simple_test.rb
- - test/sybase_jtds_simple_test.rb
- - test/activerecord/connection_adapters/type_conversion_test.rb
- - test/activerecord/connections/native_jdbc_mysql/connection.rb
- - test/db/db2.rb
- - test/db/derby.rb
- - test/db/h2.rb
- - test/db/hsqldb.rb
- - test/db/informix.rb
- - test/db/jdbc.rb
- - test/db/jndi_config.rb
- - test/db/logger.rb
- - test/db/mssql.rb
- - test/db/mysql.rb
- - test/db/oracle.rb
- - test/db/postgres.rb
- - test/db/sqlite3.rb
- - test/models/add_not_null_column_to_table.rb
- - test/models/auto_id.rb
- - test/models/data_types.rb
- - test/models/entry.rb
- - test/models/mixed_case.rb
- - test/models/reserved_word.rb
- - test/models/string_id.rb
- - test/models/validates_uniqueness_of_string.rb
- - lib/arjdbc/jdbc/jdbc.rake
- - src/java/arjdbc/db2/DB2RubyJdbcConnection.java
- - src/java/arjdbc/derby/DerbyModule.java
- - src/java/arjdbc/h2/H2RubyJdbcConnection.java
- - src/java/arjdbc/informix/InformixRubyJdbcConnection.java
- - src/java/arjdbc/jdbc/AdapterJavaService.java
- - src/java/arjdbc/jdbc/JdbcConnectionFactory.java
- - src/java/arjdbc/jdbc/RubyJdbcConnection.java
- - src/java/arjdbc/jdbc/SQLBlock.java
- - src/java/arjdbc/mssql/MssqlRubyJdbcConnection.java
- - src/java/arjdbc/mysql/MySQLModule.java
- - src/java/arjdbc/mysql/MySQLRubyJdbcConnection.java
- - src/java/arjdbc/oracle/OracleRubyJdbcConnection.java
- - src/java/arjdbc/postgresql/PostgresqlRubyJdbcConnection.java
- - src/java/arjdbc/sqlite3/Sqlite3RubyJdbcConnection.java
- - rakelib/compile.rake
- - rakelib/db.rake
- - rakelib/package.rake
- - rakelib/rails.rake
- - rakelib/test.rake
- - rails_generators/jdbc_generator.rb
- - rails_generators/templates/config/initializers/jdbc.rb
- - rails_generators/templates/lib/tasks/jdbc.rake
- - .gemtest
-has_rdoc: true
-homepage: http://jruby-extras.rubyforge.org/activerecord-jdbc-adapter
-licenses: []
-
-post_install_message:
-rdoc_options:
- - --main
- - README.txt
- - -SHN
- - -f
- - darkfish
-require_paths:
- - lib
-required_ruby_version: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 0
- version: "0"
-required_rubygems_version: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 0
- version: "0"
-requirements: []
-
-rubyforge_project: jruby-extras
-rubygems_version: 1.3.6
-signing_key:
-specification_version: 3
-summary: JDBC adapter for ActiveRecord, for use within JRuby on Rails.
-test_files: []
-
-
+++ /dev/null
-== 1.1.3
-
-- Remove AR version < 3 guard around some caching methods (sugg. invadersmustdie)
-- Small bug in arjdbc/discover logic, thanks autotelik.
-- Added bigint serial support + some testcases for native type mapping (postgres only)
-- mssql: use subscript instead of #first. (Kim Toms)
-- #71: fix yield called out of block error
-- Silence Rake::DSL warnings for Rake > 0.9
-
-== 1.1.2
-
-- Update version of H2 driver from 1.1.107 to 1.3.153 (Ketan
- Padegaonkar, Jeremy Stephens)
-- Fix errors in db:test:clone_structure with PostgreSQL (Andrea Campi)
-- Fixing limit for sqlServer2000 if primary key is not named 'id'
- (Luca Simone)
-- DB2: define jdbc_columns (fixes table_exists? bug) (Nick Kreucher)
-- ACTIVERECORD_JDBC-152 - omitting limit when dumping bytea fields
- (Gregor Schmidt)
-- Postgres doesn't support a limit for bytea columns (Alex Tambellini)
-- JRUBY-5642: Default to schema public if no schema given for postgres
- (Anthony Juckel)
-- Sqlite3 supports float data type so use float (Alex Tambellini)
-- GH #21: Now using sqlite3 driver from
- http://www.xerial.org/trac/Xerial/wiki/SQLiteJDBC (thanks Ukabu)
-- GH #65: PG: Respect integer sizes (Alex Tambellini)
-- GH #59: PG: Properly escape bytea-escaped string
-- GH #53: oracle: allow configuration of schema through schema: key
-- GH #50: PG: support multiple schema in search_path (Daniel
- Schreiber)
-- GH #25: Reload ArJdbc.column_types if number of constants changed
-- GH #47: Allow table statistics for indexes to be approximate; speeds
- up Oracle
-- GH #67: Change primary_keys to use the same catalog/schema/table
- separation logic as columns_internal (Marcus Brito). This change
- allows set_table_name to specify a custom schema.
-- GH #49: mssql: quote table names like column names
-- GH #56: mssql: Fix 'select 1' behavior introduced by AR 3.0.7
-- GH #55: Make decimal columns with no precision or scale stay
- decimals
-- GH #45: Add Arel limit support for Firebird (Systho))
-- GH #39: PG: allow negative integer default values
-- GH #19: Make a stub Mysql::Error class
-- ACTIVERECORD_JDBC-148: mssql: Ensure regex doesn't match 'from' in a
- field name
-- GH#31: mssql: Remove extra code breaking mssql w/o limit
-- ACTIVERECORD_JDBC-156: mssql: Logic fix for detecting select_count?
-
-== 1.1.1
-
-- Arel 2.0.7 compatibility: fix bugs arising from use of Arel 2.0.7 +
- ArJdbc 1.1.0.
- - Gracefully handle changes to limit in Arel's AST
- - Avoid conflict with Arel 2.0.7's mssql visitor
-- Upgrade to PostgreSQL 9.0.801 JDBC drivers (David Kellum)
-
-== 1.1.0 (12/09/10)
-
-- Don't narrow platform to '-java' only: revert back to 0.9.2 where
- ar-jdbc can be installed under any Ruby (for easier Bundler/Warbler
- usage and less confusion on rubygems.org).
-- Upgrade MySQL execute code to use RETURN_GENERATED_KEYS.
-- Upgrade to MySQL driver version 5.1.13
-- Add multi-statement support, idea from oruen. For databases that
- support it, you can now do:
- results = Model.connection.execute("select 1; select 2")
- and receive back an array of multiple result set arrays. For use with
- MySQL, you need to add
- options:
- allowMultiQueries: true
- in database.yml.
-- ACTIVERECORD_JDBC-144: Fix limits appearing in schema dump for some
- datatypes (Uwe Kubosch)
-- Fixes for DB2 limit/offset
-- Fix rake db:create for 'jdbc' adapter (Joeri Samson)
-- add create/drop database methods to h2 adapter (sahglie)
-- Use connection getDatabaseProductName instead of getClass.getName
- when detecting JNDI dialects (Denis Odorcic)
-- ACTIVERECORD_JDBC-146: Fix create_table to not append encoding (Marc Slemko)
-- All green on SQLite3 Rails master ActiveRecord tests
-- ACTIVERECORD_JDBC-140: Sync postgres add/change column code from Rails master
-- ACTIVERECORD_JDBC-139: TEXT/DATE on PostgreSQL should not have limits
-
-== 1.0.3 (11/29/10)
-
-- ACTIVERECORD_JDBC-143: Implement table_exists? fixing association
- table names with schema prefixes
-- Cleanup of column code for hsqldb (Denis Odorcic)
-- Rails 3.0.3 support - add Arel 2 visitors for all adapters
-- Fix MySQL date types to not have limits (Chris Lowder)
-- ACTIVERECORD_JDBC-141: Better schema support in H2
-
-== 1.0.2
-
-- ACTIVERECORD_JDBC-134: Fix conflicting adapter/column superclasses
-- ACTIVERECORD_JDBC-135: Fix regression on PG with boolean and :limit
-- Slew of Derby fixes courtesy of Denis Odorcic
-
-== 1.0.1
-
-- Fix db:test:purge issue affecting all adapters in 1.0.0 due to
- incorrect fix to JRUBY-5081 in 8b4b9c5
-
-== 1.0.0
-
-- Thanks to David Kellum, Dmitry Denisov, Dwayne Litzenberger, Gregor
- Schmidt, James Walker, John Duff, Joshua Suggs, Nicholas J Kreucher,
- Peter Donald, Geoff Longman, Uwe Kubosch, Youhei Kondou, Michael
- Pitman, Alex B, and Ryan Bell for their contributions to this
- release.
-- BIG set of DB2 updates (Thanks Nick Kreucher)
-- Deprecate jdbc_adapter/rake_tasks
-- (1.0.0.beta1)
-- Make database-specific extensions only load when necessary
-- Allow for discovery of database extensions outside of ar-jdbc
- proper. This should allow for custom database development to be
- developed and released without relying on AR-JDBC core.
-- Get AR's own tests running as close to 100% as possible. MySQL is
- currently 100%, SQLite3 is close.
-- JRUBY-4876: Bump up Derby's max index name length (Uwe Kubosch)
-- (1.0.0.beta2)
-- 98 commits since beta1
-- MSSQL updates from dlitz and realityforge
-- ACTIVERECORD_JDBC-131: Fix string slug issue for DB2 (Youhei Kondou)
-- JRUBY-1642: Don't use H2 INFORMATION_SCHEMA in table or column
- searches
-- JRUBY-4972: Attempt to deal with type(0)/:limit => 0 by not setting
-  it (808e213)
-- JRUBY-5040: Fix issue with limits on timestamps in MySQL
-- JRUBY-3555: Allow setting Derby schema with 'schema:' option
-- ACTIVERECORD_JDBC-98: Make sure we actuall raise an error when
- inappropriately configured
-- ACTIVERECORD_JDBC-112: Add schema dumper tests for already-fixed
- MySQL type limits
-- ACTIVERECORD_JDBC-113: Fix PG float precision issue
-- ACTIVERECORD_JDBC-103: Fix decimal options for PG add/change column
- (Michael Pitman)
-- ACTIVERECORD_JDBC-127: Fix quoting of Date vs. Time(stamp) for
- Oracle (Lenny Marks)
-- Oracle: Sort out the NUMBER vs NUMBER(x) vs NUMBER(x,y) situation.
-- JRUBY-3051: Think we finally got the PG mixed-case patches applied.
-- JRUBY-5081: Consolidate code for dropping DB via postgres
-- ACTIVERECORD_JDBC-101: Add override of LONGVARCHAR => CLOB for
- informix
-- ACTIVERECORD_JDBC-107: Fix MySQL update_all issue on AR 2.3
-- ACTIVERECORD_JDBC-124: Filter out special _row_num column
-- ACTIVERECORD_JDBC-126: Fix sql 2000 limit/offset per Michael Pitman
-- ACTIVERECORD_JDBC-125: Add tweak to limit/offset code for HABTM
- queries (alex b)
-- ACTIVERECORD_JDBC-129: Don't have limits for text, binary or bit
- fields
-- (1.0.0 final)
-- Fix a few more SQLite3 AR tests
-- SQLite3: handle ":memory:" database
-- Release new SQLite3 driver 3.6.14.2 and new Derby driver 10.6.2.1
-
-== 0.9.7
-
-- JRUBY-4781: Fix multiple database connection collision issue w/
- Oracle
-- ACTIVERECORD_JDBC-115: Support SAVEPOINTS for MySQL and PG so that
- nested transactions can be faked
-- ACTIVERECORD_JDBC-116: Handle schema.table better for MySQL (thanks
- Dilshod Mukhtarov)
-- Fix 'Wrong # of arguments (2 for 1)' issue with #create_database for
- MySQL and AR 3.0
-- SQLServer 2000 support (thanks Jay McGaffigan)
-
-== 0.9.6
-
-- The Oracle release!
-- Oracle should be working much better with this release. Also updated
- to work with Rails 3.
-- Get all unit tests running cleanly on Oracle, fixing previous
- datetime/timezone issues.
-- ACTIVERECORD_JDBC-83: Add :sequence_start_value option to
- create_table, following oracle_enhanced adapter
-- ACTIVERECORD_JDBC-33: Don't double-quote table names in oracle
-- ACTIVERECORD_JDBC-17: Fix Oracle primary keys so that /^NUMBER$/ => :integer
-- Fix remaining blockers ACTIVERECORD_JDBC-82, JRUBY-3675,
- ACTIVERECORD_JDBC-22, ACTIVERECORD_JDBC-27, JRUBY-4759
-
-== 0.9.5
-
-- The MSSQL release, courtesy of Mike Williams and Lonely
- Planet.
-- JRuby + AR-JDBC is now seen as the hassle-free way of using Rails
- with SQLServer!
-- Many fixes for MSSQL, including ACTIVERECORD_JDBC-18,
- ACTIVERECORD_JDBC-41, ACTIVERECORD_JDBC-56, ACTIVERECORD_JDBC-94,
- ACTIVERECORD_JDBC-99, JRUBY-3805, JRUBY-3793, JRUBY-4221
-- All tests pass on Rails 3.0.0.beta3!
-
-== 0.9.4
-
-- ACTIVERECORD_JDBC-96: DB2 JdbcSpec cannot dump schema correctly
- (Youhei Kondou)
-- ACTIVERECORD_JDBC-97: Dont use Rails 3 deprecated constants (David
- Calavera)
-- Updates for rake db:schema:dump compatibility with Rails 2.3+ and
- MySQL (Joakim Kolsjö)
-- Rails 3.0.0.beta2 compatibility
-- Return of Derby, H2, Hsqldb support (requires AR >= 3.0.0.beta2)
-
-== 0.9.3
-
-- Rails 3 compatibility
-- PLEASE NOTE: ActiveRecord in Rails 3 has changed in a way that
- doesn't allow non-standard DBs (such as the Derby and H2 embedded
- DBs) to work. We're investigating the effort required to support
- these databases and hope to have something for a future release.
-- ACTIVERECORD_JDBC-91: Fix schema search path for PostgreSQL (Alex
- Kuebo)
-- ACTIVERECORD_JDBC-87: DB2 ID insert fix (Youhei Kondou)
-- ACTIVERECORD_JDBC-90: MSSQL fix for DATEs (jlangenauer)
-- ACTIVERECORD_JDBC-93: Fix string IDs for sqlite3, hsql/h2 (moser)
-- ACTIVERECORD_JDBC-86: Fix Derby queries starting with VALUES (Dwayne Litzenberger)
-- ACTIVERECORD_JDBC-95: Fix INSERT ... RETURNING for PostgreSQL
-
-== 0.9.2
-
-- The main, highly awaited fix for this release is a solution to the
- rake db:create/db:drop issue. The main change is a new 'jdbc' rails
- generator that should be run once to prepare a Rails application to
- use JDBC. The upside of this generator is that you no longer will
- need to alter database.yml for JDBC. See the README.txt for details.
-- Cleanup and reconnect if errors occur during begin/rollback
- (Jean-Dominique Morani, Christian Seiler)
-- ACTIVERECORD_JDBC-1: Add #drop_database method for oracle (does the
- same thing as recreate_database)
-- Sqlite3 and MSSQL fixes (Jean-Dominique Morani)
-- JRUBY-3512: Treat LONGVARCHAR as a CLOB for Mssql
-- JRUBY-3624: Upgrade Derby to 10.5.3.0 and add native limit/offset
- support (Christopher Saunders)
-- JRUBY-3616: Fix postgres non-sequence primary keys (David Kellum)
-- JRUBY-3669: Fix Oracle case with unconfigured schema (Dan Powell)
-- Fixed quote_column_name of jdbc_oracle to accept numbers (Marcelo
- Murad)
-- Fix for mysql tables with non standard primary keys such that the
- schema dump is correct (Nick Zalabak)
-- MSSQL fixes from Mike Luu:
- - add support for MSSQL uniqueidentifier datatype
- - always quote strings using unicode identifier for MSSQL
-- Changes primary_key generation to use always instead of by default
- for DB2 (Amos King)
-- Improves the SQLite adapter by fixing rename_column, change_column,
- change_column_default, changing remove_column, and adding
- remove_columns (Ryan Baumann)
-- More oracle love courtesy Ben Browning and Jens Himmelreich
-- JRUBY-3608: Add missing change_column_null method for postgres
-- JRUBY-3508: Fix quoting of integer and float columns
-
-== 0.9.1
-
-- We did a lot of internal cleanup this release in the hopes of
- simplifying the code and increasing performance.
-- Many SQLite updates (thanks Nils Christian Haugen)
-- JRUBY-2912: Fix MSSQL create/drop database (Joern Hartmann)
-- JRUBY-2767: Mistake in selecting identity with H2/HSQLDB
-- JRUBY-2884: jdbc_postgre.rb issue handling nil booleans (also a fix
- for hsqldb/h2) + tests
-- JRUBY-2995: activerecord jdbc derby adapter should quote columns
- called 'year'
-- JRUBY-2897: jdbc_postgre.rb needs microsecond support
-- JRUBY-3282: Upgrade to derby 10.4.2.0 to allow unique constraints
- with nullable columns
-- Update h2 from 1.0.63 to 1.1.107 in driver
-- JRUBY-3026: [Derby] Allow select/delete/update conditions with
- comparison to NULL using '='
-- JRUBY-2996: ...(actually this fixes the only remaining issue of this bug,
-  which was that symbols passed into quote were exploding)
-- JRUBY-2691: Update sybase driver to pass simple unit tests with jtds
- and verify it works with the new dialect keyword. patch by Leigh
- Kennedy
-- Make :float type work on h2, hsql [returned as string]. Make :float
- work on hsqldb (no paren value supported). Make REAL_TYPE just
- return RubyFloat
-- JRUBY-3222: Upgrade #type_to_sql to variation of AR 2.1.2 version
-- Add patch supplied in JRUBY-3489 (patch by Jean-Dominique Morani)
-- Various Oracle fixes by edsono
-- JRUBY-2688: Don't hard-code MySQL connection character encoding to
- utf8
-
-== 0.9
-
-- Now updated to support ActiveRecord 2.2. JNDI-based connections will
- automatically connect/disconnect for every AR connection pool
- checkout/checkin. For best results, set your pool: parameter >= the
- actual maximum size of the JNDI connection pool. (We'll look at how
- to eliminate the need to configure AR's pool in the future.)
-- NEW! Informix support courtesy of Javier Fernandez-Ivern.
-- Backport another Oracle CLOB issue, thanks Edson César.
-- Rubyforge #22018: chomp final trailing semicolon for oracle
-- JRUBY-2848: Fix NPE error in set_native_database_types
-- Rework oracle lob saving callback to be Rails 2.1 friendly (assist
- from court3nay)
-- JRUBY-2715: Add create/drop database methods to Postgres (Peter Williams)
-- JRUBY-3183: Fix structure dump for Postgres (Ryan Bell)
-- JRUBY-3184: recreate_database for test database working for PG (Ryan Bell)
-- JRUBY-3186: disable referential integrity for PG (Ryan Bell)
-- Authoritative repository now hosted at
- git://github.com/nicksieger/activerecord-jdbc-adapter.git; rubyforge
- svn trunk cleaned out.
-
-== 0.8.2
-
-- Added an optional config key called :dialect. Using :dialect allows you to
- override the default SQL dialect for the driver class being used. There are
- a few cases for this:
-  - Using Sybase w/ the jTDS driver.
- - Using rebranded drivers.
-  - It makes more sense to use :dialect, rather than :driver, when using JNDI.
-- JRUBY-2619: Typo with :test config causing problems with dev database (Igor Minar)
-- 20524, JRUBY-2612: Since when did I think that there was a #true? method on Object?
-
-== 0.8.1
-
-- Now sporting a JDBC sqlite3 adapter! Thanks Joseph Athman.
-- Added support for InterSystems Cache database (Ryan Bell)
-- Fix for JRUBY-2256
-- JRUBY-1638, JRUBY-2404, JRUBY-2463: schema.table handling and Oracle NUMBER fixes (Darcy Schultz & Jesse Hu)
-- Add structure dump and other DDL-ish for DB2 (courtesy abedra and stuarthalloway)
-- Fix missing quote_table_name function under Rails 1.2.6 and earlier
-- Small tweaks to jdbc.rake to select proper config
-- JRUBY-2011: Fix MSSQL string un-quoting issue (Silvio Fonseca)
-- JRUBY-1977, 17427: Fix information_schema select issue with MSSQL (Matt Burke)
-- 20479: Improve get_table_name for MSSQL (Aslak Hellesøy)
-- 20243: numerics improvements for MSSQL (Aslak Hellesøy)
-- 20172: don't quote table names for MSSQL (Thor Marius Henrichsen)
-- 19729: check for primary key existence in postgres during insert (Martin Luder)
-- JRUBY-2297, 18846: retrying failing SQL statements is harmful when not autocommitting (Craig McMillan)
-- 10021: very preliminary sybase support. (Mark Atkinson) Not usable until collision w/ sqlserver driver is resolved.
-- JRUBY-2312, JRUBY-2319, JRUBY-2322: Oracle timestamping issues (Jesse Hu & Michael König)
-- JRUBY-2422: Fix MySQL referential integrity and rollback issues
-- JRUBY-2382: mysql string quoting fails with ArrayIndexOutofBoundsException
-
-== 0.8
-
-- NOTE: This release is only compatible with JRuby 1.1RC3 or later.
-- Because of recent API changes in trunk in preparation for JRuby 1.1, this release is not
- backward compatible with previous JRuby releases. Hence the version bump.
-- Internal: convert Java methods to be defined with annotations
-- Fix problem with reserved words coming back pre-quoted from #indexes in postgres
-- JRUBY-2205: Fix N^2 allocation of bytelists for mysql quoting (taw)
-- Attempt a fix for Rubyforge 18059
-- Upgrade derby to 10.3.2.1
-- Fix db:create etc. in the case where JDBC is loaded in Rails' preinitializer.rb
-- Fix db:drop to actually work
-- Fix for Rubyforge #11567 (Matt Williams)
-
-== 0.7.2
-
-- JRUBY-1905: add_column for derby, hsqldb, and postgresql (Stephen Bannasch)
-- Fix db:create for JDBC
-- Support Rails 2 with the old "require 'jdbc_adapter'" approach
-- JRUBY-1966: Instead of searching for just tables, search for views and tables.
-- JRUBY-1583: DB2 numeric quoting (Ryan Shillington)
-- JRUBY-1634: Oracle DATE type mapping (Daniel Wintschel)
-- JRUBY-1543: rename_column issue with more recent MySQL drivers (Oliver Schmelzle)
-- Rubyforge #15074: ConnectionAdapters::JdbcAdapter.indexes is missing name and
- schema_name parameters in the method signature (Igor Minar)
-- Rubyforge #13558: definition for the indexes method (T Meyarivan)
-- JRUBY-2051: handle schemaname and tablename more correctly for columns
-- JRUBY-2102: Postgres Adapter cannot handle datetime type (Rainer Hahnekamp)
-- JRUBY-2018: Oracle behind ActiveRecord-JDBC fails with "Invalid column index" (K Venkatasubramaniyan)
-- JRUBY-2012: jdbc_mysql structure dump fails for mysql views (Tyler Jennings)
-
-== 0.7.1
-
-- Add adapter and driver for H2 courtesy of Caleb Land
-- Fix "undefined method `last' for {}:Hash" error introduced with new Rake 0.8.1 (JRUBY-1859)
-
-== 0.7
-
-- PLEASE NOTE: This release is not compatible with JRuby releases earlier than
- 1.0.3 or 1.1b2. If you must use JRuby 1.0.2 or earlier, please install the
- 0.6 release.
-- Release coincides with JRuby 1.0.3 and JRuby 1.1b2 releases
-- Simultaneous support for JRuby trunk and 1.0 branch
-- Get rid of log_no_bench method, so we time SQL execution again.
-- Implement #select_rows
-- MySQL migration and quoting updates
-
-== 0.6
-
-- Gem is renamed to "activerecord-jdbc-adapter" to follow new conventions
- introduced in Rails 2.0 for third-party adapters. Rails 2.0 compatibility is
- introduced.
-- Add dependency on ActiveRecord >= 1.14 (from the Rails 1.1.x release)
-- New drivers (jdbc-XXX) and adapter (activerecord-jdbcXXX-adapter) gems
- available separately. See the README.txt file for details.
-- Plain "jdbc" driver is still available if you want to use the full
- driver/url way of specifying the driver.
-- More bugfixes to Oracle and SQLServer courtesy of Ola & ThoughtWorks
-
-== 0.5
-
-- Release coincides with JRuby 1.0.1 release
-- It is no longer necessary to specify :driver and :url configuration
- parameters for the mysql, postgresql, oracle, derby, hsqldb, and h2
- adapters. The previous configuration is still valid and compatible, but for
- new applications, this makes it possible to use the exact same database.yml
- configuration as Rails applications running under native Ruby.
-- JDBC drivers can now be dynamically loaded by Ruby code, without being on
- the classpath prior to launching JRuby. Simply use "require
- 'jdbc-driver.jar'" in JRuby code to add it to the runtime classpath.
-- Updates to HSQL, MS SQLServer, Postgres, Oracle and Derby adapters
-
-== 0.4
-
-- Release coincides with JRuby 1.0 release
-- Shoring up PostgreSQL (courtesy Dudley Flanders) and HSQL (courtesy Matthew
- Williams)
-- Fix timestamps on Oracle to use DATE (as everything else)
-- Derby fixes: Fix for open result set issue, better structure dump, quoting,
- column type changing
-- Sybase type recognition fix (courtesy Dean Mao)
-
-== 0.3.1
-
-- Derby critical fixes shortly after 0.3
-
-== 0.3
-
-- Release coincides with JRuby 1.0.0RC1 release
-- Improvements for Derby, Postgres, and Oracle, all of which are running
- > 95% of AR tests
-
-== 0.2.4
-
-- Release coincides with JRuby 0.9.9 release
-- JRuby 0.9.9 is required
-- MySQL close to 100% working
-- Derby improvements
-- DECIMAL/NUMERIC/FLOAT/REAL bugs fixed with type recognition for Oracle,
- Postgres, etc.
-- HSQLDB has regressed this release and may not be functioning; we'll get it
- fixed for the next one
-
-== 0.2.3
-
-- Release coincides (and compatible) with JRuby 0.9.8 release
-- 8 bugs fixed: see http://rubyurl.com/0Da
-- Improvements and compatibility fixes for Rails 1.2.x
-
-== 0.2.1, 0.2.2
-
-- Early releases, added better support for multiple databases
-
-== 0.0.1
-
-- Initial, very alpha release
+++ /dev/null
-Copyright (c) 2006-2008 Nick Sieger <nick@nicksieger.com>
-Copyright (c) 2006-2008 Ola Bini <ola.bini@gmail.com>
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+++ /dev/null
-History.txt
-Manifest.txt
-README.txt
-Rakefile
-LICENSE.txt
-lib/activerecord-jdbc-adapter.rb
-lib/arjdbc.rb
-lib/jdbc_adapter.rb
-lib/pg.rb
-lib/active_record/connection_adapters/derby_adapter.rb
-lib/active_record/connection_adapters/h2_adapter.rb
-lib/active_record/connection_adapters/hsqldb_adapter.rb
-lib/active_record/connection_adapters/informix_adapter.rb
-lib/active_record/connection_adapters/jdbc_adapter.rb
-lib/active_record/connection_adapters/jndi_adapter.rb
-lib/active_record/connection_adapters/mssql_adapter.rb
-lib/active_record/connection_adapters/mysql2_adapter.rb
-lib/active_record/connection_adapters/mysql_adapter.rb
-lib/active_record/connection_adapters/oracle_adapter.rb
-lib/active_record/connection_adapters/postgresql_adapter.rb
-lib/active_record/connection_adapters/sqlite3_adapter.rb
-lib/arel/engines/sql/compilers/db2_compiler.rb
-lib/arel/engines/sql/compilers/derby_compiler.rb
-lib/arel/engines/sql/compilers/h2_compiler.rb
-lib/arel/engines/sql/compilers/hsqldb_compiler.rb
-lib/arel/engines/sql/compilers/jdbc_compiler.rb
-lib/arel/engines/sql/compilers/mssql_compiler.rb
-lib/arel/visitors/compat.rb
-lib/arel/visitors/db2.rb
-lib/arel/visitors/derby.rb
-lib/arel/visitors/firebird.rb
-lib/arel/visitors/hsqldb.rb
-lib/arel/visitors/sql_server.rb
-lib/arjdbc/db2.rb
-lib/arjdbc/derby.rb
-lib/arjdbc/discover.rb
-lib/arjdbc/firebird.rb
-lib/arjdbc/h2.rb
-lib/arjdbc/hsqldb.rb
-lib/arjdbc/informix.rb
-lib/arjdbc/jdbc.rb
-lib/arjdbc/mimer.rb
-lib/arjdbc/mssql.rb
-lib/arjdbc/mysql.rb
-lib/arjdbc/oracle.rb
-lib/arjdbc/postgresql.rb
-lib/arjdbc/sqlite3.rb
-lib/arjdbc/sybase.rb
-lib/arjdbc/version.rb
-lib/arjdbc/db2/adapter.rb
-lib/arjdbc/derby/adapter.rb
-lib/arjdbc/derby/connection_methods.rb
-lib/arjdbc/firebird/adapter.rb
-lib/arjdbc/h2/adapter.rb
-lib/arjdbc/h2/connection_methods.rb
-lib/arjdbc/hsqldb/adapter.rb
-lib/arjdbc/hsqldb/connection_methods.rb
-lib/arjdbc/informix/adapter.rb
-lib/arjdbc/informix/connection_methods.rb
-lib/arjdbc/jdbc/adapter.rb
-lib/arjdbc/jdbc/callbacks.rb
-lib/arjdbc/jdbc/column.rb
-lib/arjdbc/jdbc/compatibility.rb
-lib/arjdbc/jdbc/connection.rb
-lib/arjdbc/jdbc/connection_methods.rb
-lib/arjdbc/jdbc/core_ext.rb
-lib/arjdbc/jdbc/discover.rb
-lib/arjdbc/jdbc/driver.rb
-lib/arjdbc/jdbc/extension.rb
-lib/arjdbc/jdbc/java.rb
-lib/arjdbc/jdbc/missing_functionality_helper.rb
-lib/arjdbc/jdbc/quoted_primary_key.rb
-lib/arjdbc/jdbc/railtie.rb
-lib/arjdbc/jdbc/rake_tasks.rb
-lib/arjdbc/jdbc/require_driver.rb
-lib/arjdbc/jdbc/type_converter.rb
-lib/arjdbc/mimer/adapter.rb
-lib/arjdbc/mssql/adapter.rb
-lib/arjdbc/mssql/connection_methods.rb
-lib/arjdbc/mssql/limit_helpers.rb
-lib/arjdbc/mssql/tsql_helper.rb
-lib/arjdbc/mysql/adapter.rb
-lib/arjdbc/mysql/connection_methods.rb
-lib/arjdbc/oracle/adapter.rb
-lib/arjdbc/oracle/connection_methods.rb
-lib/arjdbc/postgresql/adapter.rb
-lib/arjdbc/postgresql/connection_methods.rb
-lib/arjdbc/sqlite3/adapter.rb
-lib/arjdbc/sqlite3/connection_methods.rb
-lib/arjdbc/sybase/adapter.rb
-lib/generators/jdbc/jdbc_generator.rb
-lib/jdbc_adapter/rake_tasks.rb
-lib/jdbc_adapter/version.rb
-lib/arjdbc/jdbc/adapter_java.jar
-test/abstract_db_create.rb
-test/db2_simple_test.rb
-test/derby_migration_test.rb
-test/derby_multibyte_test.rb
-test/derby_simple_test.rb
-test/generic_jdbc_connection_test.rb
-test/h2_simple_test.rb
-test/has_many_through.rb
-test/helper.rb
-test/hsqldb_simple_test.rb
-test/informix_simple_test.rb
-test/jdbc_common.rb
-test/jndi_callbacks_test.rb
-test/jndi_test.rb
-test/manualTestDatabase.rb
-test/mssql_db_create_test.rb
-test/mssql_identity_insert_test.rb
-test/mssql_legacy_types_test.rb
-test/mssql_limit_offset_test.rb
-test/mssql_multibyte_test.rb
-test/mssql_simple_test.rb
-test/mysql_db_create_test.rb
-test/mysql_info_test.rb
-test/mysql_multibyte_test.rb
-test/mysql_nonstandard_primary_key_test.rb
-test/mysql_simple_test.rb
-test/oracle_simple_test.rb
-test/oracle_specific_test.rb
-test/pick_rails_version.rb
-test/postgres_db_create_test.rb
-test/postgres_drop_db_test.rb
-test/postgres_information_schema_leak_test.rb
-test/postgres_mixed_case_test.rb
-test/postgres_native_type_mapping_test.rb
-test/postgres_nonseq_pkey_test.rb
-test/postgres_reserved_test.rb
-test/postgres_schema_search_path_test.rb
-test/postgres_simple_test.rb
-test/postgres_table_alias_length_test.rb
-test/simple.rb
-test/sqlite3_simple_test.rb
-test/sybase_jtds_simple_test.rb
-test/activerecord/connection_adapters/type_conversion_test.rb
-test/activerecord/connections/native_jdbc_mysql/connection.rb
-test/db/db2.rb
-test/db/derby.rb
-test/db/h2.rb
-test/db/hsqldb.rb
-test/db/informix.rb
-test/db/jdbc.rb
-test/db/jndi_config.rb
-test/db/logger.rb
-test/db/mssql.rb
-test/db/mysql.rb
-test/db/oracle.rb
-test/db/postgres.rb
-test/db/sqlite3.rb
-test/models/add_not_null_column_to_table.rb
-test/models/auto_id.rb
-test/models/data_types.rb
-test/models/entry.rb
-test/models/mixed_case.rb
-test/models/reserved_word.rb
-test/models/string_id.rb
-test/models/validates_uniqueness_of_string.rb
-lib/arjdbc/jdbc/jdbc.rake
-src/java/arjdbc/db2/DB2RubyJdbcConnection.java
-src/java/arjdbc/derby/DerbyModule.java
-src/java/arjdbc/h2/H2RubyJdbcConnection.java
-src/java/arjdbc/informix/InformixRubyJdbcConnection.java
-src/java/arjdbc/jdbc/AdapterJavaService.java
-src/java/arjdbc/jdbc/JdbcConnectionFactory.java
-src/java/arjdbc/jdbc/RubyJdbcConnection.java
-src/java/arjdbc/jdbc/SQLBlock.java
-src/java/arjdbc/mssql/MssqlRubyJdbcConnection.java
-src/java/arjdbc/mysql/MySQLModule.java
-src/java/arjdbc/mysql/MySQLRubyJdbcConnection.java
-src/java/arjdbc/oracle/OracleRubyJdbcConnection.java
-src/java/arjdbc/postgresql/PostgresqlRubyJdbcConnection.java
-src/java/arjdbc/sqlite3/Sqlite3RubyJdbcConnection.java
-rakelib/compile.rake
-rakelib/db.rake
-rakelib/package.rake
-rakelib/rails.rake
-rakelib/test.rake
-rails_generators/jdbc_generator.rb
-rails_generators/templates
-rails_generators/templates/config
-rails_generators/templates/lib
-rails_generators/templates/config/initializers
-rails_generators/templates/config/initializers/jdbc.rb
-rails_generators/templates/lib/tasks
-rails_generators/templates/lib/tasks/jdbc.rake
+++ /dev/null
-activerecord-jdbc-adapter is a database adapter for Rails' ActiveRecord
-component that can be used with JRuby[http://www.jruby.org/]. It allows use of
-virtually any JDBC-compliant database with your JRuby on Rails application.
-
-== Databases
-
-Activerecord-jdbc-adapter provides full or nearly full support for:
-MySQL, PostgreSQL, SQLite3, Oracle, Microsoft SQL Server, DB2,
-FireBird, Derby, HSQLDB, H2, and Informix.
-
-Other databases will require testing and likely a custom configuration module.
-Please join the activerecord-jdbc
-mailing-lists[http://kenai.com/projects/activerecord-jdbc/lists] to help us discover
-support for more databases.
-
-== Using ActiveRecord JDBC
-
-=== Inside Rails
-
-To use activerecord-jdbc-adapter with JRuby on Rails:
-
-1. Choose the adapter you wish to gem install (an example install command is
-shown after the list). The following pre-packaged adapters are available:
-
- * base jdbc (<tt>activerecord-jdbc-adapter</tt>). Supports all available databases via JDBC, but requires you to download and manually install the database vendor's JDBC driver .jar file.
- * mysql (<tt>activerecord-jdbcmysql-adapter</tt>)
- * postgresql (<tt>activerecord-jdbcpostgresql-adapter</tt>)
- * sqlite3 (<tt>activerecord-jdbcsqlite3-adapter</tt>)
- * derby (<tt>activerecord-jdbcderby-adapter</tt>)
- * hsqldb (<tt>activerecord-jdbchsqldb-adapter</tt>)
- * h2 (<tt>activerecord-jdbch2-adapter</tt>)
- * mssql (<tt>activerecord-jdbcmssql-adapter</tt>)
-
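-For example, to install the pre-packaged MySQL adapter (substitute the
-adapter of your choice from the list above):
-
-  jruby -S gem install activerecord-jdbcmysql-adapter
-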
-2a. For Rails 3, if you're generating a new application, use the
-following command to generate your application:
-
- jruby -S rails new sweetapp -m http://jruby.org/rails3.rb
-
-2b. Otherwise, you'll need to perform some extra configuration steps
-to prepare your Rails application for JDBC.
-
-If you're using Rails 3, you'll need to modify your Gemfile to use the
-activerecord-jdbc-adapter gem under JRuby. Change your Gemfile to look
-like the following (using sqlite3 as an example):
-
-  if defined?(JRUBY_VERSION)
-    gem 'activerecord-jdbc-adapter'
-    gem 'jdbc-sqlite3'
-  else
-    gem 'sqlite3-ruby', :require => 'sqlite3'
-  end
-
-If you're using Rails 2:
-
- jruby script/generate jdbc
-
-3. Configure your database.yml in the normal Rails style.
-
-Legacy configuration: If you use one of the convenience
-'activerecord-jdbcXXX-adapter' adapters, you can still put a 'jdbc'
-prefix in front of the database adapter name as below.
-
-  development:
-    adapter: jdbcmysql
-    username: blog
-    password:
-    hostname: localhost
-    database: weblog_development
-
-For other databases, you'll need to know the database driver class and
-URL. Example:
-
-  development:
-    adapter: jdbc
-    username: blog
-    password:
-    driver: com.mysql.jdbc.Driver
-    url: jdbc:mysql://localhost:3306/weblog_development
-
- For JNDI data sources, you may simply specify the JNDI location as follows
- (the adapter will be automatically detected):
-
-  production:
-    adapter: jdbc
-    jndi: jdbc/mysqldb
-
-=== Standalone, with ActiveRecord
-
-1. Install the gem with JRuby:
-
- jruby -S gem install activerecord-jdbc-adapter
-
-If you wish to use the adapter for a specific database, you can
-install it directly and a driver gem will be installed as well:
-
- jruby -S gem install activerecord-jdbcderby-adapter
-
-2. After this you can establish a JDBC connection like this:
-
-  ActiveRecord::Base.establish_connection(
-    :adapter => 'jdbcderby',
-    :database => "db/my-database"
-  )
-
-or like this (but requires that you manually put the driver jar on the classpath):
-
-  ActiveRecord::Base.establish_connection(
-    :adapter => 'jdbc',
-    :driver => 'org.apache.derby.jdbc.EmbeddedDriver',
-    :url => 'jdbc:derby:test_ar;create=true'
-  )
-
-== Extending AR-JDBC
-
-You can create your own extension to AR-JDBC for a JDBC-based database
-that core AR-JDBC does not support. We've created an example project
-for the Intersystems Cache database that you can examine as a
-template. See the project for more information at the following URL:
-
- http://github.com/nicksieger/activerecord-cachedb-adapter
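-
-In broad strokes, an extension registers a module with AR-JDBC that is
-activated when the JDBC driver or database name matches. A minimal,
-hypothetical sketch, modeled on the built-in declarations in
-lib/arjdbc/discover.rb (the :CacheDB symbol and the 'arjdbc/cachedb'
-require path are placeholders, not shipped code):
-
-  module ::ArJdbc
-    # register a custom adapter module, keyed off the JDBC database/driver name
-    extension :CacheDB do |name|
-      if name =~ /cache/i
-        require 'arjdbc/cachedb'  # hypothetical file defining ArJdbc::CacheDB
-        true
-      end
-    end
-  end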
-
-== Getting the source
-
-The source for activerecord-jdbc-adapter is available using git.
-
- git clone git://github.com/nicksieger/activerecord-jdbc-adapter.git
-
-== Feedback
-
-Please file bug reports at
-http://kenai.com/jira/browse/ACTIVERECORD_JDBC. If you're not sure if
-something's a bug, feel free to pre-report it on the mailing lists.
-
-== Project Info
-
-* Mailing Lists: http://kenai.com/projects/activerecord-jdbc/lists
-* Issues: http://kenai.com/jira/browse/ACTIVERECORD_JDBC
-* Source:
- git://github.com/nicksieger/activerecord-jdbc-adapter.git
- git://kenai.com/activerecord-jdbc~main
-
-== Running AR-JDBC's Tests
-
-Drivers for 6 open-source databases are included. Provided you have
-MySQL installed, you can simply type <tt>jruby -S rake</tt> to run the
-tests. A database named <tt>weblog_development</tt> is needed
-beforehand with a connection user of "blog" and an empty password. You
-also need to grant "blog" create privileges on
-'test_rake_db_create.*'.
-
-If you also have PostgreSQL available, those tests will be run if the
-`psql' executable can be found. Also ensure you have a database named
-<tt>weblog_development</tt> and a user named "blog" with an empty
-password.
-
-If you want Rails logging enabled during these test runs, you can edit
-test/jdbc_common.rb and add the following line:
-
-require 'db/logger'
-
-== Running AR Tests
-
-To run the current AR-JDBC sources with ActiveRecord, just use the
-included "rails:test" task. Be sure to specify a driver and a path to
-the ActiveRecord sources.
-
- jruby -S rake rails:test DRIVER=mysql RAILS=/path/activerecord_source_dir
-
-== Authors
-
-This project was written by Nick Sieger <nick@nicksieger.com> and Ola Bini
-<olabini@gmail.com> with lots of help from the JRuby community.
-
-== License
-
-activerecord-jdbc-adapter is released under a BSD license. See the LICENSE file
-included with the distribution for details.
-
-Open-source driver gems for activerecord-jdbc-adapter are licensed under the
-same license as the database's drivers. See each driver gem's
-LICENSE.txt file for details.
+++ /dev/null
-require 'rake/testtask'
-require 'rake/clean'
-CLEAN.include 'derby*', 'test.db.*','test/reports', 'test.sqlite3','lib/**/*.jar','manifest.mf', '*.log'
-
-task :default => [:java_compile, :test]
-
-task :filelist do
- puts FileList['pkg/**/*'].inspect
-end
-
+++ /dev/null
-require 'arjdbc/derby'
+++ /dev/null
-require 'arjdbc/h2'
+++ /dev/null
-require 'arjdbc/hsqldb'
+++ /dev/null
-require 'arjdbc/informix'
+++ /dev/null
-require 'arjdbc/jdbc'
+++ /dev/null
-require 'arjdbc/jdbc'
+++ /dev/null
-require 'arjdbc/mssql'
+++ /dev/null
-require 'arjdbc/mysql'
+++ /dev/null
-require 'arjdbc/mysql'
+++ /dev/null
-require 'arjdbc/oracle'
+++ /dev/null
-require 'arjdbc/postgresql'
+++ /dev/null
-require 'arjdbc/sqlite3'
+++ /dev/null
-require 'arjdbc'
-if ActiveRecord::VERSION::MAJOR >= 3
- begin
- require 'arjdbc/jdbc/railtie'
- rescue LoadError
- # Assume we don't have railties in this version of AR
- end
-end
+++ /dev/null
-require 'arel/engines/sql/compilers/ibm_db_compiler'
-
-module Arel
- module SqlCompiler
- class DB2Compiler < IBM_DBCompiler
- end
- end
-end
-
+++ /dev/null
-module Arel
- module SqlCompiler
- class DerbyCompiler < GenericCompiler
- end
- end
-end
+++ /dev/null
-module Arel
- module SqlCompiler
- class H2Compiler < GenericCompiler
- end
- end
-end
+++ /dev/null
-module Arel
- module SqlCompiler
- class HsqldbCompiler < GenericCompiler
- def select_sql
- # HSQLDB needs to add LIMIT in right after SELECT
- query = super
- offset = relation.skipped
- limit = relation.taken
- @engine.connection.add_limit_offset!(query, :limit => limit,
- :offset => offset) if offset || limit
- query
- end
- end
- end
-end
+++ /dev/null
-module Arel
- module SqlCompiler
- class JDBCCompiler < GenericCompiler
- end
- end
-end
+++ /dev/null
-module Arel
- module SqlCompiler
- class MsSQLCompiler < GenericCompiler
- def select_sql
- projections = @relation.projections
- offset = relation.skipped
- limit = relation.taken
- if Count === projections.first && projections.size == 1 &&
- (relation.taken.present? || relation.wheres.present?) && relation.joins(self).blank?
- subquery = [
- "SELECT * FROM #{relation.from_clauses}", build_clauses
- ].join ' '
- @engine.connection.add_limit_offset!(subquery, :limit => limit, :offset => offset) if offset || limit
- query = "SELECT COUNT(*) AS count_id FROM (#{subquery}) AS subquery"
- else
- query = [
- "SELECT #{relation.select_clauses.join(', ')}",
- "FROM #{relation.from_clauses}",
- build_clauses
- ].compact.join ' '
- @engine.connection.add_limit_offset!(query, :limit => limit, :offset => offset) if offset || limit
- end
- query
- end
-
- def build_clauses
- joins = relation.joins(self)
- wheres = relation.where_clauses
- groups = relation.group_clauses
- havings = relation.having_clauses
- orders = relation.order_clauses
-
- clauses = [ "",
- joins,
- ("WHERE #{wheres.join(' AND ')}" unless wheres.empty?),
- ("GROUP BY #{groups.join(', ')}" unless groups.empty?),
- ("HAVING #{havings.join(' AND ')}" unless havings.empty?),
- ("ORDER BY #{orders.join(', ')}" unless orders.empty?)
- ].compact.join ' '
-
- clauses << " #{locked}" unless locked.blank?
- clauses unless clauses.blank?
- end
- end
- end
-end
+++ /dev/null
-module Arel
- module Visitors
- module ArJdbcCompat
- def limit_for(limit_or_node)
- limit_or_node.respond_to?(:expr) ? limit_or_node.expr.to_i : limit_or_node
- end
- end
-
- class ToSql
- include ArJdbcCompat
- end
- end
-end
+++ /dev/null
-require 'arel/visitors/compat'
-
-module Arel
- module Visitors
- class DB2 < Arel::Visitors::ToSql
- def visit_Arel_Nodes_SelectStatement o
- add_limit_offset([o.cores.map { |x| visit_Arel_Nodes_SelectCore x }.join,
- ("ORDER BY #{o.orders.map { |x| visit x }.join(', ')}" unless o.orders.empty?),
- ].compact.join(' '), o)
- end
-
- def add_limit_offset(sql, o)
- @connection.replace_limit_offset! sql, limit_for(o.limit), o.offset && o.offset.value
- end
- end
- end
-end
+++ /dev/null
-require 'arel/visitors/compat'
-
-module Arel
- module Visitors
- class Derby < Arel::Visitors::ToSql
- def visit_Arel_Nodes_SelectStatement o
- [
- o.cores.map { |x| visit_Arel_Nodes_SelectCore x }.join,
- ("ORDER BY #{o.orders.map { |x| visit x }.join(', ')}" unless o.orders.empty?),
- ("FETCH FIRST #{limit_for(o.limit)} ROWS ONLY" if o.limit),
- (visit(o.offset) if o.offset),
- (visit(o.lock) if o.lock),
- ].compact.join ' '
- end
-
- def visit_Arel_Nodes_Offset o
- "OFFSET #{visit o.value} ROWS"
- end
- end
- end
-end
+++ /dev/null
-require 'arel/visitors/compat'
-
-module Arel
- module Visitors
- class Firebird < Arel::Visitors::ToSql
- def visit_Arel_Nodes_SelectStatement o
- [
- o.cores.map { |x| visit_Arel_Nodes_SelectCore x }.join,
- ("ORDER BY #{o.orders.map { |x| visit x }.join(', ')}" unless o.orders.empty?),
- ("ROWS #{limit_for(o.limit)} " if o.limit),
- ("TO #{o.offset} " if o.offset),
- ].compact.join ' '
- end
-
- end
- end
-end
+++ /dev/null
-require 'arel/visitors/compat'
-
-module Arel
- module Visitors
- class HSQLDB < Arel::Visitors::ToSql
- def visit_Arel_Nodes_SelectStatement o
- [
- limit_offset(o.cores.map { |x| visit_Arel_Nodes_SelectCore x }.join, o),
- ("ORDER BY #{o.orders.map { |x| visit x }.join(', ')}" unless o.orders.empty?),
- ].compact.join ' '
- end
-
- def limit_offset sql, o
- offset = o.offset || 0
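-        # keep everything after the leading "SELECT " (7 characters); HSQLDB wants its LIMIT right after SELECT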
- bef = sql[7..-1]
- if limit = o.limit
- "SELECT LIMIT #{offset} #{limit_for(limit)} #{bef}"
- elsif offset > 0
- "SELECT LIMIT #{offset} 0 #{bef}"
- else
- sql
- end
- end
- end
- end
-end
+++ /dev/null
-require 'arel/visitors/compat'
-
-module Arel
- module Visitors
- class SQLServer < Arel::Visitors::ToSql
- include ArJdbc::MsSQL::LimitHelpers::SqlServerReplaceLimitOffset
-
- def select_count? o
- sel = o.cores.length == 1 && o.cores.first
- projections = sel && sel.projections.length == 1 && sel.projections
- projections && Arel::Nodes::Count === projections.first
- end
-
- # Need to mimic the subquery logic in ARel 1.x for select count with limit
- # See arel/engines/sql/compilers/mssql_compiler.rb for details
- def visit_Arel_Nodes_SelectStatement o
- order = "ORDER BY #{o.orders.map { |x| visit x }.join(', ')}" unless o.orders.empty?
- if o.limit
- if select_count?(o)
- subquery = true
- sql = o.cores.map do |x|
- x = x.dup
- x.projections = [Arel::Nodes::SqlLiteral.new("*")]
- visit_Arel_Nodes_SelectCore x
- end.join
- else
- sql = o.cores.map { |x| visit_Arel_Nodes_SelectCore x }.join
- end
-
- order ||= "ORDER BY #{@connection.determine_order_clause(sql)}"
- replace_limit_offset!(sql, limit_for(o.limit).to_i, o.offset && o.offset.value.to_i, order)
- sql = "SELECT COUNT(*) AS count_id FROM (#{sql}) AS subquery" if subquery
- else
- sql = super
- end
- sql
- end
- end
-
- class SQLServer2000 < SQLServer
- include ArJdbc::MsSQL::LimitHelpers::SqlServer2000ReplaceLimitOffset
- end
- end
-end
+++ /dev/null
-if defined?(JRUBY_VERSION)
- begin
- tried_gem ||= false
- require 'active_record/version'
- rescue LoadError
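-    # activate ActiveRecord via RubyGems once and retry; re-raise if that was already attempted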
- raise if tried_gem
- require 'rubygems'
- gem 'activerecord'
- tried_gem = true
- retry
- end
- if ActiveRecord::VERSION::MAJOR < 2
- if defined?(RAILS_CONNECTION_ADAPTERS)
- RAILS_CONNECTION_ADAPTERS << %q(jdbc)
- else
- RAILS_CONNECTION_ADAPTERS = %w(jdbc)
- end
- if ActiveRecord::VERSION::MAJOR == 1 && ActiveRecord::VERSION::MINOR == 14
- require 'arjdbc/jdbc'
- end
- else
- require 'active_record'
- require 'arjdbc/jdbc'
- end
-else
- warn "activerecord-jdbc-adapter is for use with JRuby only"
-end
-
-require 'arjdbc/version'
+++ /dev/null
-require 'arjdbc/jdbc'
-require 'arjdbc/db2/adapter'
+++ /dev/null
-module ArJdbc
- module DB2
- def self.column_selector
- [ /(db2|as400)/i,
- lambda { |cfg, column| column.extend(::ArJdbc::DB2::Column) } ]
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::DB2JdbcConnection
- end
-
- module Column
- def type_cast(value)
- return nil if value.nil? || value =~ /^\s*null\s*$/i
- case type
- when :string then value
- when :integer then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
- when :primary_key then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
- when :float then value.to_f
- when :datetime then ArJdbc::DB2::Column.cast_to_date_or_time(value)
- when :date then ArJdbc::DB2::Column.cast_to_date_or_time(value)
- when :timestamp then ArJdbc::DB2::Column.cast_to_time(value)
- when :time then ArJdbc::DB2::Column.cast_to_time(value)
- # TODO AS400 stores binary strings in EBCDIC (CCSID 65535), need to convert back to ASCII
- else
- super
- end
- end
-
- def type_cast_code(var_name)
- case type
- when :datetime then "ArJdbc::DB2::Column.cast_to_date_or_time(#{var_name})"
- when :date then "ArJdbc::DB2::Column.cast_to_date_or_time(#{var_name})"
- when :timestamp then "ArJdbc::DB2::Column.cast_to_time(#{var_name})"
- when :time then "ArJdbc::DB2::Column.cast_to_time(#{var_name})"
- else
- super
- end
- end
-
- def self.cast_to_date_or_time(value)
- return value if value.is_a? Date
- return nil if value.blank?
- guess_date_or_time((value.is_a? Time) ? value : cast_to_time(value))
- end
-
- def self.cast_to_time(value)
- return value if value.is_a? Time
- # AS400 returns a 2 digit year, LUW returns a 4 digit year, so comp = true to help out AS400
- time_array = ParseDate.parsedate(value, true)
- time_array[0] ||= 2000; time_array[1] ||= 1; time_array[2] ||= 1;
- Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
- end
-
- def self.guess_date_or_time(value)
- (value.hour == 0 and value.min == 0 and value.sec == 0) ?
- Date.new(value.year, value.month, value.day) : value
- end
-
- private
- # <b>DEPRECATED:</b> SMALLINT is now used for boolean field types. Please
- # convert your tables using DECIMAL(5) for boolean values to SMALLINT instead.
- def use_decimal5_for_boolean
- warn "[DEPRECATION] using DECIMAL(5) for boolean is deprecated. Convert your columns to SMALLINT instead."
- :boolean
- end
-
- # http://publib.boulder.ibm.com/infocenter/db2luw/v9r7/topic/com.ibm.db2.luw.apdv.java.doc/doc/rjvjdata.html
- def simplified_type(field_type)
- case field_type
- # old jdbc_db2.rb used decimal(5,0) as boolean
- when /^smallint/i then :boolean
- when /^decimal\(5\)$/i then use_decimal5_for_boolean
- when /^real/i then :float
- when /^timestamp/i then :datetime
- else
- super
- end
- end
-
- # Post process default value from JDBC into a Rails-friendly format (columns{-internal})
- def default_value(value)
- # IBM i (AS400) will return an empty string instead of null for no default
- return nil if value.blank?
-
- # string defaults are surrounded by single quotes
- return $1 if value =~ /^'(.*)'$/
-
- value
- end
- end
-
- def _execute(sql, name = nil)
- if ActiveRecord::ConnectionAdapters::JdbcConnection::select?(sql)
- @connection.execute_query(sql)
- elsif ActiveRecord::ConnectionAdapters::JdbcConnection::insert?(sql)
- (@connection.execute_insert(sql) or last_insert_id(sql)).to_i
- else
- @connection.execute_update(sql)
- end
- end
-
- # holy moly batman! all this to tell AS400 "yes i am sure"
- def execute_and_auto_confirm(sql)
- begin
- @connection.execute_update "call qsys.qcmdexc('QSYS/CHGJOB INQMSGRPY(*SYSRPYL)',0000000031.00000)"
- @connection.execute_update "call qsys.qcmdexc('ADDRPYLE SEQNBR(9876) MSGID(CPA32B2) RPY(''I'')',0000000045.00000)"
- rescue Exception => e
- raise "Could not call CHGJOB INQMSGRPY(*SYSRPYL) and ADDRPYLE SEQNBR(9876) MSGID(CPA32B2) RPY('I').\n" +
- "Do you have authority to do this?\n\n" + e.to_s
- end
-
- r = execute sql
-
- begin
- @connection.execute_update "call qsys.qcmdexc('QSYS/CHGJOB INQMSGRPY(*DFT)',0000000027.00000)"
- @connection.execute_update "call qsys.qcmdexc('RMVRPYLE SEQNBR(9876)',0000000021.00000)"
- rescue Exception => e
- raise "Could not call CHGJOB INQMSGRPY(*DFT) and RMVRPYLE SEQNBR(9876).\n" +
- "Do you have authority to do this?\n\n" + e.to_s
- end
- r
- end
-
- def last_insert_id(sql)
- table_name = sql.split(/\s/)[2]
- result = select(ActiveRecord::Base.send(:sanitize_sql,
- %[select IDENTITY_VAL_LOCAL() as last_insert_id from #{table_name}],
- nil))
- result.last['last_insert_id']
- end
-
- def modify_types(tp)
- tp[:primary_key] = 'int not null generated by default as identity (start with 1) primary key'
- tp[:string][:limit] = 255
- tp[:integer][:limit] = nil
- tp[:boolean] = {:name => "smallint"}
- tp
- end
-
- def type_to_sql(type, limit = nil, precision = nil, scale = nil)
- limit = nil if type.to_sym == :integer
- super(type, limit, precision, scale)
- end
-
- def adapter_name
- 'DB2'
- end
-
- def arel2_visitors
- require 'arel/visitors/db2'
- {'db2' => ::Arel::Visitors::DB2, 'as400' => ::Arel::Visitors::DB2}
- end
-
- def add_limit_offset!(sql, options)
- replace_limit_offset!(sql, options[:limit], options[:offset])
- end
-
- def replace_limit_offset!(sql, limit, offset)
- if limit
- limit = limit.to_i
- if !offset
- if limit == 1
- sql << " FETCH FIRST ROW ONLY"
- else
- sql << " FETCH FIRST #{limit} ROWS ONLY"
- end
- else
- offset = offset.to_i
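-          # with an offset, wrap the statement with row_number() so only rows (offset, offset + limit] are returned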
- sql.gsub!(/SELECT/i, 'SELECT B.* FROM (SELECT A.*, row_number() over () AS internal$rownum FROM (SELECT')
- sql << ") A ) B WHERE B.internal$rownum > #{offset} AND B.internal$rownum <= #{limit + offset}"
- end
- end
- sql
- end
-
- def pk_and_sequence_for(table)
-      # On the JDBC/DB2 side, only uppercase table and column names are handled.
- keys = super(table.upcase)
- if keys && keys[0]
-        # On the ActiveRecord side, only lowercase table and column names are handled.
- keys[0] = keys[0].downcase
- end
- keys
- end
-
- def quote_column_name(column_name)
- column_name
- end
-
- def quote(value, column = nil) # :nodoc:
- if column && column.respond_to?(:primary) && column.primary && column.klass != String
- return value.to_i.to_s
- end
- if column && (column.type == :decimal || column.type == :integer) && value
- return value.to_s
- end
- case value
- when String
- if column && column.type == :binary
- "BLOB('#{quote_string(value)}')"
- else
- "'#{quote_string(value)}'"
- end
- else super
- end
- end
-
- def quote_string(string)
- string.gsub(/'/, "''") # ' (for ruby-mode)
- end
-
- def quoted_true
- '1'
- end
-
- def quoted_false
- '0'
- end
-
- def reorg_table(table_name)
- unless as400?
- @connection.execute_update "call sysproc.admin_cmd ('REORG TABLE #{table_name}')"
- end
- end
-
- def recreate_database(name)
- tables.each {|table| drop_table("#{db2_schema}.#{table}")}
- end
-
- def remove_index(table_name, options = { })
- execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
- end
-
- # http://publib.boulder.ibm.com/infocenter/db2luw/v9r7/topic/com.ibm.db2.luw.admin.dbobj.doc/doc/t0020130.html
- # ...not supported on IBM i, so we raise in this case
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- if as400?
- raise NotImplementedError, "rename_column is not supported on IBM i"
- else
- execute "ALTER TABLE #{table_name} RENAME COLUMN #{column_name} TO #{new_column_name}"
- reorg_table(table_name)
- end
- end
-
- def change_column_null(table_name, column_name, null)
- if null
- execute_and_auto_confirm "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} DROP NOT NULL"
- else
- execute_and_auto_confirm "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET NOT NULL"
- end
- reorg_table(table_name)
- end
-
- def change_column_default(table_name, column_name, default)
- if default.nil?
- execute_and_auto_confirm "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} DROP DEFAULT"
- else
- execute_and_auto_confirm "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET WITH DEFAULT #{quote(default)}"
- end
- reorg_table(table_name)
- end
-
- def change_column(table_name, column_name, type, options = {})
- data_type = type_to_sql(type, options[:limit], options[:precision], options[:scale])
- sql = "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DATA TYPE #{data_type}"
- as400? ? execute_and_auto_confirm(sql) : execute(sql)
- reorg_table(table_name)
-
- if options.include?(:default) and options.include?(:null)
- # which to run first?
- if options[:null] or options[:default].nil?
- change_column_null(table_name, column_name, options[:null])
- change_column_default(table_name, column_name, options[:default])
- else
- change_column_default(table_name, column_name, options[:default])
- change_column_null(table_name, column_name, options[:null])
- end
- elsif options.include?(:default)
- change_column_default(table_name, column_name, options[:default])
- elsif options.include?(:null)
- change_column_null(table_name, column_name, options[:null])
- end
- end
-
- # http://publib.boulder.ibm.com/infocenter/db2luw/v9r7/topic/com.ibm.db2.luw.admin.dbobj.doc/doc/t0020132.html
- def remove_column(table_name, column_name) #:nodoc:
- sql = "ALTER TABLE #{table_name} DROP COLUMN #{column_name}"
-
- as400? ? execute_and_auto_confirm(sql) : execute(sql)
- reorg_table(table_name)
- end
-
- # http://publib.boulder.ibm.com/infocenter/db2luw/v9r7/topic/com.ibm.db2.luw.sql.ref.doc/doc/r0000980.html
- def rename_table(name, new_name) #:nodoc:
- execute "RENAME TABLE #{name} TO #{new_name}"
- reorg_table(new_name)
- end
-
- def tables
- @connection.tables(nil, db2_schema, nil, ["TABLE"])
- end
-
- # only record precision and scale for types that can set
- # them via CREATE TABLE:
- # http://publib.boulder.ibm.com/infocenter/db2luw/v9r7/topic/com.ibm.db2.luw.sql.ref.doc/doc/r0000927.html
- HAVE_LIMIT = %w(FLOAT DECFLOAT CHAR VARCHAR CLOB BLOB NCHAR NCLOB DBCLOB GRAPHIC VARGRAPHIC) #TIMESTAMP
- HAVE_PRECISION = %w(DECIMAL NUMERIC)
- HAVE_SCALE = %w(DECIMAL NUMERIC)
-
- def columns(table_name, name = nil)
- cols = @connection.columns(table_name, name, db2_schema)
-
- # scrub out sizing info when CREATE TABLE doesn't support it
- # but JDBC reports it (doh!)
- for col in cols
- base_sql_type = col.sql_type.sub(/\(.*/, "").upcase
- col.limit = nil unless HAVE_LIMIT.include?(base_sql_type)
- col.precision = nil unless HAVE_PRECISION.include?(base_sql_type)
- #col.scale = nil unless HAVE_SCALE.include?(base_sql_type)
- end
-
- cols
- end
-
- def jdbc_columns(table_name, name = nil)
- columns(table_name, name)
- end
-
- def indexes(table_name, name = nil)
- @connection.indexes(table_name, name, db2_schema)
- end
-
- def add_quotes(name)
- return name unless name
- %Q{"#{name}"}
- end
-
- def strip_quotes(str)
- return str unless str
- return str unless /^(["']).*\1$/ =~ str
- str[1..-2]
- end
-
- def expand_double_quotes(name)
- return name unless name && name['"']
- name.gsub(/"/,'""')
- end
-
- def structure_dump #:nodoc:
- definition=""
- rs = @connection.connection.meta_data.getTables(nil,db2_schema.upcase,nil,["TABLE"].to_java(:string))
- while rs.next
- tname = rs.getString(3)
- definition << "CREATE TABLE #{tname} (\n"
- rs2 = @connection.connection.meta_data.getColumns(nil,db2_schema.upcase,tname,nil)
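-        # JDBC getColumns result set: 4 = COLUMN_NAME, 6 = TYPE_NAME, 7 = COLUMN_SIZE, 9 = DECIMAL_DIGITS, 13 = COLUMN_DEF, 18 = IS_NULLABLE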
- first_col = true
- while rs2.next
- col_name = add_quotes(rs2.getString(4));
- default = ""
- d1 = rs2.getString(13)
- # IBM i (as400 toolbox driver) will return an empty string if there is no default
- if @config[:url] =~ /^jdbc:as400:/
- default = !d1.blank? ? " DEFAULT #{d1}" : ""
- else
- default = d1 ? " DEFAULT #{d1}" : ""
- end
-
- type = rs2.getString(6)
- col_precision = rs2.getString(7)
- col_scale = rs2.getString(9)
- col_size = ""
- if HAVE_SCALE.include?(type) and col_scale
- col_size = "(#{col_precision},#{col_scale})"
- elsif (HAVE_LIMIT + HAVE_PRECISION).include?(type) and col_precision
- col_size = "(#{col_precision})"
- end
- nulling = (rs2.getString(18) == 'NO' ? " NOT NULL" : "")
- create_col_string = add_quotes(expand_double_quotes(strip_quotes(col_name))) +
- " " +
- type +
- col_size +
- "" +
- nulling +
- default
- if !first_col
- create_col_string = ",\n #{create_col_string}"
- else
- create_col_string = " #{create_col_string}"
- end
-
- definition << create_col_string
-
- first_col = false
- end
- definition << ");\n\n"
- end
- definition
- end
-
- private
- def as400?
- @config[:url] =~ /^jdbc:as400:/
- end
-
- def db2_schema
- if @config[:schema].blank?
- if as400?
- # AS400 implementation takes schema from library name (last part of url)
- schema = @config[:url].split('/').last.strip
- (schema[-1..-1] == ";") ? schema.chop : schema
- else
-          # LUW implementation defaults the schema name to the username
- @config[:username] or ENV['USER']
- end
- else
- @config[:schema]
- end
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/derby'
-require 'arjdbc/derby/connection_methods'
-require 'arjdbc/derby/adapter'
-
-
-
+++ /dev/null
-require 'arjdbc/jdbc/missing_functionality_helper'
-
-module ::ArJdbc
- module Derby
- def self.column_selector
- [/derby/i, lambda {|cfg,col| col.extend(::ArJdbc::Derby::Column)}]
- end
-
- def self.monkey_rails
- unless @already_monkeyd
-      # Needed because Rails is broken with respect to quoting of
- # some values. Most databases are nice about it,
- # but not Derby. The real issue is that you can't
- # compare a CHAR value to a NUMBER column.
- ::ActiveRecord::Associations::ClassMethods.module_eval do
- private
-
- def select_limited_ids_list(options, join_dependency)
- connection.select_all(
- construct_finder_sql_for_association_limiting(options, join_dependency),
- "#{name} Load IDs For Limited Eager Loading"
- ).collect { |row| connection.quote(row[primary_key], columns_hash[primary_key]) }.join(", ")
- end
- end
-
- @already_monkeyd = true
- end
- end
-
- def self.extended(*args)
- monkey_rails
- end
-
- def self.included(*args)
- monkey_rails
- end
-
- module Column
- def simplified_type(field_type)
- case field_type
- when /smallint/i then :boolean
- when /real/i then :float
- when /decimal/i then :decimal
- else
- super
- end
- end
-
- # Post process default value from JDBC into a Rails-friendly format (columns{-internal})
- def default_value(value)
- # jdbc returns column default strings with actual single quotes around the value.
- return $1 if value =~ /^'(.*)'$/
-
- value
- end
- end
-
- def adapter_name #:nodoc:
- 'Derby'
- end
-
- def arel2_visitors
- require 'arel/visitors/derby'
- {'derby' => ::Arel::Visitors::Derby, 'jdbcderby' => ::Arel::Visitors::Derby}
- end
-
- include ArJdbc::MissingFunctionalityHelper
-
- def index_name_length
- 128
- end
-
- # Convert the specified column type to a SQL string.
- # In Derby, the following cannot specify a limit:
- # - integer
- # - boolean (smallint)
- # - timestamp
- # - date
- def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
- return super unless [:integer, :boolean, :timestamp, :date].include? type
-
- native = native_database_types[type.to_s.downcase.to_sym]
- native.is_a?(Hash) ? native[:name] : native
- end
-
- def modify_types(tp)
- tp[:primary_key] = "int generated by default as identity NOT NULL PRIMARY KEY"
- tp[:string][:limit] = 256
- tp[:integer][:limit] = nil
- tp[:boolean] = {:name => "smallint"}
- tp[:timestamp][:limit] = nil
- tp[:date][:limit] = nil
-
- # sonar
- # New type
- tp[:big_integer] = {:name => "bigint"}
- # /sonar
-
- tp
- end
-
- # Override default -- fix case where ActiveRecord passes :default => nil, :null => true
- def add_column_options!(sql, options)
- options.delete(:default) if options.has_key?(:default) && options[:default].nil?
- sql << " DEFAULT #{quote(options.delete(:default))}" if options.has_key?(:default)
- super
- end
-
- def classes_for_table_name(table)
- ActiveRecord::Base.send(:subclasses).select {|klass| klass.table_name == table}
- end
-
- # Set the sequence to the max value of the table's column.
- def reset_sequence!(table, column, sequence = nil)
- mpk = select_value("SELECT MAX(#{quote_column_name(column)}) FROM #{quote_table_name(table)}")
- execute("ALTER TABLE #{quote_table_name(table)} ALTER COLUMN #{quote_column_name(column)} RESTART WITH #{mpk.to_i + 1}")
- end
-
- def reset_pk_sequence!(table, pk = nil, sequence = nil)
- klasses = classes_for_table_name(table)
- klass = klasses.nil? ? nil : klasses.first
- pk = klass.primary_key unless klass.nil?
- if pk && klass.columns_hash[pk].type == :integer
- reset_sequence!(klass.table_name, pk)
- end
- end
-
- def remove_index(table_name, options) #:nodoc:
- execute "DROP INDEX #{index_name(table_name, options)}"
- end
-
- def rename_table(name, new_name)
- execute "RENAME TABLE #{quote_table_name(name)} TO #{quote_table_name(new_name)}"
- end
-
- AUTO_INC_STMT2 = "SELECT AUTOINCREMENTSTART, AUTOINCREMENTINC, COLUMNNAME, REFERENCEID, COLUMNDEFAULT FROM SYS.SYSCOLUMNS WHERE REFERENCEID = (SELECT T.TABLEID FROM SYS.SYSTABLES T WHERE T.TABLENAME = '%s') AND COLUMNNAME = '%s'"
-
- def add_quotes(name)
- return name unless name
- %Q{"#{name}"}
- end
-
- def strip_quotes(str)
- return str unless str
- return str unless /^(["']).*\1$/ =~ str
- str[1..-2]
- end
-
- def expand_double_quotes(name)
- return name unless name && name['"']
- name.gsub(/"/,'""')
- end
-
- def auto_increment_stmt(tname, cname)
- stmt = AUTO_INC_STMT2 % [tname, strip_quotes(cname)]
- data = execute(stmt).first
- if data
- start = data['autoincrementstart']
- if start
- coldef = ""
- coldef << " GENERATED " << (data['columndefault'].nil? ? "ALWAYS" : "BY DEFAULT ")
- coldef << "AS IDENTITY (START WITH "
- coldef << start
- coldef << ", INCREMENT BY "
- coldef << data['autoincrementinc']
- coldef << ")"
- return coldef
- end
- end
- ""
- end
-
-
- def add_column(table_name, column_name, type, options = {})
- add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- add_column_options!(add_column_sql, options)
- execute(add_column_sql)
- end
-
- def execute(sql, name = nil)
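-      # Derby cannot compare to NULL with '='/'!='; rewrite such comparisons as IS [NOT] NULL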
- if sql =~ /\A\s*(UPDATE|INSERT)/i
- i = sql =~ /\swhere\s/im
- if i
- sql[i..-1] = sql[i..-1].gsub(/!=\s*NULL/, 'IS NOT NULL').gsub(/=\sNULL/i, 'IS NULL')
- end
- else
- sql.gsub!(/= NULL/i, 'IS NULL')
- end
- super
- end
-
- # SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
- #
- # Derby requires the ORDER BY columns in the select list for distinct queries, and
- # requires that the ORDER BY include the distinct column.
- #
- # distinct("posts.id", "posts.created_at desc")
- #
- # Based on distinct method for PostgreSQL Adapter
- def distinct(columns, order_by)
- return "DISTINCT #{columns}" if order_by.blank?
-
- # construct a clean list of column names from the ORDER BY clause, removing
- # any asc/desc modifiers
- order_columns = order_by.split(',').collect { |s| s.split.first }
- order_columns.delete_if(&:blank?)
- order_columns = order_columns.zip((0...order_columns.size).to_a).map { |s,i| "#{s} AS alias_#{i}" }
-
- # return a DISTINCT clause that's distinct on the columns we want but includes
- # all the required columns for the ORDER BY to work properly
- sql = "DISTINCT #{columns}, #{order_columns * ', '}"
- sql
- end
-
- SIZEABLE = %w(VARCHAR CLOB BLOB)
-
- def structure_dump #:nodoc:
- definition=""
- rs = @connection.connection.meta_data.getTables(nil,nil,nil,["TABLE"].to_java(:string))
- while rs.next
- tname = rs.getString(3)
- definition << "CREATE TABLE #{tname} (\n"
- rs2 = @connection.connection.meta_data.getColumns(nil,nil,tname,nil)
- first_col = true
- while rs2.next
- col_name = add_quotes(rs2.getString(4));
- default = ""
- d1 = rs2.getString(13)
- if d1 =~ /^GENERATED_/
- default = auto_increment_stmt(tname, col_name)
- elsif d1
- default = " DEFAULT #{d1}"
- end
-
- type = rs2.getString(6)
- col_size = rs2.getString(7)
- nulling = (rs2.getString(18) == 'NO' ? " NOT NULL" : "")
- create_col_string = add_quotes(expand_double_quotes(strip_quotes(col_name))) +
- " " +
- type +
- (SIZEABLE.include?(type) ? "(#{col_size})" : "") +
- nulling +
- default
- if !first_col
- create_col_string = ",\n #{create_col_string}"
- else
- create_col_string = " #{create_col_string}"
- end
-
- definition << create_col_string
-
- first_col = false
- end
- definition << ");\n\n"
- end
- definition
- end
-
- def remove_column(table_name, column_name)
- execute "ALTER TABLE #{quote_table_name(table_name)} DROP COLUMN #{quote_column_name(column_name)} RESTRICT"
- end
-
-    # Notes about changing columns in Derby:
-    # http://db.apache.org/derby/docs/10.2/ref/rrefsqlj81859.html#rrefsqlj81859__rrefsqlj37860
- #
- # We support changing columns using the strategy outlined in:
- # https://issues.apache.org/jira/browse/DERBY-1515
- #
- # This feature has not made it into a formal release and is not in Java 6. We will
- # need to conditionally support this somehow (supposed to arrive for 10.3.0.0)
- def change_column(table_name, column_name, type, options = {})
- # null/not nulling is easy, handle that separately
- if options.include?(:null)
- # This seems to only work with 10.2 of Derby
- if options.delete(:null) == false
- execute "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} NOT NULL"
- else
- execute "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} NULL"
- end
- end
-
- # anything left to do?
- unless options.empty?
- begin
- execute "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} SET DATA TYPE #{type_to_sql(type, options[:limit])}"
- rescue
- transaction do
- temp_new_column_name = "#{column_name}_newtype"
- # 1) ALTER TABLE t ADD COLUMN c1_newtype NEWTYPE;
- add_column table_name, temp_new_column_name, type, options
- # 2) UPDATE t SET c1_newtype = c1;
- execute "UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(temp_new_column_name)} = CAST(#{quote_column_name(column_name)} AS #{type_to_sql(type, options[:limit])})"
- # 3) ALTER TABLE t DROP COLUMN c1;
- remove_column table_name, column_name
- # 4) ALTER TABLE t RENAME COLUMN c1_newtype to c1;
- rename_column table_name, temp_new_column_name, column_name
- end
- end
- end
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- execute "RENAME COLUMN #{quote_table_name(table_name)}.#{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
- end
-
- def primary_keys(table_name)
- @connection.primary_keys table_name.to_s.upcase
- end
-
- def columns(table_name, name=nil)
- @connection.columns_internal(table_name.to_s, name, derby_schema)
- end
-
- def tables
- @connection.tables(nil, derby_schema)
- end
-
- def recreate_database(db_name)
- tables.each do |t|
- drop_table t
- end
- end
-
- def quote_column_name(name) #:nodoc:
- %Q{"#{name.to_s.upcase.gsub(/"/, '""')}"}
- end
-
- def quoted_true
- '1'
- end
-
- def quoted_false
- '0'
- end
-
- def add_limit_offset!(sql, options) #:nodoc:
- if options[:offset]
- sql << " OFFSET #{options[:offset]} ROWS"
- end
- if options[:limit]
- #ROWS/ROW and FIRST/NEXT mean the same
- sql << " FETCH FIRST #{options[:limit]} ROWS ONLY"
- end
- end
-
- private
- # Derby appears to define schemas using the username
- def derby_schema
- if @config.has_key?(:schema)
- config[:schema]
- else
- (@config[:username] && @config[:username].to_s) || ''
- end
- end
- end
-end
-
-
+++ /dev/null
-module ActiveRecord
- class Base
- class << self
- def derby_connection(config)
- config[:url] ||= "jdbc:derby:#{config[:database]};create=true"
- config[:driver] ||= "org.apache.derby.jdbc.EmbeddedDriver"
- conn = embedded_driver(config)
- md = conn.jdbc_connection.meta_data
- if md.database_major_version < 10 || (md.database_major_version == 10 && md.database_minor_version < 5)
- raise ::ActiveRecord::ConnectionFailed, "Derby adapter requires Derby 10.5 or later"
- end
- conn
- end
-
- alias_method :jdbcderby_connection, :derby_connection
- end
- end
-end
+++ /dev/null
-# arjdbc/discover.rb: Declare ArJdbc.extension modules in this file
-# that load a custom module and adapter.
-
-module ::ArJdbc
- # Adapters built-in to AR are required up-front so we can override
- # the native ones
- require 'arjdbc/mysql'
- extension :MySQL do |name|
- name =~ /mysql/i
- end
-
- require 'arjdbc/postgresql'
- extension :PostgreSQL do |name|
- name =~ /postgre/i
- end
-
- require 'arjdbc/sqlite3'
- extension :SQLite3 do |name|
- name =~ /sqlite/i
- end
-
- # Other adapters are lazy-loaded
- extension :DB2 do |name, config|
- if name =~ /(db2|as400)/i && config[:url] !~ /^jdbc:derby:net:/
- require 'arjdbc/db2'
- true
- end
- end
-
- extension :Derby do |name|
- if name =~ /derby/i
- require 'arjdbc/derby'
- true
- end
- end
-
- extension :FireBird do |name|
- if name =~ /firebird/i
- require 'arjdbc/firebird'
- true
- end
- end
-
- extension :H2 do |name|
- if name =~ /\.h2\./i
- require 'arjdbc/h2'
- true
- end
- end
-
- extension :HSQLDB do |name|
- if name =~ /hsqldb/i
- require 'arjdbc/hsqldb'
- true
- end
- end
-
- extension :Informix do |name|
- if name =~ /informix/i
- require 'arjdbc/informix'
- true
- end
- end
-
- extension :Mimer do |name|
- if name =~ /mimer/i
- require 'arjdbc/mimer'
- true
- end
- end
-
- extension :MsSQL do |name|
- if name =~ /sqlserver|tds|Microsoft SQL/i
- require 'arjdbc/mssql'
- true
- end
- end
-
- extension :Oracle do |name|
- if name =~ /oracle/i
- require 'arjdbc/oracle'
- true
- end
- end
-
- extension :Sybase do |name|
- if name =~ /sybase|tds/i
- require 'arjdbc/sybase'
- true
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-require 'arjdbc/firebird/adapter'
+++ /dev/null
-module ::ArJdbc
- module FireBird
-
- def self.extended(mod)
- unless @lob_callback_added
- ActiveRecord::Base.class_eval do
- def after_save_with_firebird_blob
- self.class.columns.select { |c| c.sql_type =~ /blob/i }.each do |c|
- value = self[c.name]
- value = value.to_yaml if unserializable_attribute?(c.name, c)
- next if value.nil?
- connection.write_large_object(c.type == :binary, c.name, self.class.table_name, self.class.primary_key, quote_value(id), value)
- end
- end
- end
-
- ActiveRecord::Base.after_save :after_save_with_firebird_blob
- @lob_callback_added = true
- end
- end
-
- def adapter_name
- 'Firebird'
- end
-
- def arel2_visitors
- require 'arel/visitors/firebird'
- {'firebird' => ::Arel::Visitors::Firebird, 'firebirdsql' => ::Arel::Visitors::Firebird}
- end
-
- def modify_types(tp)
- tp[:primary_key] = 'INTEGER NOT NULL PRIMARY KEY'
- tp[:string][:limit] = 252
- tp[:integer][:limit] = nil
- tp
- end
-
- def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) # :nodoc:
- execute(sql, name)
- id_value
- end
-
- def add_limit_offset!(sql, options) # :nodoc:
- if options[:limit]
- limit_string = "FIRST #{options[:limit]}"
- limit_string << " SKIP #{options[:offset]}" if options[:offset]
- sql.sub!(/\A(\s*SELECT\s)/i, '\&' + limit_string + ' ')
- end
- end
-
- def prefetch_primary_key?(table_name = nil)
- true
- end
-
- def default_sequence_name(table_name, primary_key) # :nodoc:
- "#{table_name}_seq"
- end
-
- def next_sequence_value(sequence_name)
- select_one("SELECT GEN_ID(#{sequence_name}, 1 ) FROM RDB$DATABASE;")["gen_id"]
- end
-
- def create_table(name, options = {}) #:nodoc:
- super(name, options)
- execute "CREATE GENERATOR #{name}_seq"
- end
-
- def rename_table(name, new_name) #:nodoc:
- execute "RENAME #{name} TO #{new_name}"
- execute "UPDATE RDB$GENERATORS SET RDB$GENERATOR_NAME='#{new_name}_seq' WHERE RDB$GENERATOR_NAME='#{name}_seq'" rescue nil
- end
-
- def drop_table(name, options = {}) #:nodoc:
- super(name)
- execute "DROP GENERATOR #{name}_seq" rescue nil
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- execute "ALTER TABLE #{table_name} ALTER #{column_name} TYPE #{type_to_sql(type, options[:limit])}"
- end
-
- def rename_column(table_name, column_name, new_column_name)
- execute "ALTER TABLE #{table_name} ALTER #{column_name} TO #{new_column_name}"
- end
-
- def remove_index(table_name, options) #:nodoc:
- execute "DROP INDEX #{index_name(table_name, options)}"
- end
-
- def quote(value, column = nil) # :nodoc:
- return value.quoted_id if value.respond_to?(:quoted_id)
-
- # BLOBs are updated separately by an after_save trigger.
- return value.nil? ? "NULL" : "'#{quote_string(value[0..1])}'" if column && [:binary, :text].include?(column.type)
-
- if [Time, DateTime].include?(value.class)
- "CAST('#{value.strftime("%Y-%m-%d %H:%M:%S")}' AS TIMESTAMP)"
- else
- if column && column.type == :primary_key
- return value.to_s
- end
- super
- end
- end
-
- def quote_string(string) # :nodoc:
- string.gsub(/'/, "''")
- end
-
- def quote_column_name(column_name) # :nodoc:
- %Q("#{ar_to_fb_case(column_name)}")
- end
-
- def quoted_true # :nodoc:
- quote(1)
- end
-
- def quoted_false # :nodoc:
- quote(0)
- end
-
- private
-
- # Maps uppercase Firebird column names to lowercase for ActiveRecord;
- # mixed-case columns retain their original case.
- def fb_to_ar_case(column_name)
- column_name =~ /[[:lower:]]/ ? column_name : column_name.to_s.downcase
- end
-
-    # Maps lowercase ActiveRecord column names to uppercase for Firebird;
- # mixed-case columns retain their original case.
- def ar_to_fb_case(column_name)
- column_name =~ /[[:upper:]]/ ? column_name : column_name.to_s.upcase
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/h2'
-require 'arjdbc/h2/connection_methods'
-require 'arjdbc/h2/adapter'
+++ /dev/null
-require 'arjdbc/hsqldb/adapter'
-
-module ArJdbc
- module H2
- include HSQLDB
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::H2JdbcConnection
- end
-
- def adapter_name #:nodoc:
- 'H2'
- end
-
- def arel2_visitors
- super.merge 'h2' => ::Arel::Visitors::HSQLDB, 'jdbch2' => ::Arel::Visitors::HSQLDB
- end
-
- def h2_adapter
- true
- end
-
- def tables
- @connection.tables(nil, h2_schema)
- end
-
- def columns(table_name, name=nil)
- @connection.columns_internal(table_name.to_s, name, h2_schema)
- end
-
- private
- def h2_schema
- @config[:schema] || ''
- end
- end
-end
+++ /dev/null
-module ActiveRecord
- class Base
- class << self
- def h2_connection(config)
- config[:url] ||= "jdbc:h2:#{config[:database]}"
- config[:driver] ||= "org.h2.Driver"
- embedded_driver(config)
- end
- alias_method :jdbch2_connection, :h2_connection
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/hsqldb'
-require 'arjdbc/hsqldb/connection_methods'
-require 'arjdbc/hsqldb/adapter'
+++ /dev/null
-module ::ArJdbc
- module HSQLDB
- def self.column_selector
- [/hsqldb|\.h2\./i, lambda {|cfg,col| col.extend(::ArJdbc::HSQLDB::Column)}]
- end
-
- module Column
- private
- def simplified_type(field_type)
- case field_type
- when /longvarchar/i then :text
- when /tinyint/i then :boolean
- when /real/i then :float
- when /decimal/i then :decimal
- else
- super
- end
- end
-
- # Override of ActiveRecord::ConnectionAdapters::Column
- def extract_limit(sql_type)
- # HSQLDB appears to return "LONGVARCHAR(0)" for :text columns, which
- # for AR purposes should be interpreted as "no limit"
- return nil if sql_type =~ /\(0\)/
- super
- end
-
- # Post process default value from JDBC into a Rails-friendly format (columns{-internal})
- def default_value(value)
- # jdbc returns column default strings with actual single quotes around the value.
- return $1 if value =~ /^'(.*)'$/
-
- value
- end
- end
-
- def adapter_name #:nodoc:
- 'Hsqldb'
- end
-
- def arel2_visitors
- require 'arel/visitors/hsqldb'
- {'hsqldb' => ::Arel::Visitors::HSQLDB, 'jdbchsqldb' => ::Arel::Visitors::HSQLDB}
- end
-
- def modify_types(tp)
- tp[:primary_key] = "INTEGER GENERATED BY DEFAULT AS IDENTITY(START WITH 0) PRIMARY KEY"
- tp[:integer][:limit] = nil
- tp[:boolean][:limit] = nil
- # set text and float limits so we don't see odd scales tacked on
- # in migrations
- tp[:boolean] = { :name => "tinyint" }
- tp[:text][:limit] = nil
- tp[:float][:limit] = 17 if defined?(::Jdbc::H2)
- tp[:string][:limit] = 255
- tp[:datetime] = { :name => "DATETIME" }
- tp[:timestamp] = { :name => "DATETIME" }
- tp[:time] = { :name => "TIME" }
- tp[:date] = { :name => "DATE" }
- tp
- end
-
- def quote(value, column = nil) # :nodoc:
- return value.quoted_id if value.respond_to?(:quoted_id)
-
- case value
- when String
- if respond_to?(:h2_adapter) && value.empty?
- "''"
- elsif column && column.type == :binary
- "'#{value.unpack("H*")}'"
- elsif column && (column.type == :integer ||
- column.respond_to?(:primary) && column.primary && column.klass != String)
- value.to_i.to_s
- else
- "'#{quote_string(value)}'"
- end
- else
- super
- end
- end
-
- def quote_column_name(name) #:nodoc:
- name = name.to_s
- if name =~ /[-]/
- %Q{"#{name.upcase}"}
- else
- name
- end
- end
-
- def quote_string(str)
- str.gsub(/'/, "''")
- end
-
- def quoted_true
- '1'
- end
-
- def quoted_false
- '0'
- end
-
- def add_column(table_name, column_name, type, options = {})
- add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- add_column_options!(add_column_sql, options)
- execute(add_column_sql)
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit])}"
- end
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{quote(default)}"
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} RENAME TO #{new_column_name}"
- end
-
-    # Maps logical Rails types to HSQLDB-specific data types.
- def type_to_sql(type, limit = nil, precision = nil, scale = nil)
- return super if defined?(::Jdbc::H2) || type.to_s != 'integer' || limit == nil
-
- type
- end
-
- def rename_table(name, new_name)
- execute "ALTER TABLE #{name} RENAME TO #{new_name}"
- end
-
- def last_insert_id
- Integer(select_value("CALL IDENTITY()"))
- end
-
- def _execute(sql, name = nil)
- result = super
- ActiveRecord::ConnectionAdapters::JdbcConnection::insert?(sql) ? last_insert_id : result
- end
-
- def add_limit_offset!(sql, options) #:nodoc:
- if sql =~ /^select/i
- offset = options[:offset] || 0
- bef = sql[7..-1]
- if limit = options[:limit]
- sql.replace "SELECT LIMIT #{offset} #{limit} #{bef}"
- elsif offset > 0
- sql.replace "SELECT LIMIT #{offset} 0 #{bef}"
- end
- end
- end
-
- # override to filter out system tables that otherwise end
- # up in db/schema.rb during migrations. JdbcConnection#tables
- # now takes an optional block filter so we can screen out
- # rows corresponding to system tables. HSQLDB names its
- # system tables SYSTEM.*, but H2 seems to name them without
- # any kind of convention
- def tables
- @connection.tables.select {|row| row.to_s !~ /^system_/i }
- end
-
- def remove_index(table_name, options = {})
- execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
- end
-
- def recreate_database(name)
- drop_database(name)
- end
-
-    # Do nothing, since the database gets created upon connection. However,
-    # this method gets called by the Rails db rake tasks, so defining it here
-    # avoids a method_missing error.
- def create_database(name)
- end
-
- def drop_database(name)
- execute("DROP ALL OBJECTS")
- end
- end
-end
+++ /dev/null
-module ActiveRecord
- class Base
- class << self
- def hsqldb_connection(config)
- require "arjdbc/hsqldb"
- config[:url] ||= "jdbc:hsqldb:#{config[:database]}"
- config[:driver] ||= "org.hsqldb.jdbcDriver"
- embedded_driver(config)
- end
-
- alias_method :jdbchsqldb_connection, :hsqldb_connection
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-require 'arjdbc/informix/connection_methods'
-require 'arjdbc/informix/adapter'
+++ /dev/null
-module ::ActiveRecord
- class Base
- after_save :write_lobs
-
- private
- def write_lobs
- if connection.is_a?(ArJdbc::Informix)
- self.class.columns.each do |c|
- if [:text, :binary].include? c.type
- value = self[c.name]
- value = value.to_yaml if unserializable_attribute?(c.name, c)
-
- unless value.nil? || (value == '')
- connection.write_large_object(c.type == :binary,
- c.name,
- self.class.table_name,
- self.class.primary_key,
- quote_value(id),
- value)
- end
- end
- end
- end
- end
- end
-end
-
-module ::ArJdbc
- module Informix
- def self.extended(base)
- @@db_major_version = base.select_one("SELECT dbinfo('version', 'major') version FROM systables WHERE tabid = 1")['version'].to_i
- end
-
- def self.column_selector
- [ /informix/i,
- lambda { |cfg, column| column.extend(::ArJdbc::Informix::Column) } ]
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::InformixJdbcConnection
- end
-
- module Column
- private
- # TODO: Test all Informix column types.
- def simplified_type(field_type)
- if field_type =~ /serial/i
- :primary_key
- else
- super
- end
- end
- end
-
- def modify_types(tp)
- tp[:primary_key] = "SERIAL PRIMARY KEY"
- tp[:string] = { :name => "VARCHAR", :limit => 255 }
- tp[:integer] = { :name => "INTEGER" }
- tp[:float] = { :name => "FLOAT" }
- tp[:decimal] = { :name => "DECIMAL" }
- tp[:datetime] = { :name => "DATETIME YEAR TO FRACTION(5)" }
- tp[:timestamp] = { :name => "DATETIME YEAR TO FRACTION(5)" }
- tp[:time] = { :name => "DATETIME HOUR TO FRACTION(5)" }
- tp[:date] = { :name => "DATE" }
- tp[:binary] = { :name => "BYTE" }
- tp[:boolean] = { :name => "BOOLEAN" }
- tp
- end
-
- def prefetch_primary_key?(table_name = nil)
- true
- end
-
- def supports_migrations?
- true
- end
-
- def default_sequence_name(table, column)
- "#{table}_seq"
- end
-
- def add_limit_offset!(sql, options)
- if options[:limit]
- limit = "FIRST #{options[:limit]}"
- # SKIP available only in IDS >= 10
- offset = (@@db_major_version >= 10 && options[:offset]?
- "SKIP #{options[:offset]}" : "")
- sql.sub!(/^select /i, "SELECT #{offset} #{limit} ")
- end
- sql
- end
-
- def next_sequence_value(sequence_name)
- select_one("SELECT #{sequence_name}.nextval id FROM systables WHERE tabid=1")['id']
- end
-
- # TODO: Add some smart quoting for newlines in string and text fields.
- def quote_string(string)
- string.gsub(/\'/, "''")
- end
-
- def quote(value, column = nil)
- if column && [:binary, :text].include?(column.type)
- # LOBs are updated separately by an after_save trigger.
- "NULL"
- elsif column && column.type == :date
- "'#{value.mon}/#{value.day}/#{value.year}'"
- else
- super
- end
- end
-
- def create_table(name, options = {})
- super(name, options)
- execute("CREATE SEQUENCE #{name}_seq")
- end
-
- def rename_table(name, new_name)
- execute("RENAME TABLE #{name} TO #{new_name}")
- execute("RENAME SEQUENCE #{name}_seq TO #{new_name}_seq")
- end
-
- def drop_table(name)
- super(name)
- execute("DROP SEQUENCE #{name}_seq")
- end
-
- def remove_index(table_name, options = {})
- @connection.execute_update("DROP INDEX #{index_name(table_name, options)}")
- end
-
- private
- def select(sql, name = nil)
- # Informix does not like "= NULL", "!= NULL", or "<> NULL".
- execute(sql.gsub(/(!=|<>)\s*null/i, "IS NOT NULL").gsub(/=\s*null/i, "IS NULL"), name)
- end
- end # module Informix
-end # module ::ArJdbc
+++ /dev/null
-class ActiveRecord::Base
- class << self
- def informix_connection(config)
- config[:port] ||= 9088
- config[:url] ||= "jdbc:informix-sqli://#{config[:host]}:#{config[:port]}/#{config[:database]}:INFORMIXSERVER=#{config[:servername]}"
- config[:driver] = 'com.informix.jdbc.IfxDriver'
- jdbc_connection(config)
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc/adapter'
-require 'arjdbc/jdbc/discover'
+++ /dev/null
-require 'active_record/version'
-require 'active_record/connection_adapters/abstract_adapter'
-require 'arjdbc/version'
-require 'arjdbc/jdbc/require_driver'
-require 'arjdbc/jdbc/connection_methods'
-require 'arjdbc/jdbc/compatibility'
-require 'arjdbc/jdbc/core_ext'
-require 'arjdbc/jdbc/java'
-require 'arjdbc/jdbc/type_converter'
-require 'arjdbc/jdbc/driver'
-require 'arjdbc/jdbc/column'
-require 'arjdbc/jdbc/connection'
-require 'arjdbc/jdbc/callbacks'
-require 'arjdbc/jdbc/extension'
-require 'bigdecimal'
-
-module ActiveRecord
- module ConnectionAdapters
- class JdbcAdapter < AbstractAdapter
- extend ShadowCoreMethods
- include CompatibilityMethods if CompatibilityMethods.needed?(self)
- include JdbcConnectionPoolCallbacks if JdbcConnectionPoolCallbacks.needed?
-
- attr_reader :config
-
- def initialize(connection, logger, config)
- @config = config
- spec = adapter_spec config
- unless connection
- connection_class = jdbc_connection_class spec
- connection = connection_class.new config
- end
- super(connection, logger)
- extend spec if spec
- configure_arel2_visitors(config)
- connection.adapter = self
- JndiConnectionPoolCallbacks.prepare(self, connection)
- end
-
- def jdbc_connection_class(spec)
- connection_class = spec.jdbc_connection_class if spec && spec.respond_to?(:jdbc_connection_class)
- connection_class = ::ActiveRecord::ConnectionAdapters::JdbcConnection unless connection_class
- connection_class
- end
-
- def jdbc_column_class
- ActiveRecord::ConnectionAdapters::JdbcColumn
- end
-
- # Retrieve the raw java.sql.Connection object.
- def jdbc_connection
- raw_connection.connection
- end
-
- # Locate specialized adapter specification if one exists based on config data
- def adapter_spec(config)
- 2.times do
- dialect = (config[:dialect] || config[:driver]).to_s
- ::ArJdbc.constants.map { |name| ::ArJdbc.const_get name }.each do |constant|
- if constant.respond_to? :adapter_matcher
- spec = constant.adapter_matcher(dialect, config)
- return spec if spec
- end
- end
-
- # If nothing matches and we're using jndi, try to automatically detect the database.
- break unless config[:jndi] and !config[:dialect]
- begin
- conn = Java::javax.naming.InitialContext.new.lookup(config[:jndi]).getConnection
- config[:dialect] = conn.getMetaData.getDatabaseProductName
-
- # Derby-specific hack
- if ::ArJdbc::Derby.adapter_matcher(config[:dialect], config)
- # Needed to set the correct database schema name
- config[:username] ||= conn.getMetaData.getUserName
- end
- rescue
- conn.close if conn
- end
- end
- nil
- end
-
- def modify_types(tp)
- tp
- end
-
- def adapter_name #:nodoc:
- 'JDBC'
- end
-
- def arel2_visitors
- {}
- end
-
- def configure_arel2_visitors(config)
- if defined?(::Arel::Visitors::VISITORS)
- visitors = ::Arel::Visitors::VISITORS
- visitor = nil
- arel2_visitors.each do |k,v|
- visitor = v
- visitors[k] = v
- end
- if visitor && config[:adapter] =~ /^(jdbc|jndi)$/
- visitors[config[:adapter]] = visitor
- end
- end
- end
-
- def is_a?(klass) # :nodoc:
- # This is to fake out current_adapter? conditional logic in AR tests
- if Class === klass && klass.name =~ /#{adapter_name}Adapter$/i
- true
- else
- super
- end
- end
-
- def supports_migrations?
- true
- end
-
- def native_database_types #:nodoc:
- @connection.native_database_types
- end
-
- def database_name #:nodoc:
- @connection.database_name
- end
-
- def native_sql_to_type(tp)
- if /^(.*?)\(([0-9]+)\)/ =~ tp
- tname = $1
- limit = $2.to_i
- ntype = native_database_types
- if ntype[:primary_key] == tp
- return :primary_key,nil
- else
- ntype.each do |name,val|
- if name == :primary_key
- next
- end
- if val[:name].downcase == tname.downcase && (val[:limit].nil? || val[:limit].to_i == limit)
- return name,limit
- end
- end
- end
- elsif /^(.*?)/ =~ tp
- tname = $1
- ntype = native_database_types
- if ntype[:primary_key] == tp
- return :primary_key,nil
- else
- ntype.each do |name,val|
- if val[:name].downcase == tname.downcase && val[:limit].nil?
- return name,nil
- end
- end
- end
- else
- return :string,255
- end
- return nil,nil
- end
-
- def active?
- @connection.active?
- end
-
- def reconnect!
- @connection.reconnect!
- @connection
- end
-
- def disconnect!
- @connection.disconnect!
- end
-
- def execute(sql, name = nil)
- if name == :skip_logging
- _execute(sql)
- else
- log(sql, name) { _execute(sql) }
- end
- end
-
-      # We need to do it this way to allow Rails' stupid tests to always work
- # even if we define a new execute method. Instead of mixing in a new
- # execute, an _execute should be mixed in.
- def _execute(sql, name = nil)
- @connection.execute(sql)
- end
-
- def jdbc_insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
- insert_sql(sql, name, pk, id_value, sequence_name)
- end
-
- def jdbc_update(sql, name = nil) #:nodoc:
- execute(sql, name)
- end
- def jdbc_select_all(sql, name = nil)
- select(sql, name)
- end
-
- # Allow query caching to work even when we override alias_method_chain'd methods
- alias_chained_method :select_all, :query_cache, :jdbc_select_all
- alias_chained_method :update, :query_dirty, :jdbc_update
- alias_chained_method :insert, :query_dirty, :jdbc_insert
-
- # Do we need this? Not in AR 3.
- def select_one(sql, name = nil)
- select(sql, name).first
- end
-
- def select_rows(sql, name = nil)
- rows = []
- select(sql, name).each {|row| rows << row.values }
- rows
- end
-
- def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
- id = execute(sql, name = nil)
- id_value || id
- end
-
- def jdbc_columns(table_name, name = nil)
- @connection.columns(table_name.to_s)
- end
- alias_chained_method :columns, :query_cache, :jdbc_columns
-
- def tables(name = nil)
- @connection.tables
- end
-
- def table_exists?(name)
- jdbc_columns(name) rescue nil
- end
-
- def indexes(table_name, name = nil, schema_name = nil)
- @connection.indexes(table_name, name, schema_name)
- end
-
- def begin_db_transaction
- @connection.begin
- end
-
- def commit_db_transaction
- @connection.commit
- end
-
- def rollback_db_transaction
- @connection.rollback
- end
-
- def write_large_object(*args)
- @connection.write_large_object(*args)
- end
-
- def pk_and_sequence_for(table)
- key = primary_key(table)
- [key, nil] if key
- end
-
- def primary_key(table)
- primary_keys(table).first
- end
-
- def primary_keys(table)
- @connection.primary_keys(table)
- end
-
- def select(*args)
- execute(*args)
- end
-
- def translate_exception(e, message)
- puts e.backtrace if $DEBUG || ENV['DEBUG']
- super
- end
- protected :translate_exception
- end
- end
-end
+++ /dev/null
-module ActiveRecord
- module ConnectionAdapters
- module JdbcConnectionPoolCallbacks
- def self.included(base)
- if base.respond_to?(:set_callback) # Rails 3 callbacks
- base.set_callback :checkin, :after, :on_checkin
- base.set_callback :checkout, :before, :on_checkout
- else
- base.checkin :on_checkin
- base.checkout :on_checkout
- end
- end
-
- def self.needed?
- ActiveRecord::Base.respond_to?(:connection_pool)
- end
-
- def on_checkin
- # default implementation does nothing
- end
-
- def on_checkout
- # default implementation does nothing
- end
- end
-
- module JndiConnectionPoolCallbacks
- def self.prepare(adapter, conn)
- if ActiveRecord::Base.respond_to?(:connection_pool) && conn.jndi_connection?
- adapter.extend self
- conn.disconnect! # disconnect initial connection in JdbcConnection#initialize
- end
- end
-
- def on_checkin
- disconnect!
- end
-
- def on_checkout
- reconnect!
- end
- end
- end
-end
+++ /dev/null
-module ActiveRecord
- module ConnectionAdapters
- class JdbcColumn < Column
- attr_writer :limit, :precision
-
- def initialize(config, name, default, *args)
- call_discovered_column_callbacks(config)
- super(name,default_value(default),*args)
- init_column(name, default, *args)
- end
-
- def init_column(*args)
- end
-
- def default_value(val)
- val
- end
-
- def self.column_types
- # GH #25: reset the column types if the # of constants changed
- # since last call
- if ::ArJdbc.constants.size != driver_constants.size
- @driver_constants = nil
- @column_types = nil
- end
- @column_types ||= driver_constants.select {|c|
- c.respond_to? :column_selector }.map {|c|
- c.column_selector }.inject({}) {|h,val|
- h[val[0]] = val[1]; h }
- end
-
- def self.driver_constants
- @driver_constants ||= ::ArJdbc.constants.map {|c| ::ArJdbc.const_get c }
- end
-
- protected
- def call_discovered_column_callbacks(config)
- dialect = config[:dialect] || config[:driver]
- for reg, func in JdbcColumn.column_types
- if reg === dialect.to_s
- func.call(config,self)
- end
- end
- end
- end
- end
-end
+++ /dev/null
-# AR's 2.2 version of this method is sufficient, but we need it for
-# older versions
-if ActiveRecord::VERSION::MAJOR <= 2 && ActiveRecord::VERSION::MINOR < 2
- module ActiveRecord
- module ConnectionAdapters # :nodoc:
- module SchemaStatements
-        # Convert the specified column type to a SQL string.
- def type_to_sql(type, limit = nil, precision = nil, scale = nil)
- if native = native_database_types[type]
- column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup
-
- if type == :decimal # ignore limit, use precision and scale
- scale ||= native[:scale]
-
- if precision ||= native[:precision]
- if scale
- column_type_sql << "(#{precision},#{scale})"
- else
- column_type_sql << "(#{precision})"
- end
- elsif scale
-              raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified"
- end
-
- elsif limit ||= native.is_a?(Hash) && native[:limit]
- column_type_sql << "(#{limit})"
- end
-
- column_type_sql
- else
- type
- end
- end
- end
- end
- end
-end
-
-module ActiveRecord
- module ConnectionAdapters
- module CompatibilityMethods
- def self.needed?(base)
- !base.instance_methods.include?("quote_table_name")
- end
-
- def quote_table_name(name)
- quote_column_name(name)
- end
- end
- end
-end
+++ /dev/null
-module ActiveRecord
- module ConnectionAdapters
- class JdbcConnection
- module ConfigHelper
- attr_reader :config
-
- def config=(config)
- @config = config.symbolize_keys
- end
-
- def configure_connection
- config[:retry_count] ||= 5
- config[:connection_alive_sql] ||= "select 1"
-
- # sonar
- @jndi_connection = true
- # /sonar
- @connection = nil
- if config[:jndi]
- begin
- configure_jndi
- rescue => e
- warn "JNDI data source unavailable: #{e.message}; trying straight JDBC"
- configure_jdbc
- end
- else
- configure_jdbc
- end
- end
-
- def configure_jndi
- jndi = config[:jndi].to_s
- ctx = javax.naming.InitialContext.new
- ds = ctx.lookup(jndi)
- @connection_factory = JdbcConnectionFactory.impl do
- ds.connection
- end
- unless config[:driver]
- config[:driver] = connection.meta_data.connection.java_class.name
- end
- @jndi_connection = true
- end
-
- def configure_url
- url = config[:url].to_s
- if Hash === config[:options]
- options = ''
- config[:options].each do |k,v|
- options << '&' unless options.empty?
- options << "#{k}=#{v}"
- end
- url = url['?'] ? "#{url}&#{options}" : "#{url}?#{options}" unless options.empty?
- config[:url] = url
- config[:options] = nil
- end
- url
- end
-
- def configure_jdbc
- unless config[:driver] && config[:url]
- raise ::ActiveRecord::ConnectionNotEstablished, "jdbc adapter requires driver class and url"
- end
-
- driver = config[:driver].to_s
- user = config[:username].to_s
- pass = config[:password].to_s
- url = configure_url
- # sonar
- #jdbc_driver = (config[:driver_instance] ||= JdbcDriver.new(driver))
- # /sonar
- @connection_factory = JdbcConnectionFactory.impl do
- # sonar
- #jdbc_driver.connection(url, user, pass)
- ::Java::OrgSonarServerUi::JRubyFacade.getInstance().getConnection()
- # /sonar
- end
- end
- end
-
- attr_reader :adapter, :connection_factory
-
-      # @native_database_types - set up properly by adapter= versus set_native_database_types.
-      # This contains type information for the adapter. Individual adapters can make tweaks
-      # by defining modify_types.
-      #
-      # @native_types - These are the default type settings sans any modifications by the
-      # individual adapter. My guess is that if we loaded two adapters of different types,
-      # then this is used as a base to be tweaked by each adapter to create @native_database_types.
-
- def initialize(config)
- self.config = config
- configure_connection
- connection # force the connection to load
- set_native_database_types
- @stmts = {}
- rescue ::ActiveRecord::ActiveRecordError
- raise
- rescue Exception => e
- raise ::ActiveRecord::JDBCError.new("The driver encountered an unknown error: #{e}").tap { |err|
- err.errno = 0
- err.sql_exception = e
- }
- end
-
- def adapter=(adapter)
- @adapter = adapter
- @native_database_types = dup_native_types
- @adapter.modify_types(@native_database_types)
- @adapter.config.replace(config)
- end
-
- # Duplicate all native types into new hash structure so it can be modified
- # without destroying original structure.
- def dup_native_types
- types = {}
- @native_types.each_pair do |k, v|
- types[k] = v.inject({}) do |memo, kv|
- memo[kv.first] = begin kv.last.dup rescue kv.last end
- memo
- end
- end
- types
- end
- private :dup_native_types
-
- def jndi_connection?
- @jndi_connection
- end
-
- def active?
- @connection
- end
-
- private
- include ConfigHelper
- end
- end
-end
+++ /dev/null
-class ActiveRecord::Base
- class << self
- def jdbc_connection(config)
- adapter_class = config[:adapter_class]
- adapter_class ||= ::ActiveRecord::ConnectionAdapters::JdbcAdapter
- adapter_class.new(nil, logger, config)
- end
- alias jndi_connection jdbc_connection
-
- def embedded_driver(config)
- config[:username] ||= "sa"
- config[:password] ||= ""
- jdbc_connection(config)
- end
- end
-end
+++ /dev/null
-module ActiveRecord # :nodoc:
- # Represents exceptions that have propagated up through the JDBC API.
- class JDBCError < ActiveRecordError
- # The vendor code or error number that came from the database
- attr_accessor :errno
-
- # The full Java SQLException object that was raised
- attr_accessor :sql_exception
- end
-
- module ConnectionAdapters # :nodoc:
- # Allows properly re-wrapping/re-defining methods that may already
- # be alias_method_chain'd.
- module ShadowCoreMethods
- def alias_chained_method(meth, feature, target)
- if instance_methods.include?("#{meth}_without_#{feature}")
- alias_method "#{meth}_without_#{feature}".to_sym, target
- else
- alias_method meth, target if meth != target
- end
- end
- end
- end
-end
+++ /dev/null
-module ArJdbc
- def self.discover_extensions
- if defined?(::Gem) && ::Gem.respond_to?(:find_files)
- files = ::Gem.find_files('arjdbc/discover')
- else
- files = $LOAD_PATH.map do |p|
- discover = File.join(p, 'arjdbc','discover.rb')
- File.exist?(discover) ? discover : nil
- end.compact
- end
- files.each do |f|
- puts "Loading #{f}" if $DEBUG
- require f
- end
- end
-
- discover_extensions
-end
+++ /dev/null
-module ActiveRecord
- module ConnectionAdapters
- class JdbcDriver
- def initialize(name)
- @name = name
- @driver = driver_class.new
- end
-
- def driver_class
- @driver_class ||= begin
- driver_class_const = (@name[0...1].capitalize + @name[1..@name.length]).gsub(/\./, '_')
- Jdbc::Mutex.synchronized do
- unless Jdbc.const_defined?(driver_class_const)
- driver_class_name = @name
- Jdbc.module_eval do
- java_import(driver_class_name) { driver_class_const }
- end
- end
- end
- driver_class = Jdbc.const_get(driver_class_const)
- raise "You must specify a driver for your JDBC connection" unless driver_class
- driver_class
- end
- end
-
- def connection(url, user, pass)
-      # Bypass DriverManager to get around a problem with dynamically loaded JDBC drivers
- props = java.util.Properties.new
- props.setProperty("user", user)
- props.setProperty("password", pass)
- @driver.connect(url, props)
- end
- end
- end
-end
+++ /dev/null
-module ArJdbc
- # Defines an AR-JDBC extension. An extension consists of a
- # declaration using this method and an ArJdbc::XYZ module that
- # contains implementation and overrides for methods in
- # ActiveRecord::ConnectionAdapters::AbstractAdapter. When you
- # declare your extension, you provide a block that detects when a
- # database configured to use the extension is present and loads the
- # necessary code for it. AR-JDBC will patch the code into the base
- # ActiveRecord::ConnectionAdapters::JdbcAdapter by extending an
- # instance of it with your extension module.
- #
- # +name+ should be a symbol that is the name of a module to be
- # defined under the +ArJdbc+ module.
- #
- # +block+ should be a one- or two-arity block that receives the
- # dialect name or driver class name as the first argument, and
- # optionally the whole database configuration hash as a second
- # argument.
- #
- # Example:
- #
- # ArJdbc.extension :Frob do |name|
- # if name =~ /frob/i
- # # arjdbc/frob.rb should contain the implementation
- # require 'arjdbc/frob'
- # true
- # end
- # end
- def self.extension(name,&block)
- if const_defined?(name)
- mod = const_get(name)
- else
- mod = const_set(name, Module.new)
- end
- (class << mod; self; end).instance_eval do
- unless respond_to?(:adapter_matcher)
- define_method :adapter_matcher do |name, config|
- if block.arity == 1
- block.call(name) ? mod : false
- else
- block.call(name, config) ? mod : false
- end
- end
- end
- end
- end
-end
+++ /dev/null
-require 'java'
-require 'arjdbc/jdbc/adapter_java'
-
-module ActiveRecord
- module ConnectionAdapters
- module Jdbc
- Mutex = java.lang.Object.new
- DriverManager = java.sql.DriverManager
- Types = java.sql.Types
- end
-
- java_import "arjdbc.jdbc.JdbcConnectionFactory"
- end
-end
+++ /dev/null
-def redefine_task(*args, &block)
- task_name = Hash === args.first ? args.first.keys[0] : args.first
- existing_task = Rake.application.lookup task_name
- if existing_task
- class << existing_task
- public :instance_variable_set
- attr_reader :actions
- end
- existing_task.instance_variable_set "@prerequisites", FileList[]
- existing_task.actions.shift
- enhancements = existing_task.actions
- existing_task.instance_variable_set "@actions", []
- end
- redefined_task = task(*args, &block)
- enhancements.each {|enhancement| redefined_task.actions << enhancement}
-end
-
-def rails_env
- defined?(Rails.env) ? Rails.env : RAILS_ENV
-end
-
-namespace :db do
- redefine_task :create => :rails_env do
- create_database(ActiveRecord::Base.configurations[rails_env])
- end
- task :create => :load_config if Rake.application.lookup(:load_config)
-
- redefine_task :drop => :environment do
- config = ActiveRecord::Base.configurations[rails_env]
- begin
- db = find_database_name(config)
- ActiveRecord::Base.connection.drop_database(db)
- rescue
- drop_database(config.merge('adapter' => config['adapter'].sub(/^jdbc/, '')))
- end
- end
- task :drop => :load_config if Rake.application.lookup(:load_config)
-
- namespace :create do
- task :all => :rails_env
- end
-
- namespace :drop do
- task :all => :environment
- end
-
- class << self
- alias_method :previous_create_database, :create_database
- alias_method :previous_drop_database, :drop_database
- end
-
- def find_database_name(config)
- db = config['database']
- if config['adapter'] =~ /postgresql/i
- config = config.dup
- if config['url']
- url = config['url'].dup
- db = url[/\/([^\/]*)$/, 1]
- if db
- url[/\/([^\/]*)$/, 1] = 'postgres'
- config['url'] = url
- end
- else
- db = config['database']
- config['database'] = 'postgres'
- end
- ActiveRecord::Base.establish_connection(config)
- else
- ActiveRecord::Base.establish_connection(config)
- db = ActiveRecord::Base.connection.database_name
- end
- db
- end
-
- def create_database(config)
- begin
- ActiveRecord::Base.establish_connection(config)
- ActiveRecord::Base.connection
- rescue
- begin
- if url = config['url'] and url =~ /^(.*(?<!\/)\/)(?=\w)/
- url = $1
- end
-
- ActiveRecord::Base.establish_connection(config.merge({'database' => nil, 'url' => url}))
- ActiveRecord::Base.connection.create_database(config['database'])
- ActiveRecord::Base.establish_connection(config)
- rescue => e
- raise e unless config['adapter'] =~ /mysql|postgresql|sqlite/
- previous_create_database(config.merge('adapter' => config['adapter'].sub(/^jdbc/, '')))
- end
- end
- end
-
- def drop_database(config)
- previous_drop_database(config.merge('adapter' => config['adapter'].sub(/^jdbc/, '')))
- end
-
- namespace :structure do
- redefine_task :dump => :environment do
- abcs = ActiveRecord::Base.configurations
- ActiveRecord::Base.establish_connection(abcs[rails_env])
- File.open("db/#{rails_env}_structure.sql", "w+") { |f| f << ActiveRecord::Base.connection.structure_dump }
- if ActiveRecord::Base.connection.supports_migrations?
- File.open("db/#{rails_env}_structure.sql", "a") { |f| f << ActiveRecord::Base.connection.dump_schema_information }
- end
- end
- end
-
- namespace :test do
- redefine_task :clone_structure => [ "db:structure:dump", "db:test:purge" ] do
- abcs = ActiveRecord::Base.configurations
- abcs['test']['pg_params'] = '?allowEncodingChanges=true' if abcs['test']['adapter'] =~ /postgresql/i
- ActiveRecord::Base.establish_connection(abcs["test"])
- ActiveRecord::Base.connection.execute('SET foreign_key_checks = 0') if abcs["test"]["adapter"] =~ /mysql/i
- IO.readlines("db/#{rails_env}_structure.sql").join.split(";\n\n").each do |ddl|
- begin
- ActiveRecord::Base.connection.execute(ddl.chomp(';'))
- rescue Exception => ex
- puts ex.message
- end
- end
- end
-
- redefine_task :purge => :environment do
- abcs = ActiveRecord::Base.configurations
- db = find_database_name(abcs['test'])
- ActiveRecord::Base.connection.recreate_database(db)
- end
- end
-end
+++ /dev/null
-module ArJdbc
- module MissingFunctionalityHelper
-    # Taken from the SQLite adapter.
-
- def alter_table(table_name, options = {}) #:nodoc:
- table_name = table_name.to_s.downcase
- altered_table_name = "altered_#{table_name}"
- caller = lambda {|definition| yield definition if block_given?}
-
- transaction do
- # A temporary table might improve performance here, but
- # it doesn't seem to maintain indices across the whole move.
- move_table(table_name, altered_table_name,
- options)
- move_table(altered_table_name, table_name, &caller)
- end
- end
-
- def move_table(from, to, options = {}, &block) #:nodoc:
- copy_table(from, to, options, &block)
- drop_table(from)
- end
-
- def copy_table(from, to, options = {}) #:nodoc:
- options = options.merge(:id => (!columns(from).detect{|c| c.name == 'id'}.nil? && 'id' == primary_key(from).to_s))
- create_table(to, options) do |definition|
- @definition = definition
- columns(from).each do |column|
- column_name = options[:rename] ?
- (options[:rename][column.name] ||
- options[:rename][column.name.to_sym] ||
- column.name) : column.name
-
- @definition.column(column_name, column.type,
- :limit => column.limit, :default => column.default,
- :null => column.null)
- end
- @definition.primary_key(primary_key(from)) if primary_key(from)
- yield @definition if block_given?
- end
-
- copy_table_indexes(from, to, options[:rename] || {})
- copy_table_contents(from, to,
- @definition.columns.map {|column| column.name},
- options[:rename] || {})
- end
-
- def copy_table_indexes(from, to, rename = {}) #:nodoc:
- indexes(from).each do |index|
- name = index.name.downcase
- if to == "altered_#{from}"
- name = "temp_#{name}"
- elsif from == "altered_#{to}"
- name = name[5..-1]
- end
-
- to_column_names = columns(to).map(&:name)
- columns = index.columns.map {|c| rename[c] || c }.select do |column|
- to_column_names.include?(column)
- end
-
- unless columns.empty?
- # index name can't be the same
- opts = { :name => name.gsub(/(_?)(#{from})_/, "\\1#{to}_") }
- opts[:unique] = true if index.unique
- add_index(to, columns, opts)
- end
- end
- end
-
- def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
- column_mappings = Hash[*columns.map {|name| [name, name]}.flatten]
- rename.inject(column_mappings) {|map, a| map[a.last] = a.first; map}
- from_columns = columns(from).collect {|col| col.name}
- columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
- quoted_columns = columns.map { |col| quote_column_name(col) } * ','
-
- quoted_to = quote_table_name(to)
- execute("SELECT * FROM #{quote_table_name(from)}").each do |row|
- sql = "INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES ("
- sql << columns.map {|col| quote row[column_mappings[col]]} * ', '
- sql << ')'
- execute sql
- end
- end
- end
-end
+++ /dev/null
-module ArJdbc
- module QuotedPrimaryKeyExtension
- def self.extended(base)
- # Rails 3 method Rails 2 method
- meth = [:arel_attributes_values, :attributes_with_quotes].detect do |m|
- base.private_instance_methods.include?(m.to_s)
- end
- pk_hash_key = "self.class.primary_key"
- pk_hash_value = '"?"'
- if meth == :arel_attributes_values
- pk_hash_key = "self.class.arel_table[#{pk_hash_key}]"
- pk_hash_value = "Arel::SqlLiteral.new(#{pk_hash_value})"
- end
- if meth
- base.module_eval <<-PK, __FILE__, __LINE__
- alias :#{meth}_pre_pk :#{meth}
- def #{meth}(include_primary_key = true, *args) #:nodoc:
- aq = #{meth}_pre_pk(include_primary_key, *args)
- if connection.is_a?(ArJdbc::Oracle) || connection.is_a?(ArJdbc::Mimer)
- aq[#{pk_hash_key}] = #{pk_hash_value} if include_primary_key && aq[#{pk_hash_key}].nil?
- end
- aq
- end
- PK
- end
- end
- end
-end
+++ /dev/null
-require 'rails/railtie'
-
-module ::ArJdbc
- class Railtie < ::Rails::Railtie
- rake_tasks do
- load File.expand_path('../rake_tasks.rb', __FILE__)
- end
- end
-end
+++ /dev/null
-if defined?(Rake.application) && Rake.application && ENV["SKIP_AR_JDBC_RAKE_REDEFINES"].nil?
- jdbc_rakefile = File.dirname(__FILE__) + "/jdbc.rake"
- if Rake.application.lookup("db:create")
- # rails tasks already defined; load the override tasks now
- load jdbc_rakefile
- else
- # rails tasks not loaded yet; load as an import
- Rake.application.add_import(jdbc_rakefile)
- end
-end
+++ /dev/null
-module Kernel
-  # Load a JDBC driver library/gem, failing silently. If loading fails, trust
-  # that the driver jar is already present through some other means.
- def jdbc_require_driver(path, gem_name = nil)
- gem_name ||= path.sub('/', '-')
- 2.times do
- begin
- require path
- break
- rescue LoadError
- require 'rubygems'
- begin; gem gem_name; rescue LoadError; end
- end
- end
- end
-end
+++ /dev/null
-module ActiveRecord
- module ConnectionAdapters
- # I want to use JDBC's DatabaseMetaData#getTypeInfo to choose the best native types to
- # use for ActiveRecord's Adapter#native_database_types in a database-independent way,
- # but apparently a database driver can return multiple types for a given
- # java.sql.Types constant. So this type converter uses some heuristics to try to pick
-    # the best (most common) type to use. It's not great; it would be better to just
-    # delegate to each database's existing AR adapter's native_database_types method, but I
- # wanted to try to do this in a way that didn't pull in all the other adapters as
- # dependencies. Suggestions appreciated.
- class JdbcTypeConverter
- # The basic ActiveRecord types, mapped to an array of procs that are used to #select
- # the best type. The procs are used as selectors in order until there is only one
- # type left. If all the selectors are applied and there is still more than one
- # type, an exception will be raised.
- AR_TO_JDBC_TYPES = {
- :string => [ lambda {|r| Jdbc::Types::VARCHAR == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^varchar/i},
- lambda {|r| r['type_name'] =~ /^varchar$/i},
- lambda {|r| r['type_name'] =~ /varying/i}],
- :text => [ lambda {|r| [Jdbc::Types::LONGVARCHAR, Jdbc::Types::CLOB].include?(r['data_type'].to_i)},
- lambda {|r| r['type_name'] =~ /^text$/i}, # For Informix
- lambda {|r| r['type_name'] =~ /sub_type 1$/i}, # For FireBird
- lambda {|r| r['type_name'] =~ /^(text|clob)$/i},
- lambda {|r| r['type_name'] =~ /^character large object$/i},
- lambda {|r| r['sql_data_type'] == 2005}],
- :integer => [ lambda {|r| Jdbc::Types::INTEGER == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^integer$/i},
- lambda {|r| r['type_name'] =~ /^int4$/i},
- lambda {|r| r['type_name'] =~ /^int$/i}],
- :decimal => [ lambda {|r| Jdbc::Types::DECIMAL == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^decimal$/i},
- lambda {|r| r['type_name'] =~ /^numeric$/i},
- lambda {|r| r['type_name'] =~ /^number$/i},
- lambda {|r| r['type_name'] =~ /^real$/i},
- lambda {|r| r['precision'] == '38'},
- lambda {|r| r['data_type'] == '2'}],
- :float => [ lambda {|r| [Jdbc::Types::FLOAT,Jdbc::Types::DOUBLE, Jdbc::Types::REAL].include?(r['data_type'].to_i)},
- lambda {|r| r['data_type'].to_i == Jdbc::Types::REAL}, #Prefer REAL to DOUBLE for Postgresql
- lambda {|r| r['type_name'] =~ /^float/i},
- lambda {|r| r['type_name'] =~ /^double$/i},
- lambda {|r| r['type_name'] =~ /^real$/i},
- lambda {|r| r['precision'] == '15'}],
- :datetime => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^datetime$/i},
- lambda {|r| r['type_name'] =~ /^timestamp$/i},
- lambda {|r| r['type_name'] =~ /^date/i},
- lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
- :timestamp => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^timestamp$/i},
- lambda {|r| r['type_name'] =~ /^datetime/i},
- lambda {|r| r['type_name'] =~ /^date/i},
- lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
- :time => [ lambda {|r| Jdbc::Types::TIME == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^time$/i},
- lambda {|r| r['type_name'] =~ /^datetime/i}, # For Informix
- lambda {|r| r['type_name'] =~ /^date/i},
- lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver
- :date => [ lambda {|r| Jdbc::Types::DATE == r['data_type'].to_i},
- lambda {|r| r['type_name'] =~ /^date$/i},
- lambda {|r| r['type_name'] =~ /^date/i},
- lambda {|r| r['type_name'] =~ /^integer/i}], #Num of milliseconds for SQLite3 JDBC Driver3
- :binary => [ lambda {|r| [Jdbc::Types::LONGVARBINARY,Jdbc::Types::BINARY,Jdbc::Types::BLOB].include?(r['data_type'].to_i)},
- lambda {|r| r['type_name'] =~ /^blob/i},
- lambda {|r| r['type_name'] =~ /sub_type 0$/i}, # For FireBird
- lambda {|r| r['type_name'] =~ /^varbinary$/i}, # We want this sucker for Mimer
- lambda {|r| r['type_name'] =~ /^binary$/i}, ],
- :boolean => [ lambda {|r| [Jdbc::Types::TINYINT].include?(r['data_type'].to_i)},
- lambda {|r| r['type_name'] =~ /^bool/i},
- lambda {|r| r['data_type'] == '-7'},
- lambda {|r| r['type_name'] =~ /^tinyint$/i},
- lambda {|r| r['type_name'] =~ /^decimal$/i},
- lambda {|r| r['type_name'] =~ /^integer$/i}]
- }
-
- def initialize(types)
- @types = types
- @types.each {|t| t['type_name'] ||= t['local_type_name']} # Sybase driver seems to want 'local_type_name'
- end
-
- def choose_best_types
- type_map = {}
- @types.each do |row|
- name = row['type_name'].downcase
- k = name.to_sym
- type_map[k] = { :name => name }
- set_limit_to_nonzero_precision(type_map[k], row)
- end
-
- AR_TO_JDBC_TYPES.keys.each do |k|
- typerow = choose_type(k)
- type_map[k] = { :name => typerow['type_name'].downcase }
- case k
- when :integer, :string, :decimal
- set_limit_to_nonzero_precision(type_map[k], typerow)
- when :boolean
- type_map[k][:limit] = 1
- end
- end
- type_map
- end
-
- def choose_type(ar_type)
- procs = AR_TO_JDBC_TYPES[ar_type]
- types = @types
- procs.each do |p|
- new_types = types.reject {|r| r["data_type"].to_i == Jdbc::Types::OTHER}
- new_types = new_types.select(&p)
- new_types = new_types.inject([]) do |typs,t|
- typs << t unless typs.detect {|el| el['type_name'] == t['type_name']}
- typs
- end
- return new_types.first if new_types.length == 1
- types = new_types if new_types.length > 0
- end
- raise "unable to choose type for #{ar_type} from:\n#{types.collect{|t| t['type_name']}.inspect}"
- end
-
- def set_limit_to_nonzero_precision(map, row)
- if row['precision'] && row['precision'].to_i > 0
- map[:limit] = row['precision'].to_i
- end
- end
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-require 'arjdbc/mimer/adapter'
+++ /dev/null
-module ArJdbc
- module Mimer
- def self.extended(mod)
- require 'arjdbc/jdbc/quoted_primary_key'
- ActiveRecord::Base.extend ArJdbc::QuotedPrimaryKeyExtension
- end
-
- def modify_types(tp)
- tp[:primary_key] = "INTEGER NOT NULL PRIMARY KEY"
- tp[:boolean][:limit] = nil
- tp[:string][:limit] = 255
- tp[:binary] = {:name => "BINARY VARYING", :limit => 4096}
- tp[:text] = {:name => "VARCHAR", :limit => 4096}
- tp[:datetime] = { :name => "TIMESTAMP" }
- tp[:timestamp] = { :name => "TIMESTAMP" }
- tp[:time] = { :name => "TIMESTAMP" }
- tp[:date] = { :name => "TIMESTAMP" }
- tp
- end
-
- def default_sequence_name(table, column) #:nodoc:
- "#{table}_seq"
- end
-
- def create_table(name, options = {}) #:nodoc:
- super(name, options)
- execute "CREATE SEQUENCE #{name}_seq" unless options[:id] == false
- end
-
- def drop_table(name, options = {}) #:nodoc:
- super(name) rescue nil
- execute "DROP SEQUENCE #{name}_seq" rescue nil
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit])}"
- end
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{quote(default)}"
- end
-
- def remove_index(table_name, options = {}) #:nodoc:
- execute "DROP INDEX #{index_name(table_name, options)}"
- end
-
- def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
- if pk.nil? # Who called us? What does the sql look like? No idea!
- execute sql, name
- elsif id_value # Pre-assigned id
- log(sql, name) { @connection.execute_insert sql,pk }
- else # Assume the sql contains a bind-variable for the id
- id_value = select_one("SELECT NEXT_VALUE OF #{sequence_name} AS val FROM MIMER.ONEROW")['val']
- log(sql, name) {
- execute_prepared_insert(sql,id_value)
- }
- end
- id_value
- end
-
- def execute_prepared_insert(sql, id)
- @stmts ||= {}
- @stmts[sql] ||= @connection.ps(sql)
- stmt = @stmts[sql]
- stmt.setLong(1,id)
- stmt.executeUpdate
- id
- end
-
- def quote(value, column = nil) #:nodoc:
- return value.quoted_id if value.respond_to?(:quoted_id)
-
- if String === value && column && column.type == :binary
- return "X'#{quote_string(value.unpack("C*").collect {|v| v.to_s(16)}.join)}'"
- end
- case value
- when String
- %Q{'#{quote_string(value)}'}
- when NilClass
- 'NULL'
- when TrueClass
- '1'
- when FalseClass
- '0'
- when Numeric
- value.to_s
- when Date, Time
- %Q{TIMESTAMP '#{value.strftime("%Y-%m-%d %H:%M:%S")}'}
- else
- %Q{'#{quote_string(value.to_yaml)}'}
- end
- end
-
- def quoted_true
- '1'
- end
-
- def quoted_false
- '0'
- end
-
- def add_limit_offset!(sql, options) # :nodoc:
- @limit = options[:limit]
- @offset = options[:offset]
- end
-
- def select_all(sql, name = nil)
- @offset ||= 0
- if !@limit || @limit == -1
- range = @offset..-1
- else
- range = @offset...(@offset+@limit)
- end
- select(sql, name)[range]
- ensure
- @limit = @offset = nil
- end
-
- def select_one(sql, name = nil)
- @offset ||= 0
- select(sql, name)[@offset]
- ensure
- @limit = @offset = nil
- end
-
- def _execute(sql, name = nil)
- if sql =~ /^select/i
- @offset ||= 0
- if !@limit || @limit == -1
- range = @offset..-1
- else
- range = @offset...(@offset+@limit)
- end
- @connection.execute_query(sql)[range]
- else
- @connection.execute_update(sql)
- end
- ensure
- @limit = @offset = nil
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/jtds', 'jdbc-mssql'
-require 'arjdbc/mssql/connection_methods'
-require 'arjdbc/mssql/adapter'
+++ /dev/null
-require 'arjdbc/mssql/tsql_helper'
-require 'arjdbc/mssql/limit_helpers'
-
-module ::ArJdbc
- module MsSQL
- include TSqlMethods
- include LimitHelpers
-
- def self.extended(mod)
- unless @lob_callback_added
- ActiveRecord::Base.class_eval do
- def after_save_with_mssql_lob
- self.class.columns.select { |c| c.sql_type =~ /image/i }.each do |c|
- value = self[c.name]
- value = value.to_yaml if unserializable_attribute?(c.name, c)
- next if value.nil? || (value == '')
-
- connection.write_large_object(c.type == :binary, c.name, self.class.table_name, self.class.primary_key, quote_value(id), value)
- end
- end
- end
-
- ActiveRecord::Base.after_save :after_save_with_mssql_lob
- @lob_callback_added = true
- end
- mod.add_version_specific_add_limit_offset
- end
-
- def self.column_selector
- [/sqlserver|tds|Microsoft SQL/i, lambda {|cfg,col| col.extend(::ArJdbc::MsSQL::Column)}]
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::MssqlJdbcConnection
- end
-
- def arel2_visitors
- require 'arel/visitors/sql_server'
- visitor_class = sqlserver_version == "2000" ? ::Arel::Visitors::SQLServer2000 : ::Arel::Visitors::SQLServer
- { 'mssql' => visitor_class, 'sqlserver' => visitor_class, 'jdbcmssql' => visitor_class}
- end
-
- def sqlserver_version
- @sqlserver_version ||= select_value("select @@version")[/Microsoft SQL Server\s+(\d{4})/, 1]
- end
-
- def add_version_specific_add_limit_offset
- if sqlserver_version == "2000"
- extend LimitHelpers::SqlServer2000AddLimitOffset
- else
- extend LimitHelpers::SqlServerAddLimitOffset
- end
- end
-
- def modify_types(tp) #:nodoc:
- super(tp)
- tp[:string] = {:name => "NVARCHAR", :limit => 255}
- if sqlserver_version == "2000"
- tp[:text] = {:name => "NTEXT"}
- else
- tp[:text] = {:name => "NVARCHAR(MAX)"}
- end
-
- # sonar
- tp[:big_integer] = { :name => "bigint"}
- # /sonar
-
- tp
- end
-
- def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
- # MSSQL's NVARCHAR(n | max) column supports either a number between 1 and
- # 4000, or the word "MAX", which corresponds to 2**30-1 UCS-2 characters.
- #
- # It does not accept NVARCHAR(1073741823) here, so we have to change it
- # to NVARCHAR(MAX), even though they are logically equivalent.
- #
- # MSSQL Server 2000 is skipped here because I don't know how it will behave.
- #
- # See: http://msdn.microsoft.com/en-us/library/ms186939.aspx
- if type.to_s == 'string' and limit == 1073741823 and sqlserver_version != "2000"
- 'NVARCHAR(MAX)'
- elsif %w( boolean date datetime ).include?(type.to_s)
- super(type) # cannot specify limit/precision/scale with these types
- else
- super
- end
- end
-
- module Column
- attr_accessor :identity, :is_special
-
- def simplified_type(field_type)
- case field_type
- when /int|bigint|smallint|tinyint/i then :integer
- when /numeric/i then (@scale.nil? || @scale == 0) ? :integer : :decimal
- when /float|double|decimal|money|real|smallmoney/i then :decimal
- when /datetime|smalldatetime/i then :datetime
- when /timestamp/i then :timestamp
- when /time/i then :time
- when /date/i then :date
- when /text|ntext|xml/i then :text
- when /binary|image|varbinary/i then :binary
- when /char|nchar|nvarchar|string|varchar/i then (@limit == 1073741823 ? (@limit = nil; :text) : :string)
- when /bit/i then :boolean
- when /uniqueidentifier/i then :string
- end
- end
-
- def default_value(value)
- return $1 if value =~ /^\(N?'(.*)'\)$/
- value
- end
-
- def type_cast(value)
- return nil if value.nil? || value == "(null)" || value == "(NULL)"
- case type
- when :integer then value.to_i rescue unquote(value).to_i rescue value ? 1 : 0
- when :primary_key then value == true || value == false ? value == true ? 1 : 0 : value.to_i
- when :decimal then self.class.value_to_decimal(unquote(value))
- when :datetime then cast_to_datetime(value)
- when :timestamp then cast_to_time(value)
- when :time then cast_to_time(value)
- when :date then cast_to_date(value)
- when :boolean then value == true or (value =~ /^t(rue)?$/i) == 0 or unquote(value)=="1"
- when :binary then unquote value
- else value
- end
- end
-
- def extract_limit(sql_type)
- case sql_type
- when /text|ntext|xml|binary|image|varbinary|bit/
- nil
- else
- super
- end
- end
-
- def is_utf8?
- sql_type =~ /nvarchar|ntext|nchar/i
- end
-
- def unquote(value)
- value.to_s.sub(/\A\([\(\']?/, "").sub(/[\'\)]?\)\Z/, "")
- end
-
- def cast_to_time(value)
- return value if value.is_a?(Time)
- time_array = ParseDate.parsedate(value)
- return nil if !time_array.any?
- time_array[0] ||= 2000
- time_array[1] ||= 1
- time_array[2] ||= 1
- return Time.send(ActiveRecord::Base.default_timezone, *time_array) rescue nil
-
- # Try DateTime instead - the date may be outside the time period support by Time.
- DateTime.new(*time_array[0..5]) rescue nil
- end
-
- def cast_to_date(value)
- return value if value.is_a?(Date)
- return Date.parse(value) rescue nil
- end
-
- def cast_to_datetime(value)
- if value.is_a?(Time)
- if value.year != 0 and value.month != 0 and value.day != 0
- return value
- else
- return Time.mktime(2000, 1, 1, value.hour, value.min, value.sec) rescue nil
- end
- end
- if value.is_a?(DateTime)
- begin
- # Attempt to convert back to a Time, but it could fail for dates significantly in the past/future.
- return Time.mktime(value.year, value.mon, value.day, value.hour, value.min, value.sec)
- rescue ArgumentError
- return value
- end
- end
-
- return cast_to_time(value) if value.is_a?(Date) or value.is_a?(String) rescue nil
-
- return value.is_a?(Date) ? value : nil
- end
-
- # These methods will only allow the adapter to insert binary data with a length of 7K or less
- # because of a SQL Server statement length policy.
- def self.string_to_binary(value)
- ''
- end
-
- end
-
- def quote(value, column = nil)
- return value.quoted_id if value.respond_to?(:quoted_id)
-
- case value
- # SQL Server 2000 doesn't let you insert an integer into a NVARCHAR
- # column, so we include Integer here.
- when String, ActiveSupport::Multibyte::Chars, Integer
- value = value.to_s
- if column && column.type == :binary
- "'#{quote_string(ArJdbc::MsSQL::Column.string_to_binary(value))}'" # ' (for ruby-mode)
- elsif column && [:integer, :float].include?(column.type)
- value = column.type == :integer ? value.to_i : value.to_f
- value.to_s
- elsif !column.respond_to?(:is_utf8?) || column.is_utf8?
- "N'#{quote_string(value)}'" # ' (for ruby-mode)
- else
- super
- end
- when TrueClass then '1'
- when FalseClass then '0'
- else super
- end
- end
-
- def quote_string(string)
- string.gsub(/\'/, "''")
- end
-
- def quote_table_name(name)
- quote_column_name(name)
- end
-
- def quote_column_name(name)
- "[#{name}]"
- end
-
- def quoted_true
- quote true
- end
-
- def quoted_false
- quote false
- end
-
- def adapter_name #:nodoc:
- 'MsSQL'
- end
-
- def change_order_direction(order)
- order.split(",").collect do |fragment|
- case fragment
- when /\bDESC\b/i then fragment.gsub(/\bDESC\b/i, "ASC")
- when /\bASC\b/i then fragment.gsub(/\bASC\b/i, "DESC")
- else String.new(fragment).split(',').join(' DESC,') + ' DESC'
- end
- end.join(",")
- end
-
- def supports_ddl_transactions?
- true
- end
-
- def recreate_database(name)
- drop_database(name)
- create_database(name)
- end
-
- def drop_database(name)
- execute "USE master"
- execute "DROP DATABASE #{name}"
- end
-
- def create_database(name)
- execute "CREATE DATABASE #{name}"
- execute "USE #{name}"
- end
-
- def rename_table(name, new_name)
- clear_cached_table(name)
- execute "EXEC sp_rename '#{name}', '#{new_name}'"
- end
-
- # Adds a new column to the named table.
- # See TableDefinition#column for details of the options you can use.
- def add_column(table_name, column_name, type, options = {})
- clear_cached_table(table_name)
- add_column_sql = "ALTER TABLE #{table_name} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- add_column_options!(add_column_sql, options)
- # TODO: Add support to mimic date columns, using constraints to mark them as such in the database
- # add_column_sql << " CONSTRAINT ck__#{table_name}__#{column_name}__date_only CHECK ( CONVERT(CHAR(12), #{quote_column_name(column_name)}, 14)='00:00:00:000' )" if type == :date
- execute(add_column_sql)
- end
-
- def rename_column(table, column, new_column_name)
- clear_cached_table(table)
- execute "EXEC sp_rename '#{table}.#{column}', '#{new_column_name}'"
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- clear_cached_table(table_name)
- change_column_type(table_name, column_name, type, options)
- change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
- end
-
- def change_column_type(table_name, column_name, type, options = {}) #:nodoc:
- clear_cached_table(table_name)
- sql = "ALTER TABLE #{table_name} ALTER COLUMN #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- if options.has_key?(:null)
- sql += (options[:null] ? " NULL" : " NOT NULL")
- end
- execute(sql)
- end
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- clear_cached_table(table_name)
- remove_default_constraint(table_name, column_name)
- unless default.nil?
- execute "ALTER TABLE #{table_name} ADD CONSTRAINT DF_#{table_name}_#{column_name} DEFAULT #{quote(default)} FOR #{quote_column_name(column_name)}"
- end
- end
-
- def remove_column(table_name, column_name)
- clear_cached_table(table_name)
- remove_check_constraints(table_name, column_name)
- remove_default_constraint(table_name, column_name)
- execute "ALTER TABLE #{table_name} DROP COLUMN [#{column_name}]"
- end
-
- def remove_default_constraint(table_name, column_name)
- clear_cached_table(table_name)
- defaults = select "select def.name from sysobjects def, syscolumns col, sysobjects tab where col.cdefault = def.id and col.name = '#{column_name}' and tab.name = '#{table_name}' and col.id = tab.id"
- defaults.each {|constraint|
- execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["name"]}"
- }
- end
-
- def remove_check_constraints(table_name, column_name)
- clear_cached_table(table_name)
- # TODO remove all constraints in single method
- constraints = select "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE where TABLE_NAME = '#{table_name}' and COLUMN_NAME = '#{column_name}'"
- constraints.each do |constraint|
- execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["CONSTRAINT_NAME"]}"
- end
- end
-
- def remove_index(table_name, options = {})
- execute "DROP INDEX #{table_name}.#{index_name(table_name, options)}"
- end
-
- def columns(table_name, name = nil)
- # It's possible for table_name to be an empty string, or nil, if something attempts to issue SQL
- # which doesn't involve a table, e.g. "SELECT 1" or "SELECT * from someFunction()".
- return [] if table_name.blank?
- table_name = table_name.to_s if table_name.is_a?(Symbol)
-
- # Remove []'s from around the table name, valid in a select statement, but not when matching metadata.
- table_name = table_name.gsub(/[\[\]]/, '')
-
- return [] if table_name =~ /^information_schema\./i
- @table_columns = {} unless @table_columns
- unless @table_columns[table_name]
- @table_columns[table_name] = super
- @table_columns[table_name].each do |col|
- col.identity = true if col.sql_type =~ /identity/i
- col.is_special = true if col.sql_type =~ /text|ntext|image|xml/i
- end
- end
- @table_columns[table_name]
- end
-
- def _execute(sql, name = nil)
- # Match the start of the sql to determine appropriate behaviour. Be aware of
- # multi-line sql which might begin with 'create stored_proc' and contain 'insert into ...' lines.
- # Possible improvements include ignoring comment blocks prior to the first statement.
- if sql.lstrip =~ /\Ainsert/i
- if query_requires_identity_insert?(sql)
- table_name = get_table_name(sql)
- with_identity_insert_enabled(table_name) do
- id = @connection.execute_insert(sql)
- end
- else
- @connection.execute_insert(sql)
- end
- elsif sql.lstrip =~ /\A(create|exec)/i
- @connection.execute_update(sql)
- elsif sql.lstrip =~ /\A\(?\s*(select|show)/i
- repair_special_columns(sql)
- @connection.execute_query(sql)
- else
- @connection.execute_update(sql)
- end
- end
-
- def select(sql, name = nil)
- log(sql, name) do
- @connection.execute_query(sql)
- end
- end
-
- #SELECT .. FOR UPDATE is not supported on Microsoft SQL Server
- def add_lock!(sql, options)
- sql
- end
-
- # Turns IDENTITY_INSERT ON for table during execution of the block
- # N.B. This sets the state of IDENTITY_INSERT to OFF after the
- # block has been executed without regard to its previous state
- def with_identity_insert_enabled(table_name, &block)
- set_identity_insert(table_name, true)
- yield
- ensure
- set_identity_insert(table_name, false)
- end
-
- def set_identity_insert(table_name, enable = true)
- execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
- rescue Exception => e
- raise ActiveRecord::ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}"
- end
-
- def identity_column(table_name)
- columns(table_name).each do |col|
- return col.name if col.identity
- end
- return nil
- end
-
- def query_requires_identity_insert?(sql)
- table_name = get_table_name(sql)
- id_column = identity_column(table_name)
- if sql.strip =~ /insert into [^ ]+ ?\((.+?)\)/i
- insert_columns = $1.split(/, */).map(&method(:unquote_column_name))
- return table_name if insert_columns.include?(id_column)
- end
- end
-
- def unquote_column_name(name)
- if name =~ /^\[.*\]$/
- name[1..-2]
- else
- name
- end
- end
-
- def get_special_columns(table_name)
- special = []
- columns(table_name).each do |col|
- special << col.name if col.is_special
- end
- special
- end
-
- def repair_special_columns(sql)
- special_cols = get_special_columns(get_table_name(sql))
- for col in special_cols.to_a
- sql.gsub!(Regexp.new(" #{col.to_s} = "), " #{col.to_s} LIKE ")
- sql.gsub!(/ORDER BY #{col.to_s}/i, '')
- end
- sql
- end
-
- def determine_order_clause(sql)
- return $1 if sql =~ /ORDER BY (.*)$/
- table_name = get_table_name(sql)
- "#{table_name}.#{determine_primary_key(table_name)}"
- end
-
- def determine_primary_key(table_name)
- primary_key = columns(table_name).detect { |column| column.primary || column.identity }
- return primary_key.name if primary_key
- # Look for an id column. Return it, without changing case, to cover dbs with a case-sensitive collation.
- columns(table_name).each { |column| return column.name if column.name =~ /^id$/i }
- # Give up and provide something which is going to crash almost certainly
- columns(table_name)[0].name
- end
-
- def clear_cached_table(name)
- (@table_columns ||= {}).delete(name.to_s)
- end
- end
-end
-
+++ /dev/null
-class ActiveRecord::Base
- class << self
- def mssql_connection(config)
- require "arjdbc/mssql"
- config[:host] ||= "localhost"
- config[:port] ||= 1433
- config[:driver] ||= "net.sourceforge.jtds.jdbc.Driver"
-
- url = "jdbc:jtds:sqlserver://#{config[:host]}:#{config[:port]}/#{config[:database]}"
-
- # Instance is often a preferable alternative to port when dynamic ports are used.
- # If instance is specified then port is essentially ignored.
- url << ";instance=#{config[:instance]}" if config[:instance]
-
- # This enables Windows domain-based authentication and requires the jTDS native libraries to be available.
- url << ";domain=#{config[:domain]}" if config[:domain]
-
- # AppName is shown in SQL Server as additional information about the connection.
- url << ";appname=#{config[:appname]}" if config[:appname]
- config[:url] ||= url
-
- if !config[:domain]
- config[:username] ||= "sa"
- config[:password] ||= ""
- end
- jdbc_connection(config)
- end
- alias_method :jdbcmssql_connection, :mssql_connection
- end
-end
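As a rough illustration of the URL assembly in mssql_connection above, the standalone snippet below (hypothetical host, database and instance values) shows the jTDS URL that would be produced:

# Hypothetical values; mirrors how mssql_connection builds config[:url].
config = { :host => "db.example.local", :port => 1433,
           :database => "sonar", :instance => "SQLEXPRESS" }
url = "jdbc:jtds:sqlserver://#{config[:host]}:#{config[:port]}/#{config[:database]}"
url << ";instance=#{config[:instance]}" if config[:instance]
puts url  # => jdbc:jtds:sqlserver://db.example.local:1433/sonar;instance=SQLEXPRESS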
+++ /dev/null
-module ::ArJdbc
- module MsSQL
- module LimitHelpers
- module_function
- def get_table_name(sql)
- if sql =~ /^\s*insert\s+into\s+([^\(\s,]+)\s*|^\s*update\s+([^\(\s,]+)\s*/i
- $1
- elsif sql =~ /\bfrom\s+([^\(\s,]+)\s*/i
- $1
- else
- nil
- end
- end
-
- module SqlServer2000ReplaceLimitOffset
- module_function
- def replace_limit_offset!(sql, limit, offset, order)
- if limit
- offset ||= 0
- start_row = offset + 1
- end_row = offset + limit.to_i
- find_select = /\b(SELECT(?:\s+DISTINCT)?)\b(.*)/im
- whole, select, rest_of_query = find_select.match(sql).to_a
- if (start_row == 1) && (end_row == 1)
- new_sql = "#{select} TOP 1 #{rest_of_query}"
- sql.replace(new_sql)
- else
- # Ugly/kludgy: strip everything before the FROM...
- rest = rest_of_query[(/FROM/i =~ rest_of_query)..-1]
- # need the table name to avoid ambiguity
- table_name = LimitHelpers.get_table_name(sql)
- primary_key = order[/(\w*id\w*)/i]
- # Not sure this covers every case, but all the tests pass.
- new_order = "ORDER BY #{order}, #{table_name}.#{primary_key}" if order.index("#{table_name}.#{primary_key}").nil?
- new_order ||= order
-
- if (rest_of_query.match(/WHERE/).nil?)
- new_sql = "#{select} TOP #{limit} #{rest_of_query} WHERE #{table_name}.#{primary_key} NOT IN (#{select} TOP #{offset} #{table_name}.#{primary_key} #{rest} #{new_order}) #{order} "
- else
- new_sql = "#{select} TOP #{limit} #{rest_of_query} AND #{table_name}.#{primary_key} NOT IN (#{select} TOP #{offset} #{table_name}.#{primary_key} #{rest} #{new_order}) #{order} "
- end
-
- sql.replace(new_sql)
- end
- end
- sql
- end
- end
-
- module SqlServer2000AddLimitOffset
- def add_limit_offset!(sql, options)
- if options[:limit]
- order = "ORDER BY #{options[:order] || determine_order_clause(sql)}"
- sql.sub!(/ ORDER BY.*$/i, '')
- SqlServer2000ReplaceLimitOffset.replace_limit_offset!(sql, options[:limit], options[:offset], order)
- end
- end
- end
-
- module SqlServerReplaceLimitOffset
- module_function
- def replace_limit_offset!(sql, limit, offset, order)
- if limit
- offset ||= 0
- start_row = offset + 1
- end_row = offset + limit.to_i
- find_select = /\b(SELECT(?:\s+DISTINCT)?)\b(.*)/im
- whole, select, rest_of_query = find_select.match(sql).to_a
- rest_of_query.strip!
- if rest_of_query[0] == "1"
- rest_of_query[0] = "*"
- end
- if rest_of_query[0] == "*"
- from_table = LimitHelpers.get_table_name(rest_of_query)
- rest_of_query = from_table + '.' + rest_of_query
- end
- new_sql = "#{select} t.* FROM (SELECT ROW_NUMBER() OVER(#{order}) AS _row_num, #{rest_of_query}"
- new_sql << ") AS t WHERE t._row_num BETWEEN #{start_row.to_s} AND #{end_row.to_s}"
- sql.replace(new_sql)
- end
- sql
- end
- end
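Illustrative sketch of the SQL shape produced by replace_limit_offset! above for LIMIT 10 OFFSET 20 (hypothetical table and order clause; the real method mutates the SQL string in place and first normalizes "SELECT 1"/"SELECT *" projections):

limit, offset = 10, 20
order = "ORDER BY projects.name"
paged = "SELECT t.* FROM (SELECT ROW_NUMBER() OVER(#{order}) AS _row_num, " \
        "projects.* FROM projects) AS t " \
        "WHERE t._row_num BETWEEN #{offset + 1} AND #{offset + limit}"
puts paged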
-
- module SqlServerAddLimitOffset
- def add_limit_offset!(sql, options)
- if options[:limit]
- order = "ORDER BY #{options[:order] || determine_order_clause(sql)}"
- sql.sub!(/ ORDER BY.*$/i, '')
- SqlServerReplaceLimitOffset.replace_limit_offset!(sql, options[:limit], options[:offset], order)
- end
- end
- end
- end
- end
-end
+++ /dev/null
-# Common methods for handling TSQL databases.
-module TSqlMethods
-
- def modify_types(tp) #:nodoc:
- tp[:primary_key] = "int NOT NULL IDENTITY(1, 1) PRIMARY KEY"
- tp[:integer][:limit] = nil
- tp[:boolean] = {:name => "bit"}
- tp[:binary] = { :name => "image"}
- tp
- end
-
- def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
- limit = nil if %w(text binary).include? type.to_s
- return 'uniqueidentifier' if (type.to_s == 'uniqueidentifier')
- return super unless type.to_s == 'integer'
-
- if limit.nil? || limit == 4
- 'int'
- elsif limit == 2
- 'smallint'
- elsif limit == 1
- 'tinyint'
- else
- 'bigint'
- end
- end
-
- def add_limit_offset!(sql, options)
- if options[:limit] and options[:offset]
- total_rows = select_all("SELECT count(*) as TotalRows from (#{sql.gsub(/\bSELECT(\s+DISTINCT)?\b/i, "SELECT\\1 TOP 1000000000")}) tally")[0]["TotalRows"].to_i
- if (options[:limit] + options[:offset]) >= total_rows
- options[:limit] = (total_rows - options[:offset] >= 0) ? (total_rows - options[:offset]) : 0
- end
- sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i, "SELECT * FROM (SELECT TOP #{options[:limit]} * FROM (SELECT\\1 TOP #{options[:limit] + options[:offset]} ")
- sql << ") AS tmp1"
- if options[:order]
- options[:order] = options[:order].split(',').map do |field|
- parts = field.split(" ")
- tc = parts[0]
- if sql =~ /\.\[/ and tc =~ /\./ # if column quoting used in query
- tc.gsub!(/\./, '\\.\\[')
- tc << '\\]'
- end
- if sql =~ /#{tc} AS (t\d_r\d\d?)/
- parts[0] = $1
- elsif parts[0] =~ /\w+\.(\w+)/
- parts[0] = $1
- end
- parts.join(' ')
- end.join(', ')
- sql << " ORDER BY #{change_order_direction(options[:order])}) AS tmp2 ORDER BY #{options[:order]}"
- else
- sql << " ) AS tmp2"
- end
- elsif sql !~ /^\s*SELECT (@@|COUNT\()/i
- sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i) do
- "SELECT#{$1} TOP #{options[:limit]}"
- end unless options[:limit].nil?
- end
- end
-end
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/mysql'
-require 'arjdbc/mysql/connection_methods'
-require 'arjdbc/mysql/adapter'
+++ /dev/null
-require 'active_record/connection_adapters/abstract/schema_definitions'
-
-module ::ArJdbc
- module MySQL
- def self.column_selector
- [/mysql/i, lambda {|cfg,col| col.extend(::ArJdbc::MySQL::Column)}]
- end
-
- def self.extended(adapter)
- adapter.configure_connection
- end
-
- def configure_connection
- execute("SET SQL_AUTO_IS_NULL=0")
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::MySQLJdbcConnection
- end
-
- module Column
- def extract_default(default)
- if sql_type =~ /blob/i || type == :text
- if default.blank?
- return null ? nil : ''
- else
- raise ArgumentError, "#{type} columns cannot have a default value: #{default.inspect}"
- end
- elsif missing_default_forged_as_empty_string?(default)
- nil
- else
- super
- end
- end
-
- def has_default?
- return false if sql_type =~ /blob/i || type == :text #mysql forbids defaults on blob and text columns
- super
- end
-
- def simplified_type(field_type)
- case field_type
- when /tinyint\(1\)|bit/i then :boolean
- when /enum/i then :string
- when /decimal/i then :decimal
- else
- super
- end
- end
-
- def extract_limit(sql_type)
- case sql_type
- when /blob|text/i
- case sql_type
- when /tiny/i
- 255
- when /medium/i
- 16777215
- when /long/i
- 2147483647 # mysql only allows 2^31-1, not 2^32-1, somewhat inconsistently with the tiny/medium/normal cases
- else
- nil # we could return 65535 here, but we leave it undecorated by default
- end
- when /^bigint/i; 8
- when /^int/i; 4
- when /^mediumint/i; 3
- when /^smallint/i; 2
- when /^tinyint/i; 1
- when /^(bool|date|float|int|time)/i
- nil
- else
- super
- end
- end
-
- # MySQL misreports NOT NULL column default when none is given.
- # We can't detect this for columns which may have a legitimate ''
- # default (string) but we can for others (integer, datetime, boolean,
- # and the rest).
- #
- # Test whether the column has default '', is not null, and is not
- # a type allowing default ''.
- def missing_default_forged_as_empty_string?(default)
- type != :string && !null && default == ''
- end
- end
-
-
- def modify_types(tp)
- tp[:primary_key] = "int(11) DEFAULT NULL auto_increment PRIMARY KEY"
- tp[:decimal] = { :name => "decimal" }
- tp[:timestamp] = { :name => "datetime" }
- tp[:datetime][:limit] = nil
-
- # sonar
- # Ticket http://tools.assembla.com/sonar/ticket/200
- # Problem with mysql TEXT columns. ActiveRecord :text type is mapped to TEXT type (65535 characters).
- # But we would like the bigger MEDIUMTEXT for the snapshot_sources table (16777215 characters).
- # This hack works only for ActiveRecord-JDBC (Jruby use).
- # See http://www.headius.com/jrubywiki/index.php/Adding_Datatypes_to_ActiveRecord-JDBC
- tp[:text] = { :name => "mediumtext" }
- tp[:binary] = { :name => "longblob" }
- tp[:big_integer] = { :name => "bigint"}
- # /sonar
-
- tp
- end
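Assuming the tp[:text] mapping above is in effect, an ordinary migration column of type :text ends up as MEDIUMTEXT. A hypothetical migration sketch (not part of this codebase) to show the intent:

# Hypothetical migration: with tp[:text] => "mediumtext", the :text column
# below is created as MEDIUMTEXT (~16 MB) rather than MySQL's TEXT (64 KB).
class CreateSnapshotSources < ActiveRecord::Migration
  def self.up
    create_table :snapshot_sources do |t|
      t.integer :snapshot_id
      t.text    :data
    end
  end

  def self.down
    drop_table :snapshot_sources
  end
end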
-
- def adapter_name #:nodoc:
- 'MySQL'
- end
-
- def arel2_visitors
- {'jdbcmysql' => ::Arel::Visitors::MySQL}
- end
-
- def case_sensitive_equality_operator
- "= BINARY"
- end
-
- def limited_update_conditions(where_sql, quoted_table_name, quoted_primary_key)
- where_sql
- end
-
- # QUOTING ==================================================
-
- def quote(value, column = nil)
- return value.quoted_id if value.respond_to?(:quoted_id)
-
- if column && column.type == :primary_key
- value.to_s
- elsif column && String === value && column.type == :binary && column.class.respond_to?(:string_to_binary)
- s = column.class.string_to_binary(value).unpack("H*")[0]
- "x'#{s}'"
- elsif BigDecimal === value
- "'#{value.to_s("F")}'"
- else
- super
- end
- end
-
- def quoted_true
- "1"
- end
-
- def quoted_false
- "0"
- end
-
- def begin_db_transaction #:nodoc:
- @connection.begin
- rescue Exception
- # Transactions aren't supported
- end
-
- def commit_db_transaction #:nodoc:
- @connection.commit
- rescue Exception
- # Transactions aren't supported
- end
-
- def rollback_db_transaction #:nodoc:
- @connection.rollback
- rescue Exception
- # Transactions aren't supported
- end
-
- def supports_savepoints? #:nodoc:
- true
- end
-
- def create_savepoint
- execute("SAVEPOINT #{current_savepoint_name}")
- end
-
- def rollback_to_savepoint
- execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
- end
-
- def release_savepoint
- execute("RELEASE SAVEPOINT #{current_savepoint_name}")
- end
-
- def disable_referential_integrity(&block) #:nodoc:
- old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
- begin
- update("SET FOREIGN_KEY_CHECKS = 0")
- yield
- ensure
- update("SET FOREIGN_KEY_CHECKS = #{old}")
- end
- end
-
- # SCHEMA STATEMENTS ========================================
-
- def structure_dump #:nodoc:
- if supports_views?
- sql = "SHOW FULL TABLES WHERE Table_type = 'BASE TABLE'"
- else
- sql = "SHOW TABLES"
- end
-
- select_all(sql).inject("") do |structure, table|
- table.delete('Table_type')
-
- hash = show_create_table(table.to_a.first.last)
-
- if(table = hash["Create Table"])
- structure += table + ";\n\n"
- elsif(view = hash["Create View"])
- structure += view + ";\n\n"
- end
- end
- end
-
- def jdbc_columns(table_name, name = nil)#:nodoc:
- sql = "SHOW FIELDS FROM #{quote_table_name(table_name)}"
- execute(sql, :skip_logging).map do |field|
- ::ActiveRecord::ConnectionAdapters::MysqlColumn.new(field["Field"], field["Default"], field["Type"], field["Null"] == "YES")
- end
- end
-
- def recreate_database(name, options = {}) #:nodoc:
- drop_database(name)
- create_database(name, options)
- end
-
- def create_database(name, options = {}) #:nodoc:
- if options[:collation]
- execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
- else
- execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
- end
- end
-
- def drop_database(name) #:nodoc:
- execute "DROP DATABASE IF EXISTS `#{name}`"
- end
-
- def current_database
- select_one("SELECT DATABASE() as db")["db"]
- end
-
- def create_table(name, options = {}) #:nodoc:
- super(name, {:options => "ENGINE=InnoDB"}.merge(options))
- end
-
- def rename_table(name, new_name)
- execute "RENAME TABLE #{quote_table_name(name)} TO #{quote_table_name(new_name)}"
- end
-
- def add_column(table_name, column_name, type, options = {})
- add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- add_column_options!(add_column_sql, options)
- add_column_position!(add_column_sql, options)
- execute(add_column_sql)
- end
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- column = column_for(table_name, column_name)
- change_column table_name, column_name, column.sql_type, :default => default
- end
-
- def change_column_null(table_name, column_name, null, default = nil)
- column = column_for(table_name, column_name)
-
- unless null || default.nil?
- execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
- end
-
- change_column table_name, column_name, column.sql_type, :null => null
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- column = column_for(table_name, column_name)
-
- unless options_include_default?(options)
- options[:default] = column.default
- end
-
- unless options.has_key?(:null)
- options[:null] = column.null
- end
-
- change_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- add_column_options!(change_column_sql, options)
- add_column_position!(change_column_sql, options)
- execute(change_column_sql)
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- options = {}
- if column = columns(table_name).find { |c| c.name == column_name.to_s }
- options[:default] = column.default
- options[:null] = column.null
- else
- raise ActiveRecord::ActiveRecordError, "No such column: #{table_name}.#{column_name}"
- end
- current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'")["Type"]
- rename_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(new_column_name)} #{current_type}"
- add_column_options!(rename_column_sql, options)
- execute(rename_column_sql)
- end
-
- def add_limit_offset!(sql, options) #:nodoc:
- limit, offset = options[:limit], options[:offset]
- if limit && offset
- sql << " LIMIT #{offset.to_i}, #{sanitize_limit(limit)}"
- elsif limit
- sql << " LIMIT #{sanitize_limit(limit)}"
- elsif offset
- sql << " OFFSET #{offset.to_i}"
- end
- sql
- end
-
- def show_variable(var)
- res = execute("show variables like '#{var}'")
- row = res.detect {|row| row["Variable_name"] == var }
- row && row["Value"]
- end
-
- def charset
- show_variable("character_set_database")
- end
-
- def collation
- show_variable("collation_database")
- end
-
- def type_to_sql(type, limit = nil, precision = nil, scale = nil)
- return super unless type.to_s == 'integer'
-
- case limit
- when 1; 'tinyint'
- when 2; 'smallint'
- when 3; 'mediumint'
- when nil, 4, 11; 'int(11)' # compatibility with MySQL default
- when 5..8; 'bigint'
- else raise(ActiveRecordError, "No integer type has byte size #{limit}")
- end
- end
-
- def add_column_position!(sql, options)
- if options[:first]
- sql << " FIRST"
- elsif options[:after]
- sql << " AFTER #{quote_column_name(options[:after])}"
- end
- end
-
- protected
- def translate_exception(exception, message)
- return super unless exception.respond_to?(:errno)
-
- case exception.errno
- when 1062
- ::ActiveRecord::RecordNotUnique.new(message, exception)
- when 1452
- ::ActiveRecord::InvalidForeignKey.new(message, exception)
- else
- super
- end
- end
-
- private
- def column_for(table_name, column_name)
- unless column = columns(table_name).find { |c| c.name == column_name.to_s }
- raise "No such column: #{table_name}.#{column_name}"
- end
- column
- end
-
- def show_create_table(table)
- select_one("SHOW CREATE TABLE #{quote_table_name(table)}")
- end
-
- def supports_views?
- false
- end
- end
-end
-
-module ActiveRecord::ConnectionAdapters
- # Remove any vestiges of core/Ruby MySQL adapter
- remove_const(:MysqlColumn) if const_defined?(:MysqlColumn)
- remove_const(:MysqlAdapter) if const_defined?(:MysqlAdapter)
-
- class MysqlColumn < JdbcColumn
- include ArJdbc::MySQL::Column
-
- def initialize(name, *args)
- if Hash === name
- super
- else
- super(nil, name, *args)
- end
- end
-
- def call_discovered_column_callbacks(*)
- end
- end
-
- class MysqlAdapter < JdbcAdapter
- include ArJdbc::MySQL
-
- def initialize(*args)
- super
- configure_connection
- end
-
- def adapter_spec(config)
- # return nil to avoid extending ArJdbc::MySQL, which we've already done
- end
-
- def jdbc_connection_class(spec)
- ::ArJdbc::MySQL.jdbc_connection_class
- end
-
- def jdbc_column_class
- ActiveRecord::ConnectionAdapters::MysqlColumn
- end
-
- alias_chained_method :columns, :query_cache, :jdbc_columns
- end
-end
-
-module Mysql # :nodoc:
- remove_const(:Error) if const_defined?(:Error)
-
- class Error < ::ActiveRecord::JDBCError
- end
-
- def self.client_version
- 50400 # faked out for AR tests
- end
-end
+++ /dev/null
-# Don't need to load native mysql adapter
-$LOADED_FEATURES << "active_record/connection_adapters/mysql_adapter.rb"
-$LOADED_FEATURES << "active_record/connection_adapters/mysql2_adapter.rb"
-
-class ActiveRecord::Base
- class << self
- def mysql_connection(config)
- require "arjdbc/mysql"
- config[:port] ||= 3306
- options = (config[:options] ||= {})
- options['zeroDateTimeBehavior'] ||= 'convertToNull'
- options['jdbcCompliantTruncation'] ||= 'false'
- options['useUnicode'] ||= 'true'
- options['characterEncoding'] = config[:encoding] || 'utf8'
- config[:url] ||= "jdbc:mysql://#{config[:host]}:#{config[:port]}/#{config[:database]}"
- config[:driver] ||= "com.mysql.jdbc.Driver"
- config[:adapter_class] = ActiveRecord::ConnectionAdapters::MysqlAdapter
- connection = jdbc_connection(config)
- ::ArJdbc::MySQL.kill_cancel_timer(connection.raw_connection)
- connection
- end
- alias_method :jdbcmysql_connection, :mysql_connection
- alias_method :mysql2_connection, :mysql_connection
- end
-end
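A standalone sketch (made-up database name) of the defaults that mysql_connection above fills in before delegating to jdbc_connection:

# Hypothetical config; shows the driver options and URL defaults applied above.
config = { :host => "localhost", :database => "sonar", :encoding => "utf8" }
config[:port] ||= 3306
options = (config[:options] ||= {})
options['zeroDateTimeBehavior'] ||= 'convertToNull'
options['characterEncoding'] = config[:encoding] || 'utf8'
config[:url] ||= "jdbc:mysql://#{config[:host]}:#{config[:port]}/#{config[:database]}"
puts config[:url]  # => jdbc:mysql://localhost:3306/sonar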
-
-
+++ /dev/null
-require 'arjdbc/jdbc'
-require 'arjdbc/oracle/connection_methods'
-require 'arjdbc/oracle/adapter'
+++ /dev/null
-module ActiveRecord::ConnectionAdapters
- OracleAdapter = Class.new(AbstractAdapter) unless const_defined?(:OracleAdapter)
-end
-
-module ::ArJdbc
- module Oracle
- def self.extended(mod)
- unless @lob_callback_added
- ActiveRecord::Base.class_eval do
- def after_save_with_oracle_lob
- self.class.columns.select { |c| c.sql_type =~ /LOB\(|LOB$/i }.each do |c|
- value = self[c.name]
- value = value.to_yaml if unserializable_attribute?(c.name, c)
- next if value.nil? || (value == '')
-
- connection.write_large_object(c.type == :binary, c.name, self.class.table_name, self.class.primary_key, quote_value(id), value)
- end
- end
- end
-
- ActiveRecord::Base.after_save :after_save_with_oracle_lob
- @lob_callback_added = true
- end
- require 'arjdbc/jdbc/quoted_primary_key'
- ActiveRecord::Base.extend ArJdbc::QuotedPrimaryKeyExtension
- (class << mod; self; end).class_eval do
- alias_chained_method :insert, :query_dirty, :ora_insert
- alias_chained_method :columns, :query_cache, :ora_columns
- end
- end
-
- def self.column_selector
- [/oracle/i, lambda {|cfg,col| col.extend(::ArJdbc::Oracle::Column)}]
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::OracleJdbcConnection
- end
-
- module Column
- def primary=(val)
- super
- if val && @sql_type =~ /^NUMBER$/i
- @type = :integer
- end
- end
-
- def type_cast(value)
- return nil if value.nil?
- case type
- when :datetime then ArJdbc::Oracle::Column.string_to_time(value, self.class)
- else
- super
- end
- end
-
- def type_cast_code(var_name)
- case type
- when :datetime then "ArJdbc::Oracle::Column.string_to_time(#{var_name}, self.class)"
- else
- super
- end
- end
-
- def self.string_to_time(string, klass)
- time = klass.string_to_time(string)
- guess_date_or_time(time)
- end
-
- def self.guess_date_or_time(value)
- return value if Date === value
- (value && value.hour == 0 && value.min == 0 && value.sec == 0) ?
- Date.new(value.year, value.month, value.day) : value
- end
-
- private
- def simplified_type(field_type)
- case field_type
- when /^number\(1\)$/i then :boolean
- when /char/i then :string
- when /float|double/i then :float
- when /int/i then :integer
- when /num|dec|real/i then extract_scale(field_type) == 0 ? :integer : :decimal
- when /date|time/i then :datetime
- when /clob/i then :text
- when /blob/i then :binary
- end
- end
-
- # Post process default value from JDBC into a Rails-friendly format (columns{-internal})
- def default_value(value)
- return nil unless value
-
- # Not sure why we need this for Oracle?
- value = value.strip
-
- return nil if value == "null"
-
- # sysdate default should be treated like a null value
- return nil if value.downcase == "sysdate"
-
- # jdbc returns column default strings with actual single quotes around the value.
- return $1 if value =~ /^'(.*)'$/
-
- value
- end
- end
-
- def adapter_name
- 'Oracle'
- end
-
- def arel2_visitors
- { 'oracle' => Arel::Visitors::Oracle }
- end
-
- # TODO: use this instead of the QuotedPrimaryKey logic and execute_id_insert?
- # def prefetch_primary_key?(table_name = nil)
- # columns(table_name).detect {|c| c.primary } if table_name
- # end
-
- def table_alias_length
- 30
- end
-
- def default_sequence_name(table, column = nil) #:nodoc:
- "#{table}_seq"
- end
-
- def create_table(name, options = {}) #:nodoc:
- super(name, options)
- seq_name = options[:sequence_name] || default_sequence_name(name)
- start_value = options[:sequence_start_value] || 10000
- raise ActiveRecord::StatementInvalid.new("name #{seq_name} too long") if seq_name.length > table_alias_length
- execute "CREATE SEQUENCE #{seq_name} START WITH #{start_value}" unless options[:id] == false
- end
-
- def rename_table(name, new_name) #:nodoc:
- execute "RENAME #{name} TO #{new_name}"
- execute "RENAME #{name}_seq TO #{new_name}_seq" rescue nil
- end
-
- def drop_table(name, options = {}) #:nodoc:
- super(name) rescue nil
- seq_name = options[:sequence_name] || default_sequence_name(name)
- execute "DROP SEQUENCE #{seq_name}" rescue nil
- end
-
- def recreate_database(name)
- tables.each{ |table| drop_table(table) }
- end
-
- def drop_database(name)
- recreate_database(name)
- end
-
- def next_sequence_value(sequence_name)
- # avoid #select or #select_one so that the sequence values aren't cached
- execute("select #{sequence_name}.nextval id from dual").first['id'].to_i
- end
-
- def sql_literal?(value)
- defined?(::Arel::SqlLiteral) && ::Arel::SqlLiteral === value
- end
-
- def ora_insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
- if (id_value && !sql_literal?(id_value)) || pk.nil?
- # Pre-assigned id or table without a primary key
- # Presence of #to_sql means an Arel literal bind variable
- # that should use #execute_id_insert below
- execute sql, name
- else
- # Assume the sql contains a bind-variable for the id
- # Extract the table from the insert sql. Yuck.
- table = sql.split(" ", 4)[2].gsub('"', '')
- sequence_name ||= default_sequence_name(table)
- id_value = next_sequence_value(sequence_name)
- log(sql, name) do
- @connection.execute_id_insert(sql,id_value)
- end
- end
- id_value
- end
-
- def indexes(table, name = nil)
- @connection.indexes(table, name, @connection.connection.meta_data.user_name)
- end
-
- def _execute(sql, name = nil)
- case sql.strip
- when /\A\(?\s*(select|show)/i then
- @connection.execute_query(sql)
- else
- @connection.execute_update(sql)
- end
- end
-
- def modify_types(tp)
- tp[:primary_key] = "NUMBER(38) NOT NULL PRIMARY KEY"
- tp[:integer] = { :name => "NUMBER", :limit => 38 }
-
- # sonar
- tp[:datetime] = { :name => "TIMESTAMP" }
- tp[:timestamp] = { :name => "TIMESTAMP" }
- # /sonar
-
- tp[:time] = { :name => "DATE" }
- tp[:date] = { :name => "DATE" }
-
- # sonar
- # New type
- tp[:big_integer] = { :name => "NUMBER", :limit => 38 }
- # /sonar
- tp
- end
-
- def add_limit_offset!(sql, options) #:nodoc:
- offset = options[:offset] || 0
-
- if limit = options[:limit]
- sql.replace "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_ where rownum <= #{offset+limit}) where raw_rnum_ > #{offset}"
- elsif offset > 0
- sql.replace "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_) where raw_rnum_ > #{offset}"
- end
- end
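Illustrative sketch of the ROWNUM wrapper that add_limit_offset! above produces for LIMIT 10 OFFSET 20 (hypothetical query text):

sql    = "select * from projects order by name"
limit  = 10
offset = 20
paged  = "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_ " \
         "where rownum <= #{offset + limit}) where raw_rnum_ > #{offset}"
puts paged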
-
- def current_database #:nodoc:
- select_one("select sys_context('userenv','db_name') db from dual")["db"]
- end
-
- def remove_index(table_name, options = {}) #:nodoc:
- execute "DROP INDEX #{index_name(table_name, options)}"
- end
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- execute "ALTER TABLE #{table_name} MODIFY #{column_name} DEFAULT #{quote(default)}"
- end
-
- def add_column_options!(sql, options) #:nodoc:
- # handle case of defaults for CLOB columns, which would otherwise get "quoted" incorrectly
- if options_include_default?(options) && (column = options[:column]) && column.type == :text
- sql << " DEFAULT #{quote(options.delete(:default))}"
- end
- super
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- change_column_sql = "ALTER TABLE #{table_name} MODIFY #{column_name} #{type_to_sql(type, options[:limit])}"
- add_column_options!(change_column_sql, options)
- execute(change_column_sql)
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- execute "ALTER TABLE #{table_name} RENAME COLUMN #{column_name} to #{new_column_name}"
- end
-
- def remove_column(table_name, column_name) #:nodoc:
- execute "ALTER TABLE #{table_name} DROP COLUMN #{column_name}"
- end
-
- def structure_dump #:nodoc:
- s = select_all("select sequence_name from user_sequences").inject("") do |structure, seq|
- structure << "create sequence #{seq.to_a.first.last};\n\n"
- end
-
- select_all("select table_name from user_tables").inject(s) do |structure, table|
- ddl = "create table #{table.to_a.first.last} (\n "
- cols = select_all(%Q{
- select column_name, data_type, data_length, data_precision, data_scale, data_default, nullable
- from user_tab_columns
- where table_name = '#{table.to_a.first.last}'
- order by column_id
- }).map do |row|
- row = row.inject({}) do |h,args|
- h[args[0].downcase] = args[1]
- h
- end
- col = "#{row['column_name'].downcase} #{row['data_type'].downcase}"
- if row['data_type'] == 'NUMBER' and !row['data_precision'].nil?
- col << "(#{row['data_precision'].to_i}"
- col << ",#{row['data_scale'].to_i}" if !row['data_scale'].nil?
- col << ')'
- elsif row['data_type'].include?('CHAR')
- col << "(#{row['data_length'].to_i})"
- end
- col << " default #{row['data_default']}" if !row['data_default'].nil?
- col << ' not null' if row['nullable'] == 'N'
- col
- end
- ddl << cols.join(",\n ")
- ddl << ");\n\n"
- structure << ddl
- end
- end
-
- def structure_drop #:nodoc:
- s = select_all("select sequence_name from user_sequences").inject("") do |drop, seq|
- drop << "drop sequence #{seq.to_a.first.last};\n\n"
- end
-
- select_all("select table_name from user_tables").inject(s) do |drop, table|
- drop << "drop table #{table.to_a.first.last} cascade constraints;\n\n"
- end
- end
-
- # SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
- #
- # Oracle requires the ORDER BY columns to be in the SELECT list for DISTINCT
- # queries. However, with those columns included in the SELECT DISTINCT list, you
- # won't actually get a distinct list of the column you want (presuming the column
- # has duplicates with multiple values for the ordered-by columns). So we use the
- # FIRST_VALUE function to get a single (first) value for each column, effectively
- # making every row the same.
- #
- # distinct("posts.id", "posts.created_at desc")
- def distinct(columns, order_by)
- return "DISTINCT #{columns}" if order_by.blank?
-
- # construct a valid DISTINCT clause, ie. one that includes the ORDER BY columns, using
- # FIRST_VALUE such that the inclusion of these columns doesn't invalidate the DISTINCT
- order_columns = order_by.split(',').map { |s| s.strip }.reject(&:blank?)
- order_columns = order_columns.zip((0...order_columns.size).to_a).map do |c, i|
- "FIRST_VALUE(#{c.split.first}) OVER (PARTITION BY #{columns} ORDER BY #{c}) AS alias_#{i}__"
- end
- sql = "DISTINCT #{columns}, "
- sql << order_columns * ", "
- end
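For the documented call distinct("posts.id", "posts.created_at desc"), the clause built above comes out roughly as follows (standalone sketch in plain Ruby, not executed against a database):

columns  = "posts.id"
order_by = "posts.created_at desc"
order_columns = order_by.split(',').map { |s| s.strip }.reject { |s| s.empty? }
order_columns = order_columns.each_with_index.map do |c, i|
  "FIRST_VALUE(#{c.split.first}) OVER (PARTITION BY #{columns} ORDER BY #{c}) AS alias_#{i}__"
end
puts "DISTINCT #{columns}, " + order_columns.join(", ")
# => DISTINCT posts.id, FIRST_VALUE(posts.created_at) OVER (PARTITION BY posts.id ORDER BY posts.created_at desc) AS alias_0__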
-
- # ORDER BY clause for the passed order option.
- #
- # Uses column aliases as defined by #distinct.
- def add_order_by_for_association_limiting!(sql, options)
- return sql if options[:order].blank?
-
- order = options[:order].split(',').collect { |s| s.strip }.reject(&:blank?)
- order.map! {|s| $1 if s =~ / (.*)/}
- order = order.zip((0...order.size).to_a).map { |s,i| "alias_#{i}__ #{s}" }.join(', ')
-
- sql << "ORDER BY #{order}"
- end
-
- def tables
- @connection.tables(nil, oracle_schema)
- end
-
- def ora_columns(table_name, name=nil)
- @connection.columns_internal(table_name, name, oracle_schema)
- end
-
- # QUOTING ==================================================
- #
- # see: abstract/quoting.rb
-
- # See ACTIVERECORD_JDBC-33 for details -- better to not quote
- # table names, esp. if they have schemas.
- def quote_table_name(name) #:nodoc:
- name.to_s
- end
-
- # Camelcase column names need to be quoted.
- # Nonquoted identifiers can contain only alphanumeric characters from your
- # database character set and the underscore (_), dollar sign ($), and pound sign (#).
- # Database links can also contain periods (.) and "at" signs (@).
- # Oracle strongly discourages you from using $ and # in nonquoted identifiers.
- # Source: http://download.oracle.com/docs/cd/B28359_01/server.111/b28286/sql_elements008.htm
- def quote_column_name(name) #:nodoc:
- name.to_s =~ /^[a-z0-9_$#]+$/ ? name.to_s : "\"#{name}\""
- end
-
- def quote_string(string) #:nodoc:
- string.gsub(/'/, "''")
- end
-
- def quote(value, column = nil) #:nodoc:
- # Arel 2 passes SqlLiterals through
- return value if sql_literal?(value)
-
- if column && [:text, :binary].include?(column.type)
- if /(.*?)\([0-9]+\)/ =~ column.sql_type
- %Q{empty_#{ $1.downcase }()}
- else
- %Q{empty_#{ column.sql_type.downcase rescue 'blob' }()}
- end
- else
- if column.respond_to?(:primary) && column.primary && column.klass != String
- return value.to_i.to_s
- end
- quoted = super
- if value.acts_like?(:date)
- quoted = %Q{DATE'#{quoted_date(value)}'}
- elsif value.acts_like?(:time)
- quoted = %Q{TIMESTAMP'#{quoted_date(value)}'}
- end
- quoted
- end
- end
-
- def quoted_true #:nodoc:
- '1'
- end
-
- def quoted_false #:nodoc:
- '0'
- end
-
- private
- # In Oracle, schemas are usually created under your username:
- # http://www.oracle.com/technology/obe/2day_dba/schema/schema.htm
- # But allow separate configuration as "schema:" anyway (GH #53)
- def oracle_schema
- if @config[:schema]
- @config[:schema].to_s
- elsif @config[:username]
- @config[:username].to_s
- end
- end
-
- def select(sql, name=nil)
- records = execute(sql,name)
- records.each do |col|
- col.delete('raw_rnum_')
- end
- records
- end
- end
-end
-
+++ /dev/null
-class ActiveRecord::Base
- class << self
- def oracle_connection(config)
- config[:port] ||= 1521
- config[:url] ||= "jdbc:oracle:thin:@#{config[:host]}:#{config[:port]}:#{config[:database]}"
- config[:driver] ||= "oracle.jdbc.driver.OracleDriver"
- jdbc_connection(config)
- end
- end
-end
-
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/postgres'
-require 'arjdbc/postgresql/connection_methods'
-require 'arjdbc/postgresql/adapter'
+++ /dev/null
-module ActiveRecord::ConnectionAdapters
- PostgreSQLAdapter = Class.new(AbstractAdapter) unless const_defined?(:PostgreSQLAdapter)
-end
-
-module ::ArJdbc
- module PostgreSQL
- def self.extended(mod)
- (class << mod; self; end).class_eval do
- alias_chained_method :insert, :query_dirty, :pg_insert
- alias_chained_method :columns, :query_cache, :pg_columns
- end
- end
-
- def self.column_selector
- [/postgre/i, lambda {|cfg,col| col.extend(::ArJdbc::PostgreSQL::Column)}]
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::PostgresJdbcConnection
- end
-
- module Column
- def type_cast(value)
- case type
- when :boolean then cast_to_boolean(value)
- else super
- end
- end
-
- def extract_limit(sql_type)
- case sql_type
- when /^int2/i; 2
- when /^smallint/i; 2
- when /^int4/i; nil
- when /^integer/i; nil
- when /^int8/i; 8
- when /^bigint/i; 8
- when /^(bool|text|date|time|bytea)/i; nil # ACTIVERECORD_JDBC-135,139
- else super
- end
- end
-
- def simplified_type(field_type)
- return :integer if field_type =~ /^(big|)serial/i
- return :string if field_type =~ /\[\]$/i || field_type =~ /^interval/i
- return :string if field_type =~ /^(?:point|lseg|box|"?path"?|polygon|circle)/i
- return :datetime if field_type =~ /^timestamp/i
- return :float if field_type =~ /^(?:real|double precision)$/i
- return :binary if field_type =~ /^bytea/i
- return :boolean if field_type =~ /^bool/i
- return :decimal if field_type == 'numeric(131089)'
- super
- end
-
- def cast_to_boolean(value)
- return nil if value.nil?
- if value == true || value == false
- value
- else
- %w(true t 1).include?(value.to_s.downcase)
- end
- end
-
- # Post process default value from JDBC into a Rails-friendly format (columns{-internal})
- def default_value(value)
- # Boolean types
- return "t" if value =~ /true/i
- return "f" if value =~ /false/i
-
- # Char/String/Bytea type values
- return $1 if value =~ /^'(.*)'::(bpchar|text|character varying|bytea)$/
-
- # Numeric values
- return value.delete("()") if value =~ /^\(?-?[0-9]+(\.[0-9]*)?\)?/
-
- # Fixed dates / timestamp
- return $1 if value =~ /^'(.+)'::(date|timestamp)/
-
- # Anything else is blank, some user type, or some function
- # and we can't know the value of that, so return nil.
- return nil
- end
- end
-
- def modify_types(tp)
- tp[:primary_key] = "serial primary key"
-
- # sonar
- # tp[:string][:limit] = 255
- # /sonar
-
- tp[:integer][:limit] = nil
- tp[:boolean] = { :name => "boolean" }
- tp[:float] = { :name => "float" }
- tp[:text] = { :name => "text" }
- tp[:datetime] = { :name => "timestamp" }
- tp[:timestamp] = { :name => "timestamp" }
- tp[:time] = { :name => "time" }
- tp[:date] = { :name => "date" }
- tp[:decimal] = { :name => "decimal" }
-
- # sonar
- # New type
- tp[:big_integer] = { :name => "int8", :limit => nil }
- # /sonar
-
- tp
- end
-
- def adapter_name #:nodoc:
- 'PostgreSQL'
- end
-
- def arel2_visitors
- {'jdbcpostgresql' => ::Arel::Visitors::PostgreSQL}
- end
-
- def postgresql_version
- @postgresql_version ||=
- begin
- value = select_value('SELECT version()')
- if value =~ /PostgreSQL (\d+)\.(\d+)\.(\d+)/
- ($1.to_i * 10000) + ($2.to_i * 100) + $3.to_i
- else
- 0
- end
- end
- end
-
- # Does PostgreSQL support migrations?
- def supports_migrations?
- true
- end
-
- # Does PostgreSQL support standard conforming strings?
- def supports_standard_conforming_strings?
- # Temporarily set the client message level above error to prevent unintentional
- # error messages in the logs when working on a PostgreSQL database server that
- # does not support standard conforming strings.
- client_min_messages_old = client_min_messages
- self.client_min_messages = 'panic'
-
- # postgres-pr does not raise an exception when client_min_messages is set higher
- # than error and "SHOW standard_conforming_strings" fails, but returns an empty
- # PGresult instead.
- has_support = select('SHOW standard_conforming_strings').to_a[0][0] rescue false
- self.client_min_messages = client_min_messages_old
- has_support
- end
-
- def supports_insert_with_returning?
- postgresql_version >= 80200
- end
-
- def supports_ddl_transactions?
- false
- end
-
- def supports_savepoints?
- true
- end
-
- def supports_count_distinct? #:nodoc:
- false
- end
-
- def create_savepoint
- execute("SAVEPOINT #{current_savepoint_name}")
- end
-
- def rollback_to_savepoint
- execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
- end
-
- def release_savepoint
- execute("RELEASE SAVEPOINT #{current_savepoint_name}")
- end
-
- # Returns the identifier length limit configured in PostgreSQL,
- # or the default of 63 on PostgreSQL 7.x.
- def table_alias_length
- @table_alias_length ||= (postgresql_version >= 80000 ? select_one('SHOW max_identifier_length')['max_identifier_length'].to_i : 63)
- end
-
- def default_sequence_name(table_name, pk = nil)
- default_pk, default_seq = pk_and_sequence_for(table_name)
- default_seq || "#{table_name}_#{pk || default_pk || 'id'}_seq"
- end
-
- # Resets sequence to the max value of the table's pk if present.
- def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
- unless pk and sequence
- default_pk, default_sequence = pk_and_sequence_for(table)
- pk ||= default_pk
- sequence ||= default_sequence
- end
- if pk
- if sequence
- quoted_sequence = quote_column_name(sequence)
-
- select_value <<-end_sql, 'Reset sequence'
- SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)
- end_sql
- else
- @logger.warn "#{table} has primary key #{pk} with no default sequence" if @logger
- end
- end
- end
-
- # Find a table's primary key and sequence.
- def pk_and_sequence_for(table) #:nodoc:
- # First try looking for a sequence with a dependency on the
- # given table's primary key.
- result = select(<<-end_sql, 'PK and serial sequence')[0]
- SELECT attr.attname, seq.relname
- FROM pg_class seq,
- pg_attribute attr,
- pg_depend dep,
- pg_namespace name,
- pg_constraint cons
- WHERE seq.oid = dep.objid
- AND seq.relkind = 'S'
- AND attr.attrelid = dep.refobjid
- AND attr.attnum = dep.refobjsubid
- AND attr.attrelid = cons.conrelid
- AND attr.attnum = cons.conkey[1]
- AND cons.contype = 'p'
- AND dep.refobjid = '#{quote_table_name(table)}'::regclass
- end_sql
-
- if result.nil? or result.empty?
- # If that fails, try parsing the primary key's default value.
- # Support the 7.x and 8.0 nextval('foo'::text) as well as
- # the 8.1+ nextval('foo'::regclass).
- result = select(<<-end_sql, 'PK and custom sequence')[0]
- SELECT attr.attname,
- CASE
- WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN
- substr(split_part(def.adsrc, '''', 2),
- strpos(split_part(def.adsrc, '''', 2), '.')+1)
- ELSE split_part(def.adsrc, '''', 2)
- END as relname
- FROM pg_class t
- JOIN pg_attribute attr ON (t.oid = attrelid)
- JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
- JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
- WHERE t.oid = '#{quote_table_name(table)}'::regclass
- AND cons.contype = 'p'
- AND def.adsrc ~* 'nextval'
- end_sql
- end
-
- [result["attname"], result["relname"]]
- rescue
- nil
- end
-
- def pg_insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
- # Extract the table from the insert sql. Yuck.
- table = sql.split(" ", 4)[2].gsub('"', '')
-
- # Try an insert with 'returning id' if available (PG >= 8.2)
- if supports_insert_with_returning? && id_value.nil?
- pk, sequence_name = *pk_and_sequence_for(table) unless pk
- if pk
- id_value = select_value("#{sql} RETURNING #{quote_column_name(pk)}")
- clear_query_cache #FIXME: Why now?
- return id_value
- end
- end
-
- # Otherwise, plain insert
- execute(sql, name)
-
- # Don't need to look up id_value if we already have it.
- # (and can't in case of non-sequence PK)
- unless id_value
- # If neither pk nor sequence name is given, look them up.
- unless pk || sequence_name
- pk, sequence_name = *pk_and_sequence_for(table)
- end
-
- # If a pk is given, fallback to default sequence name.
- # Don't fetch last insert id for a table without a pk.
- if pk && sequence_name ||= default_sequence_name(table, pk)
- id_value = last_insert_id(table, sequence_name)
- end
- end
- id_value
- end
-
- def pg_columns(table_name, name=nil)
- schema_name = @config[:schema_search_path]
- if table_name =~ /\./
- parts = table_name.split(/\./)
- table_name = parts.pop
- schema_name = parts.join(".")
- end
- schema_list = if schema_name.nil?
- []
- else
- schema_name.split(/\s*,\s*/)
- end
- while schema_list.size > 1
- s = schema_list.shift
- begin
- return @connection.columns_internal(table_name, name, s)
- rescue ActiveRecord::JDBCError=>ignored_for_next_schema
- end
- end
- s = schema_list.shift
- return @connection.columns_internal(table_name, name, s)
- end
-
- # From postgresql_adapter.rb
- def indexes(table_name, name = nil)
- result = select_rows(<<-SQL, name)
- SELECT i.relname, d.indisunique, a.attname
- FROM pg_class t, pg_class i, pg_index d, pg_attribute a
- WHERE i.relkind = 'i'
- AND d.indexrelid = i.oid
- AND d.indisprimary = 'f'
- AND t.oid = d.indrelid
- AND t.relname = '#{table_name}'
- AND a.attrelid = t.oid
- AND ( d.indkey[0]=a.attnum OR d.indkey[1]=a.attnum
- OR d.indkey[2]=a.attnum OR d.indkey[3]=a.attnum
- OR d.indkey[4]=a.attnum OR d.indkey[5]=a.attnum
- OR d.indkey[6]=a.attnum OR d.indkey[7]=a.attnum
- OR d.indkey[8]=a.attnum OR d.indkey[9]=a.attnum )
- ORDER BY i.relname
- SQL
-
- current_index = nil
- indexes = []
-
- result.each do |row|
- if current_index != row[0]
- indexes << ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, row[0], row[1] == "t", [])
- current_index = row[0]
- end
-
- indexes.last.columns << row[2]
- end
-
- indexes
- end
-
- def last_insert_id(table, sequence_name)
- Integer(select_value("SELECT currval('#{sequence_name}')"))
- end
-
- def recreate_database(name)
- drop_database(name)
- create_database(name)
- end
-
- def create_database(name, options = {})
- execute "CREATE DATABASE \"#{name}\" ENCODING='#{options[:encoding] || 'utf8'}'"
- end
-
- def drop_database(name)
- execute "DROP DATABASE IF EXISTS \"#{name}\""
- end
-
- def create_schema(schema_name, pg_username)
- execute("CREATE SCHEMA \"#{schema_name}\" AUTHORIZATION \"#{pg_username}\"")
- end
-
- def drop_schema(schema_name)
- execute("DROP SCHEMA \"#{schema_name}\"")
- end
-
- def all_schemas
- select('select nspname from pg_namespace').map {|r| r["nspname"] }
- end
-
- def primary_key(table)
- pk_and_sequence = pk_and_sequence_for(table)
- pk_and_sequence && pk_and_sequence.first
- end
-
- def structure_dump
- database = @config[:database]
- if database.nil?
- if @config[:url] =~ /\/([^\/]*)$/
- database = $1
- else
- raise "Could not figure out what database this url is for #{@config["url"]}"
- end
- end
-
- ENV['PGHOST'] = @config[:host] if @config[:host]
- ENV['PGPORT'] = @config[:port].to_s if @config[:port]
- ENV['PGPASSWORD'] = @config[:password].to_s if @config[:password]
- search_path = @config[:schema_search_path]
- search_path = "--schema=#{search_path}" if search_path
-
- @connection.connection.close
- begin
- definition = `pg_dump -i -U "#{@config[:username]}" -s -x -O #{search_path} #{database}`
- raise "Error dumping database" if $?.exitstatus == 1
-
- # need to patch away any references to SQL_ASCII as it breaks the JDBC driver
- definition.gsub(/SQL_ASCII/, 'UNICODE')
- ensure
- reconnect!
- end
- end
-
- # SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
- #
- # PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
- # requires that the ORDER BY include the distinct column.
- #
- # distinct("posts.id", "posts.created_at desc")
- def distinct(columns, order_by)
- return "DISTINCT #{columns}" if order_by.blank?
-
- # construct a clean list of column names from the ORDER BY clause, removing
- # any asc/desc modifiers
- order_columns = order_by.split(',').collect { |s| s.split.first }
- order_columns.delete_if(&:blank?)
- order_columns = order_columns.zip((0...order_columns.size).to_a).map { |s,i| "#{s} AS alias_#{i}" }
-
- # return a DISTINCT ON() clause that's distinct on the columns we want but includes
- # all the required columns for the ORDER BY to work properly
- sql = "DISTINCT ON (#{columns}) #{columns}, "
- sql << order_columns * ', '
- end
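The PostgreSQL counterpart of the same call, distinct("posts.id", "posts.created_at desc"), produces a DISTINCT ON clause; a standalone sketch of the output:

columns  = "posts.id"
order_by = "posts.created_at desc"
order_columns = order_by.split(',').map { |s| s.split.first }
order_columns = order_columns.each_with_index.map { |s, i| "#{s} AS alias_#{i}" }
puts "DISTINCT ON (#{columns}) #{columns}, " + order_columns.join(', ')
# => DISTINCT ON (posts.id) posts.id, posts.created_at AS alias_0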
-
- # ORDER BY clause for the passed order option.
- #
- # PostgreSQL does not allow arbitrary ordering when using DISTINCT ON, so we work around this
- # by wrapping the sql as a sub-select and ordering in that query.
- def add_order_by_for_association_limiting!(sql, options)
- return sql if options[:order].blank?
-
- order = options[:order].split(',').collect { |s| s.strip }.reject(&:blank?)
- order.map! { |s| 'DESC' if s =~ /\bdesc$/i }
- order = order.zip((0...order.size).to_a).map { |s,i| "id_list.alias_#{i} #{s}" }.join(', ')
-
- sql.replace "SELECT * FROM (#{sql}) AS id_list ORDER BY #{order}"
- end
-
- def quote(value, column = nil) #:nodoc:
- return super unless column
-
- if value.kind_of?(String) && column.type == :binary
- "E'#{escape_bytea(value)}'"
- elsif value.kind_of?(String) && column.sql_type == 'xml'
- "xml '#{quote_string(value)}'"
- elsif value.kind_of?(Numeric) && column.sql_type == 'money'
- # Not truly string input, so doesn't require (or allow) escape string syntax.
- "'#{value}'"
- elsif value.kind_of?(String) && column.sql_type =~ /^bit/
- case value
- when /^[01]*$/
- "B'#{value}'" # Bit-string notation
- when /^[0-9A-F]*$/i
- "X'#{value}'" # Hexadecimal notation
- end
- else
- super
- end
- end
-
- def escape_bytea(s)
- if s
- result = ''
- s.each_byte { |c| result << sprintf('\\\\%03o', c) }
- result
- end
- end
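escape_bytea above emits PostgreSQL's octal escape form for bytea literals; a runnable standalone equivalent:

# Every byte becomes a double-backslashed three-digit octal escape,
# e.g. "A" (byte 65, octal 101) becomes \\101.
def escape_bytea_sketch(s)
  s.each_byte.map { |c| sprintf('\\\\%03o', c) }.join
end

puts escape_bytea_sketch("AB")  # => \\101\\102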
-
- def quote_table_name(name)
- schema, name_part = extract_pg_identifier_from_name(name.to_s)
-
- unless name_part
- quote_column_name(schema)
- else
- table_name, name_part = extract_pg_identifier_from_name(name_part)
- "#{quote_column_name(schema)}.#{quote_column_name(table_name)}"
- end
- end
-
- def quote_column_name(name)
- %("#{name}")
- end
-
- def quoted_date(value) #:nodoc:
- if value.acts_like?(:time) && value.respond_to?(:usec)
- "#{super}.#{sprintf("%06d", value.usec)}"
- else
- super
- end
- end
-
- def disable_referential_integrity(&block) #:nodoc:
- execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
- yield
- ensure
- execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
- end
-
- def rename_table(name, new_name)
- execute "ALTER TABLE #{name} RENAME TO #{new_name}"
- end
-
- # Adds a new column to the named table.
- # See TableDefinition#column for details of the options you can use.
- def add_column(table_name, column_name, type, options = {})
- default = options[:default]
- notnull = options[:null] == false
-
- # Add the column.
- execute("ALTER TABLE #{quote_table_name(table_name)} ADD COLUMN #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}")
-
- change_column_default(table_name, column_name, default) if options_include_default?(options)
- change_column_null(table_name, column_name, false, default) if notnull
- end
-
- # Changes the column of a table.
- def change_column(table_name, column_name, type, options = {})
- quoted_table_name = quote_table_name(table_name)
-
- begin
- execute "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quote_column_name(column_name)} TYPE #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
- rescue ActiveRecord::StatementInvalid => e
- raise e if postgresql_version > 80000
- # This is PostgreSQL 7.x, so we have to use a more arcane way of doing it.
- begin
- begin_db_transaction
- tmp_column_name = "#{column_name}_ar_tmp"
- add_column(table_name, tmp_column_name, type, options)
- execute "UPDATE #{quoted_table_name} SET #{quote_column_name(tmp_column_name)} = CAST(#{quote_column_name(column_name)} AS #{type_to_sql(type, options[:limit], options[:precision], options[:scale])})"
- remove_column(table_name, column_name)
- rename_column(table_name, tmp_column_name, column_name)
- commit_db_transaction
- rescue
- rollback_db_transaction
- end
- end
-
- change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
- change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
- end
-
- # Changes the default value of a table column.
- def change_column_default(table_name, column_name, default)
- execute "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} SET DEFAULT #{quote(default)}"
- end
-
- def change_column_null(table_name, column_name, null, default = nil)
- unless null || default.nil?
- execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
- end
- execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- execute "ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
- end
-
- def remove_index(table_name, options) #:nodoc:
- execute "DROP INDEX #{index_name(table_name, options)}"
- end
-
- def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
- return super unless type.to_s == 'integer'
-
- if limit.nil? || limit == 4
- 'integer'
- elsif limit < 4
- 'smallint'
- else
- 'bigint'
- end
- end
-
- def tables
- @connection.tables(database_name, nil, nil, ["TABLE"])
- end
-
- private
- def translate_exception(exception, message)
- case exception.message
- when /duplicate key value violates unique constraint/
- ::ActiveRecord::RecordNotUnique.new(message, exception)
- when /violates foreign key constraint/
- ::ActiveRecord::InvalidForeignKey.new(message, exception)
- else
- super
- end
- end
-
- def extract_pg_identifier_from_name(name)
- match_data = name[0,1] == '"' ? name.match(/\"([^\"]+)\"/) : name.match(/([^\.]+)/)
-
- if match_data
- rest = name[match_data[0].length..-1]
- rest = rest[1..-1] if rest[0,1] == "."
- [match_data[1], (rest.length > 0 ? rest : nil)]
- end
- end
- end
-end
-
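Editorial sketch (hedged, not part of the patch): the integer column sizing applied by the removed adapter's type_to_sql above, restated as a standalone method so the limit thresholds are easy to read.

# Standalone restatement of the limit-to-SQL-type mapping shown in type_to_sql
# above; illustrative only, not part of the removed file.
def integer_sql_type(limit = nil)
  if limit.nil? || limit == 4
    'integer'    # default 4-byte integer column
  elsif limit < 4
    'smallint'   # 2-byte column for :limit => 1..3
  else
    'bigint'     # 8-byte column for :limit => 5 and above
  end
end

integer_sql_type      # => "integer"
integer_sql_type(2)   # => "smallint"
integer_sql_type(8)   # => "bigint"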
+++ /dev/null
-# Don't need to load native postgres adapter
-$LOADED_FEATURES << "active_record/connection_adapters/postgresql_adapter.rb"
-
-class ActiveRecord::Base
- class << self
- def postgresql_connection(config)
- require "arjdbc/postgresql"
- config[:host] ||= "localhost"
- config[:port] ||= 5432
- config[:url] ||= "jdbc:postgresql://#{config[:host]}:#{config[:port]}/#{config[:database]}"
- config[:url] << config[:pg_params] if config[:pg_params]
- config[:driver] ||= "org.postgresql.Driver"
- conn = jdbc_connection(config)
- conn.execute("SET SEARCH_PATH TO #{config[:schema_search_path]}") if config[:schema_search_path]
- conn
- end
- alias_method :jdbcpostgresql_connection, :postgresql_connection
- end
-end
-
-
+++ /dev/null
-require 'arjdbc/jdbc'
-jdbc_require_driver 'jdbc/sqlite3'
-require 'arjdbc/sqlite3/connection_methods'
-require 'arjdbc/sqlite3/adapter'
+++ /dev/null
-require 'arjdbc/jdbc/missing_functionality_helper'
-
-module ActiveRecord::ConnectionAdapters
- Sqlite3Adapter = Class.new(AbstractAdapter) unless const_defined?(:Sqlite3Adapter)
-end
-
-module ::ArJdbc
- module SQLite3
- def self.column_selector
- [/sqlite/i, lambda {|cfg,col| col.extend(::ArJdbc::SQLite3::Column)}]
- end
-
- def self.jdbc_connection_class
- ::ActiveRecord::ConnectionAdapters::Sqlite3JdbcConnection
- end
-
- module Column
- def init_column(name, default, *args)
- @default = '' if default =~ /NULL/
- end
-
- def type_cast(value)
- return nil if value.nil?
- case type
- when :string then value
- when :primary_key then defined?(value.to_i) ? value.to_i : (value ? 1 : 0)
- when :float then value.to_f
- when :decimal then self.class.value_to_decimal(value)
- when :boolean then self.class.value_to_boolean(value)
- else super
- end
- end
-
- private
- def simplified_type(field_type)
- case field_type
- when /boolean/i then :boolean
- when /text/i then :text
- when /varchar/i then :string
- when /int/i then :integer
- when /float/i then :float
- when /real|decimal/i then @scale == 0 ? :integer : :decimal
- when /datetime/i then :datetime
- when /date/i then :date
- when /time/i then :time
- when /blob/i then :binary
- end
- end
-
- def extract_limit(sql_type)
- return nil if sql_type =~ /^(real)\(\d+/i
- super
- end
-
- def extract_precision(sql_type)
- case sql_type
- when /^(real)\((\d+)(,\d+)?\)/i then $2.to_i
- else super
- end
- end
-
- def extract_scale(sql_type)
- case sql_type
- when /^(real)\((\d+)\)/i then 0
- when /^(real)\((\d+)(,(\d+))\)/i then $4.to_i
- else super
- end
- end
-
- # Post process default value from JDBC into a Rails-friendly format (columns{-internal})
- def default_value(value)
- # jdbc returns column default strings with actual single quotes around the value.
- return $1 if value =~ /^'(.*)'$/
-
- value
- end
- end
-
- def adapter_name #:nodoc:
- 'SQLite'
- end
-
- def arel2_visitors
- {'jdbcsqlite3' => ::Arel::Visitors::SQLite}
- end
-
- def supports_ddl_transactions?
- true # sqlite_version >= '2.0.0'
- end
-
- def supports_add_column?
- sqlite_version >= '3.1.6'
- end
-
- def supports_count_distinct? #:nodoc:
- sqlite_version >= '3.2.6'
- end
-
- def supports_autoincrement? #:nodoc:
- sqlite_version >= '3.1.0'
- end
-
- def sqlite_version
- @sqlite_version ||= select_value('select sqlite_version(*)')
- end
-
- def modify_types(tp)
- tp[:primary_key] = "integer primary key autoincrement not null"
- tp[:string] = { :name => "varchar", :limit => 255 }
- tp[:text] = { :name => "text" }
- tp[:float] = { :name => "float" }
- tp[:decimal] = { :name => "decimal" }
- tp[:datetime] = { :name => "datetime" }
- tp[:timestamp] = { :name => "datetime" }
- tp[:time] = { :name => "time" }
- tp[:date] = { :name => "date" }
- tp[:boolean] = { :name => "boolean" }
- tp[:binary] = { :name => "blob" }
- tp
- end
-
- def quote_column_name(name) #:nodoc:
- %Q("#{name}")
- end
-
- def quote_string(str)
- str.gsub(/'/, "''")
- end
-
- def quoted_true
- %Q{'t'}
- end
-
- def quoted_false
- %Q{'f'}
- end
-
- # Quote date/time values for use in SQL input. Includes microseconds
- # if the value is a Time responding to usec.
- def quoted_date(value) #:nodoc:
- if value.respond_to?(:usec)
- "#{super}.#{sprintf("%06d", value.usec)}"
- else
- super
- end
- end
-
- def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
- @connection.execute_update(sql)
- id_value || last_insert_id
- end
-
- def last_insert_id
- Integer(select_value("SELECT last_insert_rowid()"))
- end
-
- def tables(name = nil) #:nodoc:
- sql = <<-SQL
- SELECT name
- FROM sqlite_master
- WHERE type = 'table' AND NOT name = 'sqlite_sequence'
- SQL
-
- select_rows(sql, name).map do |row|
- row[0]
- end
- end
-
- def indexes(table_name, name = nil)
- result = select_rows("SELECT name, sql FROM sqlite_master WHERE tbl_name = #{quote_table_name(table_name)} AND type = 'index'", name)
-
- result.collect do |row|
- name = row[0]
- index_sql = row[1]
- unique = (index_sql =~ /unique/i)
- cols = index_sql.match(/\((.*)\)/)[1].gsub(/,/,' ').split.map do |c|
- match = /^"(.+)"$/.match(c); match ? match[1] : c
- end
- ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, name, unique, cols)
- end
- end
-
- def primary_key(table_name) #:nodoc:
- column = table_structure(table_name).find {|field| field['pk'].to_i == 1}
- column ? column['name'] : nil
- end
-
- def recreate_database(name)
- tables.each{ |table| drop_table(table) }
- end
-
- def _execute(sql, name = nil)
- result = super
- ActiveRecord::ConnectionAdapters::JdbcConnection::insert?(sql) ? last_insert_id : result
- end
-
- def select(sql, name=nil)
- execute(sql, name).map do |row|
- record = {}
- row.each_key do |key|
- if key.is_a?(String)
- record[key.sub(/^"?\w+"?\./, '')] = row[key]
- end
- end
- record
- end
- end
-
- def table_structure(table_name)
- structure = @connection.execute_query("PRAGMA table_info(#{quote_table_name(table_name)})")
- raise ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'" if structure.empty?
- structure
- end
-
- def jdbc_columns(table_name, name = nil) #:nodoc:
- table_structure(table_name).map do |field|
- ::ActiveRecord::ConnectionAdapters::SQLite3Column.new(@config, field['name'], field['dflt_value'], field['type'], field['notnull'] == 0)
- end
- end
-
- def primary_key(table_name) #:nodoc:
- column = table_structure(table_name).find { |field|
- field['pk'].to_i == 1
- }
- column && column['name']
- end
-
- def remove_index!(table_name, index_name) #:nodoc:
- execute "DROP INDEX #{quote_column_name(index_name)}"
- end
-
- def rename_table(name, new_name)
- execute "ALTER TABLE #{quote_table_name(name)} RENAME TO #{quote_table_name(new_name)}"
- end
-
- # See: http://www.sqlite.org/lang_altertable.html
- # SQLite has an additional restriction on the ALTER TABLE statement
- def valid_alter_table_options( type, options)
- type.to_sym != :primary_key
- end
-
- def add_column(table_name, column_name, type, options = {}) #:nodoc:
- if supports_add_column? && valid_alter_table_options( type, options )
- super(table_name, column_name, type, options)
- else
- alter_table(table_name) do |definition|
- definition.column(column_name, type, options)
- end
- end
- end
-
- def remove_column(table_name, *column_names) #:nodoc:
- raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_names.empty?
- column_names.flatten.each do |column_name|
- alter_table(table_name) do |definition|
- definition.columns.delete(definition[column_name])
- end
- end
- end
- alias :remove_columns :remove_column
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- alter_table(table_name) do |definition|
- definition[column_name].default = default
- end
- end
-
- def change_column_null(table_name, column_name, null, default = nil)
- unless null || default.nil?
- execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
- end
- alter_table(table_name) do |definition|
- definition[column_name].null = null
- end
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- alter_table(table_name) do |definition|
- include_default = options_include_default?(options)
- definition[column_name].instance_eval do
- self.type = type
- self.limit = options[:limit] if options.include?(:limit)
- self.default = options[:default] if include_default
- self.null = options[:null] if options.include?(:null)
- end
- end
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- unless columns(table_name).detect{|c| c.name == column_name.to_s }
- raise ActiveRecord::ActiveRecordError, "Missing column #{table_name}.#{column_name}"
- end
- alter_table(table_name, :rename => {column_name.to_s => new_column_name.to_s})
- end
-
- # SELECT ... FOR UPDATE is redundant since the table is locked.
- def add_lock!(sql, options) #:nodoc:
- sql
- end
-
- def empty_insert_statement_value
- "VALUES(NULL)"
- end
-
- protected
- include ArJdbc::MissingFunctionalityHelper
-
- def translate_exception(exception, message)
- case exception.message
- when /column(s)? .* (is|are) not unique/
- ActiveRecord::RecordNotUnique.new(message, exception)
- else
- super
- end
- end
- end
-end
-
-module ActiveRecord::ConnectionAdapters
- remove_const(:SQLite3Adapter) if const_defined?(:SQLite3Adapter)
- remove_const(:SQLiteAdapter) if const_defined?(:SQLiteAdapter)
-
- class SQLite3Column < JdbcColumn
- include ArJdbc::SQLite3::Column
-
- def initialize(name, *args)
- if Hash === name
- super
- else
- super(nil, name, *args)
- end
- end
-
- def call_discovered_column_callbacks(*)
- end
-
- def self.string_to_binary(value)
- "\000b64" + [value].pack('m*').split("\n").join('')
- end
-
- def self.binary_to_string(value)
- if value.respond_to?(:force_encoding) && value.encoding != Encoding::ASCII_8BIT
- value = value.force_encoding(Encoding::ASCII_8BIT)
- end
-
- if value[0..3] == "\000b64"
- value[4..-1].unpack('m*').first
- else
- value
- end
- end
- end
-
- class SQLite3Adapter < JdbcAdapter
- include ArJdbc::SQLite3
-
- def adapter_spec(config)
- # return nil to avoid extending ArJdbc::SQLite3, which we've already done
- end
-
- def jdbc_connection_class(spec)
- ::ArJdbc::SQLite3.jdbc_connection_class
- end
-
- def jdbc_column_class
- ActiveRecord::ConnectionAdapters::SQLite3Column
- end
-
- alias_chained_method :columns, :query_cache, :jdbc_columns
- end
-
- SQLiteAdapter = SQLite3Adapter
-end
-
-# Fake out sqlite3/version driver for AR tests
-$LOADED_FEATURES << 'sqlite3/version.rb'
-module SQLite3
- module Version
- VERSION = '1.2.6' # query_cache_test.rb requires SQLite3::Version::VERSION > '1.2.5'
- end
-end
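Editorial sketch (hedged, not part of the patch): the "\000b64" framing used by SQLite3Column.string_to_binary and binary_to_string above, shown as a standalone round trip.

# Illustrative round trip of the blob framing above; not part of the removed file.
def encode_blob(value)
  "\000b64" + [value].pack('m*').split("\n").join('')   # 4-byte marker + base64 without newlines
end

def decode_blob(value)
  value[0..3] == "\000b64" ? value[4..-1].unpack('m*').first : value
end

raw = [0, 1, 255, 104, 105].pack('C*')   # arbitrary binary bytes
decode_blob(encode_blob(raw)) == raw     # => true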
+++ /dev/null
-# Don't need to load native sqlite3 adapter
-$LOADED_FEATURES << "active_record/connection_adapters/sqlite_adapter.rb"
-$LOADED_FEATURES << "active_record/connection_adapters/sqlite3_adapter.rb"
-
-class ActiveRecord::Base
- class << self
- def sqlite3_connection(config)
- require "arjdbc/sqlite3"
-
- parse_sqlite3_config!(config)
- database = config[:database]
- database = '' if database == ':memory:'
- config[:url] ||= "jdbc:sqlite:#{database}"
- config[:driver] ||= "org.sqlite.JDBC"
- config[:adapter_class] = ActiveRecord::ConnectionAdapters::SQLite3Adapter
- jdbc_connection(config)
- end
-
- def parse_sqlite3_config!(config)
- config[:database] ||= config[:dbfile]
-
- # Allow database path relative to RAILS_ROOT, but only if
- # the database path is not the special path that tells
- # Sqlite to build a database only in memory.
- rails_root_defined = defined?(Rails.root) || Object.const_defined?(:RAILS_ROOT)
- if rails_root_defined && ':memory:' != config[:database]
- rails_root = defined?(Rails.root) ? Rails.root : RAILS_ROOT
- config[:database] = File.expand_path(config[:database], rails_root)
- end
- end
-
- alias_method :jdbcsqlite3_connection, :sqlite3_connection
- end
-end
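Editorial sketch (hedged, not part of the patch): the path handling performed by parse_sqlite3_config! above; a relative :database path is expanded against the Rails root, while the special ':memory:' value is left alone. The root directory below is a hypothetical value for illustration only.

# Illustrative only; '/opt/sonar' is a made-up Rails root.
config     = { :database => 'db/sonar.sqlite3' }
rails_root = '/opt/sonar'
unless config[:database] == ':memory:'
  config[:database] = File.expand_path(config[:database], rails_root)
end
config[:database]   # => "/opt/sonar/db/sonar.sqlite3"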
+++ /dev/null
-require 'arjdbc/jdbc'
-require 'arjdbc/sybase/adapter.rb'
+++ /dev/null
-module ArJdbc
- module Sybase
- def add_limit_offset!(sql, options) # :nodoc:
- @limit = options[:limit]
- @offset = options[:offset]
- if use_temp_table?
- # Use temp table to hack offset with Sybase
- sql.sub!(/ FROM /i, ' INTO #artemp FROM ')
- elsif zero_limit?
- # "SET ROWCOUNT 0" turns off limits, so we havesy
- # to use a cheap trick.
- if sql =~ /WHERE/i
- sql.sub!(/WHERE/i, 'WHERE 1 = 2 AND ')
- elsif sql =~ /ORDER\s+BY/i
- sql.sub!(/ORDER\s+BY/i, 'WHERE 1 = 2 ORDER BY')
- else
- sql << 'WHERE 1 = 2'
- end
- end
- end
-
- # If limit is not set at all, we can ignore offset;
- # if limit *is* set but offset is zero, use normal select
- # with simple SET ROWCOUNT. Thus, only use the temp table
- # if limit is set and offset > 0.
- def use_temp_table?
- !@limit.nil? && !@offset.nil? && @offset > 0
- end
-
- def zero_limit?
- !@limit.nil? && @limit == 0
- end
-
- def modify_types(tp) #:nodoc:
- tp[:primary_key] = "NUMERIC(22,0) IDENTITY PRIMARY KEY"
- tp[:integer][:limit] = nil
- tp[:boolean] = {:name => "bit"}
- tp[:binary] = {:name => "image"}
- tp
- end
-
- def remove_index(table_name, options = {})
- execute "DROP INDEX #{table_name}.#{index_name(table_name, options)}"
- end
- end
-end
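Editorial sketch (hedged, not part of the patch): what the zero-limit branch of add_limit_offset! above does to a statement. Sybase's SET ROWCOUNT 0 disables limiting rather than returning zero rows, so the adapter injects an always-false predicate instead.

# Illustrative only: the WHERE-clause rewrite used when :limit => 0.
sql = "SELECT * FROM users WHERE active = 1 ORDER BY name"
sql.sub!(/WHERE/i, 'WHERE 1 = 2 AND ')
sql   # => "SELECT * FROM users WHERE 1 = 2 AND  active = 1 ORDER BY name"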
+++ /dev/null
-module ArJdbc
- module Version
- VERSION = "1.1.3"
- end
-end
-# Compatibility with older versions of ar-jdbc for other extensions out there
-JdbcAdapter = ArJdbc
-JdbcSpec = ArJdbc
+++ /dev/null
-class JdbcGenerator < Rails::Generators::Base
- def self.source_root
- @source_root ||= File.expand_path('../../../../rails_generators/templates', __FILE__)
- end
-
- def create_jdbc_files
- directory '.', '.'
- end
-end
+++ /dev/null
-warn "DEPRECATED: require 'arjdbc' instead of 'jdbc_adapter'."
-require 'arjdbc'
+++ /dev/null
-warn "DEPRECATED: require 'arjdbc/rake_tasks' instead of 'jdbc_adapter/rake_tasks'."
-require 'arjdbc/jdbc/rake_tasks'
-
+++ /dev/null
-warn "DEPRECATED: require 'arjdbc/version' instead of 'jdbc_adapter/version'."
-require 'arjdbc/version'
-
+++ /dev/null
-# Stub library for postgresql -- allows Rails to load
-# postgresql_adapter without error. Other than postgres-pr, there's no
-# other way to use PostgreSQL on JRuby anyway, right? If you've
-# installed ar-jdbc you probably want to use that to connect to pg.
-#
-# If by chance this library is installed in another Ruby and this file
-# got required then we'll just continue to try to load the next pg.rb
-# in the $LOAD_PATH.
-
-unless defined?(JRUBY_VERSION)
- gem 'pg' if respond_to?(:gem) # make sure pg gem is activated
- after_current_file = false
- $LOAD_PATH.each do |p|
- require_file = File.join(p, 'pg.rb')
-
- if File.expand_path(require_file) == File.expand_path(__FILE__)
- after_current_file = true
- next
- end
-
- if after_current_file && File.exist?(require_file)
- load require_file
- break
- end
- end
-end
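Editorial sketch (hedged, not part of the patch): the pass-through idiom the pg stub above relies on, extracted into a helper. Entries on $LOAD_PATH are skipped until the one that resolved to the current file, and the first later copy of the same feature is loaded instead, so a stub can defer to a real library installed elsewhere. The helper name is made up for illustration.

# Hypothetical helper illustrating the $LOAD_PATH pass-through used above.
def load_next_copy_of(feature, current_file)
  past_current = false
  $LOAD_PATH.each do |dir|
    candidate = File.join(dir, feature)
    if File.expand_path(candidate) == File.expand_path(current_file)
      past_current = true        # skip up to and including this stub's own entry
      next
    end
    if past_current && File.exist?(candidate)
      load candidate             # hand control to the next copy on the load path
      break
    end
  end
end

# Usage (from inside a stub file): load_next_copy_of('pg.rb', __FILE__)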