/* Copyright (c) 2003, 2005, Oracle. All rights reserved. */
/*
DESCRIPTION
The SampleShapefileToJGeomFeature class provides an example of how to transform
ESRI Shapefiles into Oracle's native Spatial (or Locator) type, SDO_GEOMETRY (and its
Java counterpart, JGeometry), using the Java utilities introduced with Oracle10g.
Prior to Oracle10g, Oracle offered a C++-based conversion tool, SHP2SDO, that could
only be used from the command line. With the new Java-based Shapefile adapter, customers
and partners can build their own applications and interfaces for converting ESRI
Shapefiles directly into Oracle's SDO_GEOMETRY database type or the JGeometry Java type.
NOTES
To use the SampleShapefileToJGeomFeature example and/or the new Java-based Shapefile
adapter, the following requirements must be met:
1) Must have an Oracle9i or 10g database with Locator or Spatial installed
2) Must have a user/password account for the database with connect and resource privileges
3) Must be using the following supplied Oracle10g utilities and JDBC libraries (JARs):
ojdbc14.jar,sdoutl.jar,sdoapi.jar
4) Must have installed the Sun JDK version 1.4.2_04 or higher
5) Must have a sample ESRI Shapefile set (which includes all the following file types:
.shp, .shx, and .dbf files)
To compile and run this sample, please see the README file.
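An example invocation (illustrative only; adjust the classpath separator, connection
details, table name, SRID, and shapefile name to your environment):
java -cp $ORACLE_HOME/jdbc/lib/ojdbc14.jar:./sdoutl.jar:./sdoapi.jar \
SampleShapefileToJGeomFeature -h localhost -p 1521 -s orcl -u scott -d tiger \
-t CITIES -f cities -i ID -r 8307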
MODIFIED (MM/DD/YY)
dabugov 08/03/05 - If 0 srid, set to NULL in metadata. Fix default tolerance typo.
nalexand 07/21/05 - Add final conn.commit() before conn.close()
dgeringe 07/06/05 - Add -c for commit interval
dgeringe 07/06/05 - Print total number of records converted
nalexand 06/07/05 - Add getMin/MaxX(), getMin/MaxY() for default proj extents
nalexand 06/07/05 - Migrate during INSERT, to correct polygon rotation
nalexand 06/03/05 - Migrate before INSERT, to correct polygon rotation
nalexand 06/02/05 - Construct dimArrays
dgeringe 05/25/05 - Change -y to accept lower_bound,upper_bound (instead of the reverse)
dgeringe 05/25/05 - Change -x to accept lower_bound,upper_bound (instead of the reverse)
dgeringe 05/25/05 - Add -a and -n parameters, for append to existing table
jlokitz 02/23/05 - Add command line parameters, metadata and comments
nalexand 02/02/05 - Fix multipolygons
nalexand 08/24/04 - Trap geom errors and continue processing
nalexand 02/05/04 - Add Metadata for minZ, maxZ, minM, maxM
nalexand 01/28/04 - Read hash table
nalexand 01/16/04 - Add getRelSchema() -- overload for m_idName
nalexand 01/13/04 - Modify SampleShapefileToJGeom.java to support features
nalexand 12/09/03 - Modify SampleShapefileToSDO.java to support JGeometry
ranwei 05/16/00 - Creation (SampleShapefileToSDO.java)
*/
/**
* @version $Header: SampleShapefileToJGeomFeature.java 21-jul-2005.08:36:15 nalexand Exp $
* @author nalexand
* @since release specific (what release of product did this appear in)
*/
import java.util.HashMap;
import java.util.StringTokenizer;
import java.util.Vector;
import oracle.spatial.util.*;
import java.io.*;
import java.sql.*;
import java.util.Enumeration;
import java.util.Hashtable;
import oracle.sql.STRUCT;
import oracle.jdbc.driver.*;
import oracle.jdbc.OracleConnection;
import oracle.spatial.geometry.JGeometry;
/**
* This sample program reads all attributes and geometries from a Shapefile, and
* converts them to spatial features. These features are written to a database
* table. Shapefiles with measure data are converted to LRS geometries in the
* database.
*/
public class SampleShapefileToJGeomFeature {
private static String m_host;
private static String m_port;
private static String m_sid;
private static String m_user;
private static String m_password;
private static String m_tableName;
private static String m_shapefileName;
private static String m_idName = null;
private static int m_srid = 0;
private static String geomMetaDataTable = "user_sdo_geom_metadata";
private static String m_geom = "geometry";
private static String min_x = "-180";
private static String min_y = "-90";
private static String max_x = "180";
private static String max_y = "90";
private static String m_tolerance = "0.05";
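// Tighter tolerance used only in the dim array passed to SDO_MIGRATE.TO_CURRENT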
private static String mg_tolerance = "0.000000005";
private static int m_start_id = 1;
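// -1 means no -c was given; the main loop then commits every 1000 records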
private static int m_commit_interval = -1;
private static String dimArray = null;
private static String dimArrayMig = null;
private static boolean defaultX = true;
private static boolean defaultY = true;
public static void main(String args[]) throws Exception {
String usage = "The following key/value is missing: ";
HashMap hm = new HashMap();
String value;
String key;
Vector v = new Vector();
int skip_create_table = 0;
// The following logic statements (if, else) check for requisite command
// line parameters.
if (args.length <= 1) {
System.err
.println("USAGE: java -cp [ORACLE_HOME]/jdbc/lib/ojdbc14.jar;./sdoutl.jar;./sdoapi.jar SampleShapefileToJGeomFeature -h db_host -p db_port -s db_sid -u db_username -d db_password -t db_table -f shapefile_name [-i table_id_column_name][-r srid][-g db_geometry_column][-x max_x,min_x][-y max_y,min_y][-o tolerance]");
System.out.println("Usage explanation (parameters used):");
System.out
.println("<-h>: Host machine with existing Oracle database");
System.out
.println("<-p>: Host machine's port with existing Oracle database (e.g. 1521)");
System.out
.println("<-s>: Host machine's SID with existing Oracle database");
System.out.println("<-u>: Database user");
System.out.println("<-d>: Database user's password");
System.out.println("<-t>: Table name for the result");
System.out
.println("<-f>: File name of an input Shapefile (without extension)");
System.out
.println("[-i]: Column name for a unique numeric ID, if required");
System.out
.println("[-r]: Valid Oracle SRID for coordinate system; use 0 if unknown");
System.out
.println("[-g]: Preferred or valid SDO_GEOMETRY column name");
System.out
.println("[-x]: Bounds for the X dimension; use -180,180 if unknown");
System.out
.println("[-y]: Bounds for the Y dimension; use -90,90 if unknown");
System.out
.println("[-o]: Tolerance for the X and Y dimensions in the metadata; defaults to 0.05 if not specified");
System.out
.println("[-a]: Append shapefile data to an existing table");
System.out
.println("[-n]: Starting ID for the column specified with the -i parameter");
System.out
.println("[-c]: Commit interval in records; defaults to a commit every 1000 records");
System.exit(1);
}
else {
for (int j = 0; args.length > j; j++) {
v.add(j, args[j]);
}
for (Enumeration e = v.elements(); e.hasMoreElements();) {
try {
key = (String) e.nextElement();
if (key.equals("-a"))
skip_create_table = 1;
else {
value = (String) e.nextElement();
if (key != null && value != null)
hm.put(key, value);
}
} catch (Exception ex) {
System.out
.println("One of your key/value pairs is incomplete or invalid. Please try again.");
System.exit(1);
}
}
}
if (hm.containsKey("-h")) {
System.out.println("host: " + (String) hm.get("-h"));
m_host = (String) hm.get("-h");
} else {
System.out.println(usage + "-h db_host");
System.exit(1);
}
if (hm.containsKey("-p")) {
System.out.println("port: " + (String) hm.get("-p"));
m_port = (String) hm.get("-p");
} else {
System.out.println(usage + "-p db_port");
System.exit(1);
}
if (hm.containsKey("-s")) {
System.out.println("sid: " + (String) hm.get("-s"));
m_sid = (String) hm.get("-s");
} else {
System.out.println(usage + "-s db_sid");
System.exit(1);
}
if (hm.containsKey("-u")) {
System.out.println("db_username: " + (String) hm.get("-u"));
m_user = (String) hm.get("-u");
} else {
System.out.println(usage + "-u db_username");
System.exit(1);
}
if (hm.containsKey("-d")) {
System.out.println("db_password: " + (String) hm.get("-d"));
m_password = (String) hm.get("-d");
} else {
System.out.println(usage + "-d password");
System.exit(1);
}
if (hm.containsKey("-t")) {
System.out.println("db_tablename: " + (String) hm.get("-t"));
m_tableName = (String) hm.get("-t");
} else {
System.out.println(usage + "-t tablename");
System.exit(1);
}
if (hm.containsKey("-f")) {
System.out.println("shapefile_name: " + (String) hm.get("-f"));
m_shapefileName = (String) hm.get("-f");
} else {
System.out.println(usage + "-f shapefile_name");
System.exit(1);
}
if (hm.containsKey("-i")) {
System.out
.println("table_id_column_name: " + (String) hm.get("-i"));
m_idName = (String) hm.get("-i");
}
if (hm.containsKey("-r")) {
System.out.println("SRID: " + (String) hm.get("-r"));
m_srid = Integer.parseInt((String) hm.get("-r"));
}
if (hm.containsKey("-g")) {
System.out.println("db_geometry_column: " + (String) hm.get("-g"));
m_geom = (String) hm.get("-g");
}
if (hm.containsKey("-x")) {
System.out.println("X: " + (String) hm.get("-x"));
StringTokenizer stx = new StringTokenizer((String) hm.get("-x"),
",");
while (stx.hasMoreTokens()) {
min_x = stx.nextToken();
max_x = stx.nextToken();
defaultX = false;
}
}
if (hm.containsKey("-y")) {
System.out.println("Y: " + (String) hm.get("-y"));
StringTokenizer sty = new StringTokenizer((String) hm.get("-y"),
",");
while (sty.hasMoreTokens()) {
min_y = sty.nextToken();
max_y = sty.nextToken();
defaultY = false;
}
}
if (hm.containsKey("-o")) {
System.out.println("tolerance: " + (String) hm.get("-o"));
m_tolerance = (String) hm.get("-o");
}
if (hm.containsKey("-n")) {
System.out.println("start_id: " + (String) hm.get("-n"));
m_start_id = Integer.parseInt((String) hm.get("-n"));
}
if (hm.containsKey("-c")) {
System.out.println("commit_interval: " + (String) hm.get("-c"));
m_commit_interval = Integer.parseInt((String) hm.get("-c"));
}
// Make connection to DB
System.out.println("Connecting to Oracle10g using...");
System.out.println(m_host + ", " + m_port + ", " + m_sid + ", "
+ m_user + ", " + m_password + ", " + m_tableName + ", "
+ m_shapefileName + ", " + m_idName + ", " + m_srid);
String url = "jdbc:oracle:thin:@ " + m_host + ":" + m_port + ":"
+ m_sid;
OracleConnection conn = null;
try {
DriverManager.registerDriver(new OracleDriver());
conn = (OracleConnection) DriverManager.getConnection(url, m_user,
m_password);
conn.setAutoCommit(false);
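// Auto-commit is disabled; inserts are committed in batches (see the -c parameter)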
} catch (Exception e) {
e.printStackTrace();
}
// Open dbf and input files
System.out.println("Nome do Shape "+m_shapefileName);
DBFReaderJGeom dbfr = new DBFReaderJGeom(m_shapefileName);
ShapefileReaderJGeom sfh = new ShapefileReaderJGeom(m_shapefileName);
ShapefileFeatureJGeom sf = new ShapefileFeatureJGeom();
// Get type, measure and Z info from shapefile
int shpFileType = sfh.getShpFileType();
double minMeasure = sfh.getMinMeasure();
double maxMeasure = sfh.getMaxMeasure();
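// Per the ESRI shapefile spec, measure values less than -10^38 mean "no measure data"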
if (maxMeasure <= -10E38)
maxMeasure = Double.NaN;
double minZ = sfh.getMinZ();
double maxZ = sfh.getMaxZ();
// Get X,Y extents if srid is not geodetic
if (defaultX && m_srid != 0) {
PreparedStatement psSrid = conn
.prepareStatement("SELECT COUNT(*) cnt FROM MDSYS.GEODETIC_SRIDS WHERE srid = ?");
psSrid.setInt(1, m_srid);
ResultSet rs = psSrid.executeQuery();
if (rs.next()) {
if (rs.getInt("cnt") == 0) {
min_x = String.valueOf(sfh.getMinX());
max_x = String.valueOf(sfh.getMaxX());
// System.out.println("X: " + min_x +", "+ max_x);
}
}
psSrid.close();
}
if (defaultY && m_srid != 0) {
PreparedStatement psSrid = conn
.prepareStatement("SELECT COUNT(*) cnt FROM MDSYS.GEODETIC_SRIDS WHERE srid = ?");
psSrid.setInt(1, m_srid);
ResultSet rs = psSrid.executeQuery();
if (rs.next()) {
if (rs.getInt("cnt") == 0) {
min_y = String.valueOf(sfh.getMinY());
max_y = String.valueOf(sfh.getMaxY());
// System.out.println("Y: " + min_y +", "+ max_y);
}
}
psSrid.close();
}
// Get dimension of shapefile
int shpDims = sfh.getShpDims(shpFileType, maxMeasure);
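// shpDims is 2 (or 0) for XY, 3 for XYZ or XYM, and 4 for XYZM shapes, as handled below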
// Construct dimArrays
if (shpDims == 2 || shpDims == 0) {
dimArray = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + m_tolerance + "))";
dimArrayMig = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + mg_tolerance + "))";
} else if (shpDims == 3 && Double.isNaN(maxMeasure)) {
dimArray = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Z', " + minZ + ", " + maxZ + ", "
+ m_tolerance + "))";
dimArrayMig = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Z', " + minZ + ", " + maxZ + ", "
+ mg_tolerance + "))";
} else if (shpDims == 3) {
dimArray = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('M', " + minMeasure + ", "
+ maxMeasure + ", " + m_tolerance + "))";
dimArrayMig = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('M', " + minMeasure + ", "
+ maxMeasure + ", " + mg_tolerance + "))";
} else if (shpDims == 4) {
dimArray = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + m_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Z', " + minZ + ", " + maxZ + ", "
+ m_tolerance + "), " + "MDSYS.SDO_DIM_ELEMENT('M', "
+ minMeasure + ", " + maxMeasure + ", " + m_tolerance
+ "))";
dimArrayMig = "MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X', "
+ min_x + ", " + max_x + ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Y', " + min_y + ", " + max_y
+ ", " + mg_tolerance + "), "
+ "MDSYS.SDO_DIM_ELEMENT('Z', " + minZ + ", " + maxZ + ", "
+ mg_tolerance + "), " + "MDSYS.SDO_DIM_ELEMENT('M', "
+ minMeasure + ", " + maxMeasure + ", " + mg_tolerance
+ "))";
}
// Call create table
if (skip_create_table == 0)
prepareTableForData(conn, dbfr, sf, sfh);
else
System.out.println("Appending to existing table\n");
// //////////////////////////////////////////////////////////////////////////
// Conversion from Feature to DB
// //////////////////////////////////////////////////////////////////////////
int error_cnt = 0;
int numFields = dbfr.numFields();
int numRecords = dbfr.numRecords();
byte[] fieldTypes = new byte[numFields];
for (int field = 0; field < numFields; field++)
fieldTypes[field] = dbfr.getFieldType(field);
Hashtable ht = null;
// Get first feature record to determine num of columns
ht = sf.fromRecordToFeature(dbfr, sfh, fieldTypes, numFields, 0, m_srid);
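// The returned Hashtable maps Oracle column names to attribute values; the JGeometry
// itself is stored under the key "geometry"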
// Num of columns
int val = ht.size();
String params = null;
String paramsM = null;
if (m_idName == null)
params = "(";
else
params = "(?,";
for (int i = 0; i < val; i++) {
if (i == 0)
params = params + " ?";
else
params = params + ", ?";
}
params = params + ")";
paramsM = params.substring(0, (params.length() - 2))
+ "MDSYS.SDO_MIGRATE.TO_CURRENT(?, " + dimArrayMig + "))";
String[] colNames = sf.getOraFieldNames(dbfr, fieldTypes, numFields);
// Create prepared statements
String insertRec = "INSERT INTO " + m_tableName + " VALUES" + params;
PreparedStatement ps = conn.prepareStatement(insertRec);
PreparedStatement psCom = conn.prepareStatement("COMMIT");
String insertMig = "INSERT INTO " + m_tableName + " VALUES" + paramsM;
PreparedStatement psMig = conn.prepareStatement(insertMig);
ResultSet resMig = null;
STRUCT str = null;
for (int i = 0; i < numRecords; i++) {
// Progress message every 10 records; edit to adjust, or comment out to disable
if ((i + 1) % 10 == 0)
System.out.println("Converting record " + (i + 1));
// ////////////////////////////////////////////////////////////////////////////
ht = sf.fromRecordToFeature(dbfr, sfh, fieldTypes, numFields, i,
m_srid);
if (m_idName == null) {
try {
// Migrate geometry if polygon, polygonz, or polygonm
if (shpFileType == 5 || shpFileType == 15
|| shpFileType == 25) {
for (int j = 0; j < colNames.length; j++) {
if ((ht.get(colNames[j]) instanceof String))
psMig.setString((j + 1),
(String) ht.get(colNames[j]));
else if ((ht.get(colNames[j]) instanceof Integer))
psMig.setInt((j + 1), ((Integer) ht
.get(colNames[j])).intValue());
else if ((ht.get(colNames[j]) instanceof Double))
psMig.setDouble((j + 1), ((Double) ht
.get(colNames[j])).doubleValue());
else
throw new RuntimeException(
"Unsupported Column Type");
}// end for_colNames
str = (STRUCT) JGeometry.store(
(JGeometry) ht.get("geometry"), conn);
psMig.setObject((colNames.length + 1), str);
psMig.executeUpdate();
} else {
for (int j = 0; j < colNames.length; j++) {
if ((ht.get(colNames[j]) instanceof String))
ps.setString((j + 1),
(String) ht.get(colNames[j]));
else if ((ht.get(colNames[j]) instanceof Integer))
ps.setInt((j + 1), ((Integer) ht
.get(colNames[j])).intValue());
else if ((ht.get(colNames[j]) instanceof Double))
ps.setDouble((j + 1), ((Double) ht
.get(colNames[j])).doubleValue());
else
throw new RuntimeException(
"Unsupported Column Type");
}// end for_colNames
str = (STRUCT) JGeometry.store(
(JGeometry) ht.get("geometry"), conn);
ps.setObject((colNames.length + 1), str);
ps.executeUpdate();
}
} catch (SQLException e) {
error_cnt = error_cnt + 1;
System.out.println(e + "\nRecord " + (i + 1)
+ " not converted.");
}
}// if_m_idName
else {
int id = i + m_start_id;
try {
// Migrate geometry if polygon, polygonz, or polygonm
if (shpFileType == 5 || shpFileType == 15
|| shpFileType == 25) {
psMig.setInt(1, id);
for (int j = 0; j < colNames.length; j++) {
if ((ht.get(colNames[j]) instanceof String))
psMig.setString((j + 2),
(String) ht.get(colNames[j]));
else if ((ht.get(colNames[j]) instanceof Integer))
psMig.setInt((j + 2), ((Integer) ht
.get(colNames[j])).intValue());
else if ((ht.get(colNames[j]) instanceof Double))
psMig.setDouble((j + 2), ((Double) ht
.get(colNames[j])).doubleValue());
else
throw new RuntimeException(
"Unsupported Column Type");
}// end for_colNames
str = (STRUCT) JGeometry.store(
(JGeometry) ht.get("geometry"), conn);
psMig.setObject((colNames.length + 2), str);
psMig.executeUpdate();
} else {
ps.setInt(1, id);
for (int j = 0; j < colNames.length; j++) {
if ((ht.get(colNames[j]) instanceof String))
ps.setString((j + 2),
(String) ht.get(colNames[j]));
else if ((ht.get(colNames[j]) instanceof Integer))
ps.setInt((j + 2), ((Integer) ht
.get(colNames[j])).intValue());
else if ((ht.get(colNames[j]) instanceof Double))
ps.setDouble((j + 2), ((Double) ht
.get(colNames[j])).doubleValue());
else
throw new RuntimeException(
"Unsupported Column Type");
}// end for_colNames
str = (STRUCT) JGeometry.store(
(JGeometry) ht.get("geometry"), conn);
ps.setObject((colNames.length + 2), str);
ps.executeUpdate();
}
} catch (SQLException e) {
error_cnt = error_cnt + 1;
System.out.println(e + "\nRecord " + (i + 1)
+ " not converted.");
}
}
// Edit to adjust, or comment out to remove the commit interval; default 1000
if (m_commit_interval == -1) {
if ((i + 1) % 1000 == 0)
conn.commit();
} else {
if ((i + 1) % m_commit_interval == 0)
conn.commit();
}
// ///////////////////////////////////////////////////////////////////////////
}// end_for_each_record
conn.commit();
dbfr.closeDBF();
sfh.closeShapefile();
ps.close();
psMig.close();
psCom.close();
conn.close();
if (error_cnt > 0)
System.out.println(error_cnt + " record(s) not converted.");
System.out.println((numRecords - error_cnt) + " record(s) converted.");
System.out.println("Done.");
}// end Main[]
protected static void prepareTableForData(Connection conn,
DBFReaderJGeom dbfr, ShapefileFeatureJGeom sf,
ShapefileReaderJGeom sfh) throws IOException, SQLException {
// //////////////////////////////////////////////////////////////////////////
// Preparation of the database
// //////////////////////////////////////////////////////////////////////////
// Drop table
System.out.println("Dropping old table...");
Statement stmt = null;
String update;
try {
stmt = conn.createStatement();
update = "DROP TABLE " + m_tableName;
stmt.executeUpdate(update);
stmt.close();
} catch (SQLException de) {
System.out.println(de);
}
// Delete reference to it from metadata table
try {
stmt = conn.createStatement();
// For Oracle Spatial 8.1.6+ databases
update = "DELETE FROM " + geomMetaDataTable
+ " WHERE table_name = '" + m_tableName.toUpperCase() + "'";
stmt.executeUpdate(update);
stmt.close();
} catch (SQLException de) {
System.out.println(de);
}
try {
// Try to find and replace instances of "geometry" with m_geom
String relSchema = sf.getRelSchema(dbfr, m_idName);
String updatedRelSchema = replaceAllWords1(relSchema, "geometry",
m_geom);
// System.out.println(updatedRelSchema);
// Create feature table
System.out.println("Creating new table...");
// System.out.println("RelSchema: " + sf.getRelSchema(dbfr,
// m_idName));
stmt = conn.createStatement();
update = "CREATE TABLE " + m_tableName + " (" +
/* sf.getRelSchema(dbfr, m_idName) */updatedRelSchema + ")";
stmt.executeUpdate(update);
stmt.close();
} catch (SQLException de) {
System.out.println(de);
}
if (m_srid != 0) {
try {
// Add reference to geometry metadata table.
stmt = conn.createStatement();
update = "INSERT INTO " + geomMetaDataTable + " VALUES ('"
+ m_tableName + "', '" + m_geom.toUpperCase() + "', "
+ dimArray + ", " + m_srid + ")";
stmt.executeUpdate(update);
stmt.close();
} catch (SQLException de) {
System.out.println(de);
}
} else {
try {
// Add reference to geometry metadata table.
stmt = conn.createStatement();
update = "INSERT INTO " + geomMetaDataTable + " VALUES ('"
+ m_tableName + "', '" + m_geom.toUpperCase() + "', "
+ dimArray + ", NULL)";
stmt.executeUpdate(update);
stmt.close();
} catch (SQLException de) {
System.out.println(de);
}
}
}
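/**
* Replaces whole-word occurrences of <code>find</code> with <code>replacement</code>.
* The input is tokenized on SQL delimiters so that matches embedded inside longer
* identifiers are left untouched; it is used above to rename the default "geometry"
* column in the generated schema to the name supplied with -g.
*/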
static String replaceAllWords1(String original, String find,
String replacement) {
String result = "";
String delimiters = "+-*/(),. ";
StringTokenizer st = new StringTokenizer(original, delimiters, true);
while (st.hasMoreTokens()) {
String w = st.nextToken();
if (w.equals(find)) {
result = result + replacement;
} else {
result = result + w;
}
}
return result;
}
}
I got this code above from the Internet. Whenever I try to run it in Eclipse, I always get a FileNotFoundException, even though the files are in the same folder as SampleShapefileToJGeomFeature.java. Can anyone shed some light on this?