Revision 44669 trunk/org.gvsig.desktop/org.gvsig.desktop.compat.cdc/org.gvsig.fmap.dal/org.gvsig.fmap.dal.file/org.gvsig.fmap.dal.file.dbf/src/main/java/org/gvsig/fmap/dal/store/dbf/utils/DbaseFileHeader.java

DbaseFileHeader.java (unified diff: "-" marks lines removed in r44669, "+" marks lines added, unprefixed lines are unchanged context)
 import java.util.SortedMap;

 import org.gvsig.fmap.dal.DataTypes;
+import org.gvsig.fmap.dal.exception.DataException;
 import org.gvsig.fmap.dal.exception.UnsupportedVersionException;
+import org.gvsig.fmap.dal.feature.EditableFeatureAttributeDescriptor;
+import org.gvsig.fmap.dal.feature.EditableFeatureType;
 import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor;
 import org.gvsig.fmap.dal.feature.FeatureType;
 import org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException;
+import org.gvsig.fmap.dal.feature.exception.UnknownDataTypeException;
+import org.gvsig.fmap.dal.store.dbf.DBFStoreProvider;
+import org.gvsig.fmap.dal.store.dbf.DuplicatedFieldNameException;
+import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_BOOLEAN;
+import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_DATE;
+import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_FLOAT;
+import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_NUMBER;
+import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_STRING;
 import org.gvsig.tools.ToolsLocator;
+import org.gvsig.tools.dataTypes.DataType;
 import org.gvsig.utils.bigfile.BigByteBuffer2;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIMESTAMP_SIZE;
+import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIME_SIZE;

-
-
 /**
- * Class to represent the header of a Dbase III file. Creation date: (5/15/2001
- * 5:15:30 PM)
+ * Class to represent the header of a Dbase III file.
  */
+@SuppressWarnings("UseSpecificCatch")
 public class DbaseFileHeader {
-    // Constant for the size of a record
-    private final int FILE_DESCRIPTOR_SIZE = 32;

-    // type of the file, must be 03h
-    private static final byte MAGIC = 0x03;
+  private static final Logger LOGGER = LoggerFactory.getLogger(DbaseFileHeader.class);

-    private static final int MINIMUM_HEADER = 33;
+  // Constant for the size of a record
+  private final int FILE_DESCRIPTOR_SIZE = 32;

-    // type of the file, must be 03h
-    private int myFileType = 0x03;
+  // type of the file, must be 03h
+  private static final byte MAGIC = 0x03;

-    // Date the file was last updated.
-    private Date myUpdateDate = new Date();
+  private static final int MINIMUM_HEADER = 33;

-    // Number of records in the datafile
-    private int myNumRecords = 0;
+  // type of the file, must be 03h
+  private int myFileType = 0x03;

-    // Length of the header structure
-    private int myHeaderLength;
+  // Date the file was last updated.
+  private Date myUpdateDate = new Date();

-    /**
-     * Length of the records. Set to 1 as the default value as if there is
-     * not any defined column, at least the deleted status initial byte
-     * is taken into account.
-     */
-    private int myRecordLength = 1;
+  // Number of records in the datafile
+  private int myNumRecords = 0;

-    // Number of fields in the record.
-    private int myNumFields;
+  // Length of the header structure
+  private int myHeaderLength;

-    // collection of header records.
-    private DbaseFieldDescriptor[] myFieldDescriptions;
+  /**
+   * Length of the records. Set to 1 as the default value as if there is not any
+   * defined column, at least the deleted status initial byte is taken into
+   * account.
+   */
+  private int myRecordLength = 1;

-    private int myLanguageID = 0x00;
-    /**
-     * Only considered when myLanguageID = 0x00;
-     */
-    private String charset = null;
+  // Number of fields in the record.
+  private int myNumFields;

-    private List<String>   encodingSupportedByString = null;
+  // collection of header records.
+  private DbaseFieldDescriptor[] myFieldDescriptions;

-    private int origLanguageID = 0x00;
-
-    /**
-     * Headers must always be encoded using ASCII/ISO-8859-1, regardless the
-     * encoding of the records
-     */
-    private static final Charset headerCharset = Charset.forName("ISO-8859-1");
+  private int myLanguageID = 0x00;
+  /**
+   * Only considered when myLanguageID = 0x00;
+   */
+  private String charset = null;

-    /**
-     * DbaseFileHreader constructor comment.
-     */
-    public DbaseFileHeader() {
-        super();
+  private List<String> encodingSupportedByString = null;

-        encodingSupportedByString = new ArrayList<String>();
-        SortedMap<String, Charset> m = Charset.availableCharsets();
-        Set<String> k = m.keySet();
-        Iterator<String> it = k.iterator();
-        while(it.hasNext()) {
-            encodingSupportedByString.add(it.next());
-        }
-    }
+  private int origLanguageID = 0x00;

-    /**
-     * Add a column to this DbaseFileHeader. The type is one of (C N L or D)
-     * character, number, logical(true/false), or date. The Field length is
-     * the total length in bytes reserved for this column. The decimal count
-     * only applies to numbers(N), and floating point values (F), and refers
-     * to the number of characters to reserve after the decimal point.
-     *
-     * @param inFieldName DOCUMENT ME!
-     * @param inFieldType DOCUMENT ME!
-     * @param inFieldLength DOCUMENT ME!
-     * @param inDecimalCount DOCUMENT ME!
-     * @throws org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException
+  /**
+   * Headers must always be encoded using ASCII/ISO-8859-1, regardless the
+   * encoding of the records
+   */
+  private static final Charset HEADER_CHARSET = Charset.forName("ISO-8859-1");

-     */
-    public void addColumn(String inFieldName, char inFieldType,
-        int inFieldLength, int inDecimalCount)
-            throws AttributeFeatureTypeNotSuportedException {
-        if (inFieldLength <= 0) {
-            inFieldLength = 1;
-        }
+  /**
+   * DbaseFileHreader constructor comment.
+   */
+  public DbaseFileHeader() {
+    super();

-        if (myFieldDescriptions == null) {
-            myFieldDescriptions = new DbaseFieldDescriptor[0];
-        }
+    encodingSupportedByString = new ArrayList<>();
+    SortedMap<String, Charset> m = Charset.availableCharsets();
+    Set<String> k = m.keySet();
+    Iterator<String> it = k.iterator();
+    while (it.hasNext()) {
+      encodingSupportedByString.add(it.next());
+    }
+  }

-        int tempLength = 1; // the length is used for the offset, and there is a * for deleted as the first byte
-        DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length +
-            1];
+  /**
+   * Add a column to this DbaseFileHeader.The type is one of (C N L or D)
+ character, number, logical(true/false), or date. The Field length is the
+ total length in bytes reserved for this column. The decimal count only
+ applies to numbers(N), and floating point values (F), and refers to the
+ number of characters to reserve after the decimal point.
+   *
+   * @param fieldName DOCUMENT ME!
+   * @param fieldType DOCUMENT ME!
+   * @param fieldSize DOCUMENT ME!
+   * @param fieldPrecision
+   * @param fieldScale DOCUMENT ME!
+   * @throws
+   * org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException
+   *
+   */
+  public void addColumn(String fieldName, char fieldType,
+          int fieldSize, int fieldPrecision, int fieldScale)
+          throws AttributeFeatureTypeNotSuportedException {
+//    if (fieldSize <= 0) {
+//      fieldSize = 1;
+//    }

-        for (int i = 0; i < myFieldDescriptions.length; i++) {
-            myFieldDescriptions[i].myFieldDataAddress = tempLength;
-            tempLength = tempLength + myFieldDescriptions[i].myFieldLength;
-            tempFieldDescriptors[i] = myFieldDescriptions[i];
-        }
+    if (myFieldDescriptions == null) {
+      myFieldDescriptions = new DbaseFieldDescriptor[0];
+    }

-        tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
-        tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = inFieldLength;
-        tempFieldDescriptors[myFieldDescriptions.length].myDecimalCount = inDecimalCount;
-        tempFieldDescriptors[myFieldDescriptions.length].myFieldDataAddress = tempLength;
+    int tempLength = 1; // the length is used for the offset, and there is a * for deleted as the first byte
+    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
+            + 1];

-        // set the field name
-        String tempFieldName = inFieldName;
+    for (int i = 0; i < myFieldDescriptions.length; i++) {
+      myFieldDescriptions[i].setOffsetInRecord(tempLength);
+      tempLength = tempLength + myFieldDescriptions[i].getSize();
+      tempFieldDescriptors[i] = myFieldDescriptions[i];
+    }

-        if (tempFieldName == null) {
-            tempFieldName = "NoName";
-        }
+    tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
+    tempFieldDescriptors[myFieldDescriptions.length].setType(fieldType);
+    tempFieldDescriptors[myFieldDescriptions.length].setSize(fieldSize);
+    tempFieldDescriptors[myFieldDescriptions.length].setPrecision(fieldPrecision);
+    tempFieldDescriptors[myFieldDescriptions.length].setScale(fieldScale);
+    tempFieldDescriptors[myFieldDescriptions.length].setOffsetInRecord(tempLength);

-        if (tempFieldName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
-            tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH);
-            warn("FieldName " + inFieldName +
-                " is longer than "+DbaseFile.MAX_FIELD_NAME_LENGTH+" characters, truncating to " +
-                tempFieldName);
-        }
+    // set the field name
+    String tempFieldName = fieldName;

-        tempFieldDescriptors[myFieldDescriptions.length].myFieldName = tempFieldName;
-        tempFieldDescriptors[myFieldDescriptions.length].myFieldName_trim = tempFieldName
-                .trim();
+    if (tempFieldName == null) {
+      tempFieldName = "NoName";
+    }

-        // the field type
-        if ((inFieldType == 'C') || (inFieldType == 'c')) {
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'C';
+    if (tempFieldName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
+      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH);
+      warn("FieldName " + fieldName
+              + " is longer than " + DbaseFile.MAX_FIELD_NAME_LENGTH + " characters, truncating to "
+              + tempFieldName);
+    }

-            if (inFieldLength > 254) {
-                warn("Field Length for " + inFieldName + " set to " +
-                    inFieldLength +
-                    " Which is longer than 254, not consistent with dbase III");
-            }
-        } else if ((inFieldType == 'S') || (inFieldType == 's')) {
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'C';
-            warn("Field type for " + inFieldName +
-                " set to S which is flat out wrong people!, I am setting this to C, in the hopes you meant character.");
+    tempFieldDescriptors[myFieldDescriptions.length].setName(tempFieldName);
+    tempFieldDescriptors[myFieldDescriptions.length].setName_trim(tempFieldName.trim());

-            if (inFieldLength > 254) {
-                warn("Field Length for " + inFieldName + " set to " +
-                    inFieldLength +
-                    " Which is longer than 254, not consistent with dbase III");
-            }
+    // the length of a record
+    tempLength += tempFieldDescriptors[myFieldDescriptions.length].getSize();

-            tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = 8;
-        } else if ((inFieldType == 'D') || (inFieldType == 'd')) {
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'D';
+    // set the new fields.
+    myFieldDescriptions = tempFieldDescriptors;
+    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
+    myNumFields = myFieldDescriptions.length;
+    myRecordLength = tempLength;
+  }
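The bookkeeping above is easiest to see with concrete numbers. A short usage sketch of the new five-argument addColumn (hypothetical column names and sizes, not taken from this revision):

// Illustrative sketch only: two character columns added through the new API.
DbaseFileHeader header = new DbaseFileHeader();
header.addColumn("NAME", 'C', 25, 0, 0);   // 25-byte character field
header.addColumn("CITY", 'C', 40, 0, 0);   // 40-byte character field
// Header length becomes 33 + 32 * 2 = 97 bytes (one 32-byte descriptor per field on top
// of the 33-byte fixed part), and the record length becomes 1 + 25 + 40 = 66 bytes,
// where the leading byte is the deleted-record flag.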

  
-            if (inFieldLength != 8) {
-                warn("Field Length for " + inFieldName + " set to " +
-                    inFieldLength + " Setting to 8 digets YYYYMMDD");
-            }
+  /**
+   * Remove a column from this DbaseFileHeader.
+   *
+   * @param inFieldName
+   *
+   * @return index of the removed column, -1 if no found
+   */
+  public int removeColumn(String inFieldName) {
+    int retCol = -1;
+    int tempLength = 1;
+    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
+            - 1];

-            tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = 8;
-        } else if ((inFieldType == 'F') || (inFieldType == 'f')) {
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'F';
+    for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) {
+      if (!inFieldName.equalsIgnoreCase(
+              myFieldDescriptions[i].getName().trim())) {
+        // if this is the last field and we still haven't found the
+        // named field
+        if ((i == j) && (i == (myFieldDescriptions.length - 1))) {
+//                    System.err.println("Could not find a field named '" +
+//                        inFieldName + "' for removal");

-            if (inFieldLength > 20) {
-                warn("Field Length for " + inFieldName + " set to " +
-                    inFieldLength +
-                    " Preserving length, but should be set to Max of 20 not valid for dbase IV, and UP specification, not present in dbaseIII.");
-            }
-        } else if ((inFieldType == 'N') || (inFieldType == 'n')) {
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'N';
-
-            if (inFieldLength > 18) {
-                warn("Field Length for " + inFieldName + " set to " +
-                    inFieldLength +
-                    " Preserving length, but should be set to Max of 18 for dbase III specification.");
-            }
-
-            if (inDecimalCount < 0) {
-                warn("Field Decimal Position for " + inFieldName + " set to " +
-                    inDecimalCount +
-                    " Setting to 0 no decimal data will be saved.");
-                tempFieldDescriptors[myFieldDescriptions.length].myDecimalCount = 0;
-            }
-//
-//            if (inDecimalCount > (inFieldLength - 1)) {
-//                warn("Field Decimal Position for " + inFieldName + " set to " +
-//                    inDecimalCount + " Setting to " + (inFieldLength - 1) +
-//                    " no non decimal data will be saved.");
-//                tempFieldDescriptors[myFieldDescriptions.length].myDecimalCount = inFieldLength -
-//                    1;
-//            }
-        } else if ((inFieldType == 'L') || (inFieldType == 'l')) {
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'L';
-
-            if (inFieldLength != 1) {
-                warn("Field Length for " + inFieldName + " set to " +
-                    inFieldLength +
-                    " Setting to length of 1 for logical fields.");
-            }
-
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = 1;
-        } else {
-            throw new AttributeFeatureTypeNotSuportedException(tempFieldName,
-                    inFieldType, ToolsLocator.getDataTypesManager().getTypeName(inFieldType), "DBF");
+          return retCol;
         }

-        // the length of a record
-        tempLength = tempLength +
-            tempFieldDescriptors[myFieldDescriptions.length].myFieldLength;
+        tempFieldDescriptors[j] = myFieldDescriptions[i];
+        tempFieldDescriptors[j].setOffsetInRecord(tempLength);
+        tempLength += tempFieldDescriptors[j].getSize();

-        // set the new fields.
-        myFieldDescriptions = tempFieldDescriptors;
-        myHeaderLength = 33 + (32 * myFieldDescriptions.length);
-        myNumFields = myFieldDescriptions.length;
-        myRecordLength = tempLength;
+        // only increment j on non-matching fields
+        j++;
+      } else {
+        retCol = i;
+      }
     }

-    /**
-     * Remove a column from this DbaseFileHeader.
-     *
-     * @param inFieldName DOCUMENT ME!
-     *
-     * @return index of the removed column, -1 if no found
-     */
-    public int removeColumn(String inFieldName) {
-        int retCol = -1;
-        int tempLength = 1;
-        DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length -
-            1];
+    // set the new fields.
+    myFieldDescriptions = tempFieldDescriptors;
+    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
+    myNumFields = myFieldDescriptions.length;
+    myRecordLength = tempLength;

-        for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) {
-            if (!inFieldName.equalsIgnoreCase(
-                        myFieldDescriptions[i].myFieldName.trim())) {
-                // if this is the last field and we still haven't found the
-                // named field
-                if ((i == j) && (i == (myFieldDescriptions.length - 1))) {
-//                    System.err.println("Could not find a field named '" +
-//                        inFieldName + "' for removal");
+    return retCol;
+  }

-                    return retCol;
-                }
+  private void warn(String inWarn) {
+    LOGGER.warn(inWarn);
+  }

-                tempFieldDescriptors[j] = myFieldDescriptions[i];
-                tempFieldDescriptors[j].myFieldDataAddress = tempLength;
-                tempLength += tempFieldDescriptors[j].myFieldLength;
+  /**
+   * Return the Field Descriptor for the given field.
+   *
+   * @param index, the index of the requeted field description
+   * @return the dbase field descriptor.
+   */
+  public DbaseFieldDescriptor getFieldDescription(int index) {
+    return myFieldDescriptions[index];
+  }

-                // only increment j on non-matching fields
-                j++;
-            } else {
-                retCol = i;
-            }
-        }
+  public DbaseFieldDescriptor getFieldDescription(String name) {
+    int index = this.getFieldIndex(name);
+    return myFieldDescriptions[index];
+  }

-        // set the new fields.
-        myFieldDescriptions = tempFieldDescriptors;
-        myHeaderLength = 33 + (32 * myFieldDescriptions.length);
-        myNumFields = myFieldDescriptions.length;
-        myRecordLength = tempLength;
-
-        return retCol;
+  public int getFieldIndex(String name) {
+    for (int i = 0; i < myFieldDescriptions.length; i++) {
+      if (myFieldDescriptions[i].getName_trim()
+              .equalsIgnoreCase(name)) {
+        return i;
+      }
     }
+    return -1;
+  }

-    /**
-     * DOCUMENT ME!
-     *
-     * @param inWarn DOCUMENT ME!
-     */
-    private void warn(String inWarn) {
-        //TODO Descomentar esto cuando tenga la clase warning support
-        //    warnings.warn(inWarn);
-    }
+  /**
+   * Return the date this file was last updated.
+   *
+   * @return
+   */
+  public Date getLastUpdateDate() {
+    return myUpdateDate;
+  }

-    /**
-     * Return the Field Descriptor for the given field.
-     * @param inIndex, the index of the requeted field description
-     * @return the dbase field descriptor.
-     */
-    public DbaseFieldDescriptor getFieldDescription(int inIndex) {
-        return myFieldDescriptions[inIndex];
-    }
+  /**
+   * Return the number of fields in the records.
+   *
+   * @return
+   */
+  public int getNumFields() {
+    return myNumFields;
+  }

-    // Retrieve the length of the field at the given index
-    public int getFieldLength(int inIndex) {
-        return myFieldDescriptions[inIndex].myFieldLength;
-    }
+  /**
+   * Return the number of records in the file
+   *
+   * @return
+   */
+  public int getNumRecords() {
+    return myNumRecords;
+  }

-    // Retrieve the location of the decimal point within the field.
-    public int getFieldDecimalCount(int inIndex) {
-        return myFieldDescriptions[inIndex].myDecimalCount;
-    }
+  /**
+   * Return the length of the records in bytes.
+   *
+   * @return
+   */
+  public int getRecordLength() {
+    return myRecordLength;
+  }

-    // Retrieve the Name of the field at the given index
-    public String getFieldName(int inIndex) {
-        return myFieldDescriptions[inIndex].myFieldName;
-    }
+  /**
+   * Return the length of the header
+   *
+   * @return
+   */
+  public int getHeaderLength() {
+    return myHeaderLength;
+  }

-    public int getFieldIndex(String name) {
-        for (int i = 0; i < myFieldDescriptions.length; i++) {
-            if (myFieldDescriptions[i].myFieldName_trim
-                    .equalsIgnoreCase(name)) {
-                return i;
-            }
-        }
-        return -1;
-    }
+  /**
+   * Read the header data from the DBF file.
+   *
+   * @param in
+   * @param charsName
+   * @param allowDuplicatedFieldNames
+   * @throws UnsupportedVersionException
+   * @throws AttributeFeatureTypeNotSuportedException
+   *
+   */
+  public void read(BigByteBuffer2 in, String charsName, boolean allowDuplicatedFieldNames)
+          throws UnsupportedVersionException,
+            AttributeFeatureTypeNotSuportedException {
+    // type of file.
+    myFileType = in.get();

-    // Retrieve the type of field at the given index
-    public char getFieldType(int inIndex) {
-        return myFieldDescriptions[inIndex].myFieldType;
+    if (myFileType != 0x03) {
+      throw new UnsupportedVersionException("DBF", Integer.toHexString(myFileType));
     }

-    /**
-     * Return the date this file was last updated.
-     *
-     * @return DOCUMENT ME!
-     */
-    public Date getLastUpdateDate() {
-        return myUpdateDate;
-    }
+    // parse the update date information.
+    int tempUpdateYear = in.get();
+    int tempUpdateMonth = in.get();
+    int tempUpdateDay = in.get();
+    tempUpdateYear = tempUpdateYear + 1900;

-    /**
-     * Return the number of fields in the records.
-     *
-     * @return DOCUMENT ME!
-     */
-    public int getNumFields() {
-        return myNumFields;
-    }
+    Calendar c = Calendar.getInstance();
+    c.set(Calendar.YEAR, tempUpdateYear);
+    c.set(Calendar.MONTH, tempUpdateMonth - 1);
+    c.set(Calendar.DATE, tempUpdateDay);
+    myUpdateDate = c.getTime();

-    /**
-     * Return the number of records in the file
-     *
-     * @return DOCUMENT ME!
-     */
-    public int getNumRecords() {
-        return myNumRecords;
-    }
+    // read the number of records.
+    in.order(ByteOrder.LITTLE_ENDIAN);
+    myNumRecords = in.getInt();

-    /**
-     * Return the length of the records in bytes.
-     *
-     * @return DOCUMENT ME!
-     */
-    public int getRecordLength() {
-        return myRecordLength;
-    }
+    // read the length of the header structure.
+    myHeaderLength = in.getShort();

-    /**
-     * Return the length of the header
-     *
-     * @return DOCUMENT ME!
-     */
-    public int getHeaderLength() {
-        return myHeaderLength;
-    }
+    // read the length of a record
+    myRecordLength = in.getShort(); //posicon 0h

-    /**
-     * Read the header data from the DBF file.
-     *
-     * @param in
-     *            DOCUMENT ME!
-     * @param charsName
-     * @throws UnsupportedVersionException
-     * @throws UnsupportedEncodingException
+    in.order(ByteOrder.BIG_ENDIAN);

-     */
-    public void readHeader(BigByteBuffer2 in, String charsName, boolean allowDuplicatedFieldNames)
-            throws UnsupportedVersionException, UnsupportedEncodingException {
-        // type of file.
-        myFileType = in.get();
+    // read the language bit (LDID)
+    in.position(29);
+    origLanguageID = byteAsUnsigned(in.get());
+    if (charsName != null) {
+      // ignore the language bit, use the provided charset name
+      myLanguageID = DbaseCodepage.getLdid(charsName);
+      this.charset = charsName;
+    } else {
+      // use the read the language bit
+      myLanguageID = origLanguageID;
+      charsName = getCharsetName();
+    }

-        if (myFileType != 0x03) {
-            throw new UnsupportedVersionException("DBF", Integer.toHexString(myFileType));
-        }
+    // Posicionamos para empezar a leer los campos.
+    in.position(32);

-        // parse the update date information.
-        int tempUpdateYear = in.get();
-        int tempUpdateMonth = in.get();
-        int tempUpdateDay = in.get();
-        tempUpdateYear = tempUpdateYear + 1900;
+    // calculate the number of Fields in the header
+    myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;

-        Calendar c = Calendar.getInstance();
-        c.set(Calendar.YEAR, tempUpdateYear);
-        c.set(Calendar.MONTH, tempUpdateMonth - 1);
-        c.set(Calendar.DATE, tempUpdateDay);
-        myUpdateDate = c.getTime();
+    // read all of the header records
+    myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
+    int fieldOffset = 0;

-        // read the number of records.
-        in.order(ByteOrder.LITTLE_ENDIAN);
-        myNumRecords = in.getInt();
+    List fieldNames = new ArrayList<>();

-        // read the length of the header structure.
-        myHeaderLength = in.getShort();
+    // FIXME: should field names be always read using ISO8859-1??
+    for (int i = 0; i < myNumFields; i++) {
+      myFieldDescriptions[i] = new DbaseFieldDescriptor();

-        // read the length of a record
-        myRecordLength = in.getShort(); //posicon 0h
+      // read the field name
+      byte[] buffer = new byte[11];
+      in.get(buffer);
+      String fieldName;
+      fieldName = new String(buffer, HEADER_CHARSET);

-        in.order(ByteOrder.BIG_ENDIAN);
+      if (allowDuplicatedFieldNames) {
+        fieldName = getUniqueFieldName(fieldName, fieldNames);
+      }
+      fieldNames.add(fieldName);

-        // read the language bit (LDID)
-        in.position(29);
-        origLanguageID  = byteAsUnsigned(in.get());
-        if (charsName != null) {
-            // ignore the language bit, use the provided charset name
-            myLanguageID = DbaseCodepage.getLdid(charsName);
-            this.charset = charsName;
-        }
-        else {
-            // use the read the language bit
-            myLanguageID = origLanguageID;
-            charsName = getCharsetName();
-        }
+      myFieldDescriptions[i].setName(fieldName);

-        // Posicionamos para empezar a leer los campos.
-        in.position(32);
+      myFieldDescriptions[i].setName_trim(myFieldDescriptions[i].getName().trim());

-        // calculate the number of Fields in the header
-        myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;
+      // read the field type
+      myFieldDescriptions[i].setType((char) in.get());

-        // read all of the header records
-        myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
-        int fieldOffset = 0;
+      // read the field data address, offset from the start of the record.
+      myFieldDescriptions[i].setOffsetInRecord(in.getInt());

-        List fieldNames = new ArrayList<String>();
+      // read the field length in bytes
+      int tempLength = in.get();

-        // FIXME: should field names be always read using ISO8859-1??
-        for (int i = 0; i < myNumFields; i++) {
-            myFieldDescriptions[i] = new DbaseFieldDescriptor();
+      if (tempLength < 0) {
+        tempLength = tempLength + 256;
+      }

-            // read the field name
-            byte[] buffer = new byte[11];
-            in.get(buffer);
-            String fieldName;
-            fieldName = new String(buffer, headerCharset);
+      myFieldDescriptions[i].setSize(tempLength);

-            if(allowDuplicatedFieldNames){
-                fieldName = getUniqueFieldName(fieldName, fieldNames);
-            }
-            fieldNames.add(fieldName);
+      // read the field decimal count in bytes
+      myFieldDescriptions[i].setScale(in.get());

-            myFieldDescriptions[i].myFieldName = fieldName;
+      myFieldDescriptions[i].setOffsetInRecord(fieldOffset);
+
+      myFieldDescriptions[i].calculateScaleAndPrecission();
+
+      fieldOffset += tempLength;

-            myFieldDescriptions[i].myFieldName_trim = myFieldDescriptions[i].myFieldName
-                    .trim();
+      // read the reserved bytes.
+      in.position(in.position() + 14);

-            // read the field type
-            myFieldDescriptions[i].myFieldType = (char) in.get();
+    }

-            // read the field data address, offset from the start of the record.
-            myFieldDescriptions[i].myFieldDataAddress = in.getInt();
+    // Last byte is a marker for the end of the field definitions.
+    in.get();
+  }
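For orientation, read() walks the fixed offsets of the standard dBASE III header. A stand-alone sketch of the same layout over a plain java.nio.ByteBuffer (a simplification of BigByteBuffer2, not part of this revision) is:

// Illustrative sketch only: the byte layout read() relies on.
static void dumpDbfHeader(java.nio.ByteBuffer buf) {
    buf.order(java.nio.ByteOrder.LITTLE_ENDIAN);
    int fileType = buf.get(0) & 0xFF;              // must be 0x03
    int year = 1900 + (buf.get(1) & 0xFF);         // last update date stored as YY MM DD
    int month = buf.get(2) & 0xFF;
    int day = buf.get(3) & 0xFF;
    int numRecords = buf.getInt(4);                // 32-bit little-endian record count
    int headerLength = buf.getShort(8);            // 16-bit header length
    int recordLength = buf.getShort(10);           // 16-bit record length
    int ldid = buf.get(29) & 0xFF;                 // language driver id (code page)
    int numFields = (headerLength - 32 - 1) / 32;  // 32-byte descriptors start at offset 32
                                                   // and end with a 0x0D marker byte
    System.out.printf("type=%02x updated=%04d-%02d-%02d records=%d recordLength=%d fields=%d ldid=%02x%n",
            fileType, year, month, day, numRecords, recordLength, numFields, ldid);
}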

  
-            // read the field length in bytes
-            int tempLength = in.get();
+  /**
+   * Set the number of records in the file
+   *
+   * @param inNumRecords
+   */
+  public void setNumRecords(int inNumRecords) {
+    myNumRecords = inNumRecords;
+  }

-            if (tempLength < 0) {
-                tempLength = tempLength + 256;
-            }
+  /**
+   * Returns the value of the unsigned byte as a short Bytes are always signed
+   * in Java, so if we are reading a C unsigned byte with value > 128, it will
+   * appear as a negative value.
+   *
+   * In this case, we need to get the original unsigned value and return it as
+   * short or int, as byte will never correctly store the value in Java.
+   *
+   * @return
+   */
+  private int byteAsUnsigned(byte b) {
+    int i;
+    if (b < 0) {
+      i = b & 0xFF;
+    } else {
+      i = b;
+    }
+    return i;
+  }
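A worked example of the sign fix that byteAsUnsigned applies (plain Java, nothing gvSIG specific):

// Illustrative only: a C-style unsigned byte such as the code page id 0xC8 (Windows EE)
// arrives as a negative Java byte, and the mask recovers the intended value.
byte raw = (byte) 0xC8;                        // stored as -56 in Java
int unsigned = raw & 0xFF;                     // 200, the value byteAsUnsigned(raw) returns
System.out.println(raw + " -> " + unsigned);   // prints "-56 -> 200"
// Since Java 8 the same conversion is also available as Byte.toUnsignedInt(raw).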

  
-            myFieldDescriptions[i].myFieldLength = tempLength;
+  /**
+   * Gets the Language driver IDs (code page) defined on the file header (or
+   * guessed from the provided charset)
+   *
+   * Some examples: 01h DOS USA code page 437 02h DOS Multilingual code page 850
+   * 03h Windows ANSI code page 1252 04h Standard Macintosh 64h EE MS-DOS code
+   * page 852 65h Nordic MS-DOS code page 865 66h Russian MS-DOS code page 866
+   * 67h Icelandic MS-DOS 68h Kamenicky (Czech) MS-DOS 69h Mazovia (Polish)
+   * MS-DOS 6Ah Greek MS-DOS (437G) 6Bh Turkish MS-DOS 96h Russian Macintosh 97h
+   * Eastern European Macintosh 98h Greek Macintosh C8h Windows EE code page
+   * 1250 C9h Russian Windows CAh Turkish Windows CBh Greek Windows
+   *
+   * See the java equivalences in
+   * {@link DbaseCodepage#dbfLdid} & {@link DbaseCodepage#ldidJava} objects.
+   *
+   * See some others here:
+   * https://github.com/infused/dbf/blob/master/docs/supported_encodings.csv
+   *
+   * @return
+   */
+  public int getLanguageID() {

-            // read the field decimal count in bytes
-            myFieldDescriptions[i].myDecimalCount = in.get();
+    return myLanguageID;
+  }
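As a usage sketch of the accessors above (hypothetical values; the exact charset names depend on DbaseCodepage's mapping tables), assume header is a DbaseFileHeader on which read(...) has already been called:

// Illustrative sketch only, not part of this revision.
int ldid = header.getLanguageID();                    // e.g. 0x03 on a "Windows ANSI" file
String charsetName = header.getCharsetName(ldid);     // typically "windows-1252" for 0x03
String usable = header.mappingEncoding(charsetName);  // falls back to "UTF-8" when the JVM
                                                      // does not list the reported charset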

  
-            // NUEVO: Calculamos los offsets aquí para no
-            // tener que recalcular cada vez que nos piden
-            // algo.
-            myFieldDescriptions[i].myFieldDataAddress = fieldOffset;
-            fieldOffset += tempLength;
-            // Fin NUEVO
-            // read the reserved bytes.
-            in.position(in.position() + 14);
-
-        }
-
-        // Last byte is a marker for the end of the field definitions.
-        in.get();
+  /**
+   * Write the header data to the DBF file.
+   *
+   * @param out A channel to write to. If you have an OutputStream you can
+   * obtain the correct channel by using
+   * java.nio.Channels.newChannel(OutputStream out).
+   *
+   * @throws IOException If errors occur.
+   */
+  public void write(FileChannel out) throws IOException {
+    // take care of the annoying case where no records have been added...
+    if (myHeaderLength <= 0) {
+      myHeaderLength = MINIMUM_HEADER;
     }

-    /**
-     * Set the number of records in the file
-     *
-     * @param inNumRecords DOCUMENT ME!
-     */
-    public void setNumRecords(int inNumRecords) {
-        myNumRecords = inNumRecords;
-    }
-
-    /**
-     * Returns the value of the unsigned byte as a short
-     * Bytes are always signed in Java, so if we are reading a C unsigned byte
-     * with value > 128, it will appear as a negative value.
-     *
-     * In this case, we need to get the original unsigned value and return it as
-     * short or int, as byte will never correctly store the value in Java.
-     *
-     * @return
-     */
-    private int byteAsUnsigned(byte b) {
-        int i;
-        if (b<0) {
-            i = b & 0xFF;
-        }
-        else {
-            i = b;
-        }
-        return i;
-    }
+    // Desde el principio
+    out.position(0);

-    /**
-     * Class for holding the information assicated with a record.
-     */
-    public class DbaseFieldDescriptor {
-        // Field Name
-        String myFieldName;
+    ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength);
+    buffer.order(ByteOrder.LITTLE_ENDIAN);

-        String myFieldName_trim;
+    // write the output file type.
+    buffer.put(MAGIC);

-        // Field Type (C N L D F or M)
-        char myFieldType;
+    // write the date stuff
+    Calendar c = Calendar.getInstance();
+    c.setTime(new Date());
+    buffer.put((byte) (c.get(Calendar.YEAR) % 100));
+    buffer.put((byte) (c.get(Calendar.MONTH) + 1));
+    buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH)));

-        // Field Data Address offset from the start of the record.
-        int myFieldDataAddress;
+    // write the number of records in the datafile.
+    buffer.putInt(myNumRecords);

-        // Length of the data in bytes
-        int myFieldLength;
+    // write the length of the header structure.
+    buffer.putShort((short) myHeaderLength);

-        // Field decimal count in Binary, indicating where the decimal is
-        int myDecimalCount;
-    }
+    // write the length of a record
+    buffer.putShort((short) myRecordLength);

-    /**
-     * Gets the Language driver IDs (code page) defined on the file header (or guessed
-     * from the provided charset)
-     *
-     * Some examples:
-     *  01h     DOS USA code page 437
-        02h     DOS Multilingual code page 850
-        03h     Windows ANSI code page 1252
-        04h     Standard Macintosh
-        64h     EE MS-DOS code page 852
-        65h     Nordic MS-DOS code page 865
-        66h     Russian MS-DOS code page 866
-        67h     Icelandic MS-DOS
-        68h     Kamenicky (Czech) MS-DOS
-        69h     Mazovia (Polish) MS-DOS
-        6Ah     Greek MS-DOS (437G)
-        6Bh     Turkish MS-DOS
-        96h     Russian Macintosh
-        97h     Eastern European Macintosh
-        98h     Greek Macintosh
-        C8h     Windows EE  code page 1250
-        C9h     Russian Windows
-        CAh     Turkish Windows
-        CBh     Greek Windows
-
-        See the java equivalences in {@link DbaseCodepage#dbfLdid} & {@link DbaseCodepage#ldidJava} objects.
-
-        See some others here: https://github.com/infused/dbf/blob/master/docs/supported_encodings.csv
-     * @return
-     */
-    public int getLanguageID() {
-
-        return myLanguageID;
-    }
+    // write the reserved bytes in the header
+    ((Buffer) buffer).position(((Buffer) buffer).position() + 17);

+    // write the language id
+    buffer.put((byte) getLanguageID());

+    // write the reserved bytes in the header
+    ((Buffer) buffer).position(((Buffer) buffer).position() + 2);

-    public static DbaseFileHeader createDbaseHeader(FeatureType featureType)
-            throws AttributeFeatureTypeNotSuportedException {
-        return createDbaseHeader(featureType, null);
-    }
-
-    public static DbaseFileHeader createDbaseHeader(FeatureType featureType, String charsetName)
-            throws AttributeFeatureTypeNotSuportedException {
-        DbaseFileHeader header = new DbaseFileHeader();
-        Iterator iterator=featureType.iterator();
-        header.myLanguageID = DbaseCodepage.getLdid(charsetName);
-        header.charset = charsetName;
-        while (iterator.hasNext()) {
-            FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();
+    // write all of the header records
+    int tempOffset = 0;

-            if(descriptor.isComputed()){
-                continue;
-            }
-            int type = descriptor.getType();
-            String colName = descriptor.getName();
+    if (myFieldDescriptions != null) {
+      for (int i = 0; i < myFieldDescriptions.length; i++) {
+        // write the field name
+        for (int j = 0; j < DbaseFile.MAX_FIELD_NAME_LENGTH + 1; j++) {
+          if (myFieldDescriptions[i].getName().length() > j) {
+            buffer.put((byte) myFieldDescriptions[i].getName().charAt(j));
+          } else {
+            buffer.put((byte) 0);
+          }
+        }

-            int fieldLen = descriptor.getSize();
-            int decimales = descriptor.getPrecision();
-            switch (type) {
-                case DataTypes.DOUBLE:
-                case DataTypes.FLOAT:
-                    if( decimales<1 ) {
-                        decimales=1;
-                    }
-                    if( fieldLen < 1 || fieldLen>18 ) {
-                        fieldLen = 18 - decimales;
-                        if( fieldLen<1 ) {
-                            fieldLen = 10;
-                            decimales = 8;
-                        }
-                    }
-                    header.addColumn(colName, 'N', fieldLen, decimales);
-                    break;
-                case DataTypes.INT:
-                case DataTypes.LONG:
-                    if( fieldLen < 1 || fieldLen>18 ) {
-                        fieldLen = 18;
-                    }
-                    header.addColumn(colName, 'N', fieldLen, 0);
-                    break;
-                case DataTypes.DATE:
-                    header.addColumn(colName, 'D', fieldLen, 0);
-                    break;
-                case DataTypes.BOOLEAN:
-                    header.addColumn(colName, 'L', 1, 0);
-                    break;
-                case DataTypes.STRING:
-                    header.addColumn(colName, 'C', Math.min(254, fieldLen), 0);
-                    break;
-                default:
-                    // Si no sabemos lo que es intentaremos guardarlo como un string
-                    header.addColumn(colName, 'C', Math.min(254, fieldLen<10?10:fieldLen), 0);
-                    break;
-            }
+        // write the field type
+        buffer.put((byte) myFieldDescriptions[i].getType());

-        }
-        return header;
-    }
-
-    /**
-     * Write the header data to the DBF file.
-     *
-     * @param out
-     *            A channel to write to. If you have an OutputStream you can
-     *            obtain the correct channel by using
-     *            java.nio.Channels.newChannel(OutputStream out).
-     *
-     * @throws IOException
-     *             If errors occur.
-     */
-    public void writeHeader(FileChannel out) throws IOException {
-        // take care of the annoying case where no records have been added...
-        if (myHeaderLength <= 0) {
-            myHeaderLength = MINIMUM_HEADER;
-        }
+        // // write the field data address, offset from the start of the
+        // record.
+        buffer.putInt(tempOffset);
+        tempOffset += myFieldDescriptions[i].getSize();

-        // Desde el principio
-        out.position(0);
+        // write the length of the field.
+        buffer.put((byte) myFieldDescriptions[i].getSize());

-        ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength);
-        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        // write the decimal count.
+        buffer.put((byte) myFieldDescriptions[i].getScale());

-        // write the output file type.
-        buffer.put(MAGIC);
+        // write the reserved bytes.
+        // for (in j=0; jj<14; j++) out.writeByteLE(0);
+        ((Buffer) buffer).position(((Buffer) buffer).position() + 14);
+      }
+    }
+    // write the end of the field definitions marker
+    buffer.put((byte) 0x0D);

-        // write the date stuff
-        Calendar c = Calendar.getInstance();
-        c.setTime(new Date());
-        buffer.put((byte) (c.get(Calendar.YEAR) % 100));
-        buffer.put((byte) (c.get(Calendar.MONTH) + 1));
-        buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH)));
+    ((Buffer) buffer).position(0);

-        // write the number of records in the datafile.
-        buffer.putInt(myNumRecords);
+    int r = buffer.remaining();

-        // write the length of the header structure.
-        buffer.putShort((short) myHeaderLength);
+    while ((r -= out.write(buffer)) > 0) {
+      // do nothing
+    }
+  }

-        // write the length of a record
-        buffer.putShort((short) myRecordLength);
+  public String getCharsetName() {
+    return getCharsetName(getLanguageID());
+  }

-        // write the reserved bytes in the header
-        ((Buffer)buffer).position(((Buffer)buffer).position() + 17);
-
-        // write the language id
-        buffer.put((byte)getLanguageID());
-
-        // write the reserved bytes in the header
-        ((Buffer)buffer).position(((Buffer)buffer).position() + 2);
+  public String getCharsetName(int ldid) {
+    if (ldid != 0) {
+      charset = DbaseCodepage.getCharsetName(ldid);
+    }
+    if (charset != null) {
+      // use charset otherwise
+      return charset;
+    }
+    // default
+    return "ISO-8859-1";
+  }

-        // write all of the header records
-        int tempOffset = 0;
+  public String getOriginalCharset() {
+    return getCharsetName(this.origLanguageID);
+  }

-        if (myFieldDescriptions != null) {
-            for (int i = 0; i < myFieldDescriptions.length; i++) {
-                // write the field name
-                for (int j = 0; j < DbaseFile.MAX_FIELD_NAME_LENGTH+1; j++) {
-                    if (myFieldDescriptions[i].myFieldName.length() > j) {
-                        buffer.put((byte) myFieldDescriptions[i].myFieldName.charAt(j));
-                    } else {
-                        buffer.put((byte) 0);
-                    }
-                }
+  public String mappingEncoding(String dbfEnconding) {
+    if (encodingSupportedByString.contains(dbfEnconding)) {
+      return dbfEnconding;
+    } else {
+      return "UTF-8";
+    }
+  }
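A minimal usage sketch for the new write(FileChannel) entry point (hypothetical file name and column; the javadoc's Channels.newChannel hint applies when starting from an OutputStream):

// Illustrative sketch only, not part of this revision.
static void writeEmptyDbf(java.io.File target) throws Exception {
  DbaseFileHeader header = new DbaseFileHeader();
  header.addColumn("NAME", 'C', 25, 0, 0);
  header.setNumRecords(0);                     // no data records yet
  try (java.io.RandomAccessFile raf = new java.io.RandomAccessFile(target, "rw");
       java.nio.channels.FileChannel channel = raf.getChannel()) {
    header.write(channel);                     // writes the 33 + 32 * n header bytes at position 0
  }
}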

  
-                // write the field type
-                buffer.put((byte) myFieldDescriptions[i].myFieldType);
+  private String getUniqueFieldName(String fieldName, List fieldNames) {

-                // // write the field data address, offset from the start of the
-                // record.
-                buffer.putInt(tempOffset);
-                tempOffset += myFieldDescriptions[i].myFieldLength;
+    int index = 0;
+    String tempFieldName = fieldName;
+    while (fieldNames.contains(tempFieldName) && index < 1000) {
+      index++;
+      String sufix = String.valueOf(index);
+      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH - sufix.length()) + sufix;
+    }
+    if (index >= 1000) {
+      throw new RuntimeException("Can't fix duplicated name for field '" + fieldName + "'.");
+    }
+    return tempFieldName;
+  }
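To make the renaming rule concrete (illustrative names, assuming DbaseFile.MAX_FIELD_NAME_LENGTH is the usual 11-character DBF limit):

// Illustrative walk-through, not part of this revision.
// Reading a file whose columns are all literally named "TEMPERATURE" produces:
//   1st occurrence  -> "TEMPERATURE"
//   2nd occurrence  -> "TEMPERATUR" + "1"  = "TEMPERATUR1"
//   3rd occurrence  -> "TEMPERATUR" + "2"  = "TEMPERATUR2"
//   11th occurrence -> "TEMPERATU" + "10"  = "TEMPERATU10"
// After 999 unsuccessful attempts the method gives up with a RuntimeException.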

  
-                // write the length of the field.
-                buffer.put((byte) myFieldDescriptions[i].myFieldLength);
+  public EditableFeatureType toFeatureType(EditableFeatureType featureType, boolean handleDatesAsStrings) throws DataException {
+    featureType.setHasOID(true);

-                // write the decimal count.
-                buffer.put((byte) myFieldDescriptions[i].myDecimalCount);
+    for (DbaseFieldDescriptor dbfattr : this.myFieldDescriptions) {
+      if (featureType.get(dbfattr.getName()) != null) {
+        throw new DuplicatedFieldNameException(dbfattr.getName());
+      }
+      EditableFeatureAttributeDescriptor attr;
+      switch (dbfattr.getType()) {
+        case DBFTYPE_BOOLEAN:
+          attr = featureType.add(dbfattr.getName(), DataTypes.BOOLEAN);
+          attr.setSize(0);
+          attr.setDefaultValue(null);
+          attr.setAllowNull(true);
+          break;
+        case DBFTYPE_FLOAT:
+          if (dbfattr.getScale() > 0) {
+            if (dbfattr.getPrecision() > FeatureAttributeDescriptor.DOUBLE_MAX_PRECISION
+                    || dbfattr.getScale() > FeatureAttributeDescriptor.DOUBLE_MAX_PRECISION) {
+              attr = featureType.add(dbfattr.getName(),
+                      DataTypes.DECIMAL, dbfattr.getSize());
+              attr.setScale(dbfattr.getScale());
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);

-                // write the reserved bytes.
-                // for (in j=0; jj<14; j++) out.writeByteLE(0);
-                ((Buffer)buffer).position(((Buffer)buffer).position() + 14);
-            }
-        }
-        // write the end of the field definitions marker
-        buffer.put((byte) 0x0D);
+            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.FLOAT_MAX_PRECISION
+                    || dbfattr.getScale() > FeatureAttributeDescriptor.FLOAT_MAX_PRECISION) {
+              attr = featureType.add(dbfattr.getName(),
+                      DataTypes.DOUBLE, dbfattr.getSize());
+              attr.setScale(dbfattr.getScale());
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else {
+              attr = featureType.add(dbfattr.getName(),
+                      DataTypes.FLOAT, dbfattr.getSize());
+              attr.setScale(dbfattr.getScale());
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            }
+          } else {
+            if (dbfattr.getPrecision() > FeatureAttributeDescriptor.LONG_MAX_PRECISION) {
+              attr = featureType.add(dbfattr.getName(),
+                      DataTypes.DECIMAL, dbfattr.getSize());
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(dbfattr.getScale());
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.INT_MAX_PRECISION) {
+              attr = featureType.add(
+                      dbfattr.getName(),
+                      DataTypes.LONG
+              );
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(0);
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.BYTE_MAX_PRECISION) {
+              attr = featureType.add(
+                      dbfattr.getName(),
+                      DataTypes.INT
+              );
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(0);
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else {
+              attr = featureType.add(
+                      dbfattr.getName(),
+                      DataTypes.BYTE
+              );
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(0);
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            }
+          }
+          break;
+        case DBFTYPE_NUMBER:
+          if (dbfattr.getScale() > 0) {
+            attr = featureType.add(dbfattr.getName(),
+                    DataTypes.DECIMAL, dbfattr.getSize());
+            attr.setPrecision(dbfattr.getPrecision());
+            attr.setScale(dbfattr.getScale());
+            attr.setDefaultValue(null);
+            attr.setAllowNull(true);
+          } else {
+            if (dbfattr.getPrecision() > FeatureAttributeDescriptor.LONG_MAX_PRECISION) {
+              attr = featureType.add(dbfattr.getName(),
+                      DataTypes.DECIMAL, dbfattr.getSize());
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(dbfattr.getScale());
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.INT_MAX_PRECISION) {
+              attr = featureType.add(
+                      dbfattr.getName(),
+                      DataTypes.LONG
+              );
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(0);
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.BYTE_MAX_PRECISION) {
+              attr = featureType.add(
+                      dbfattr.getName(),
+                      DataTypes.INT
+              );
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(0);
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            } else {
+              attr = featureType.add(
+                      dbfattr.getName(),
+                      DataTypes.BYTE
+              );
+              attr.setPrecision(dbfattr.getPrecision());
+              attr.setScale(0);
+              attr.setDefaultValue(null);
+              attr.setAllowNull(true);
+            }
+          }
+          break;
+        case DBFTYPE_STRING:
+          attr = featureType.add(dbfattr.getName(),
+                  DataTypes.STRING, dbfattr.getSize());
+          attr.setDefaultValue(null);
+          attr.setAllowNull(true);
+          if (dbfattr.getSize() == TIMESTAMP_SIZE) {
+            try {
+              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
+              dataType.addCoercion(new CoerceDateToDbfTimestampAsString());
+              attr.setDataType(dataType);
+            } catch (Exception ex) {
+              // Do nothing
+            }
+          } else if (dbfattr.getSize() == TIME_SIZE) {
+            try {
+              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
+              dataType.addCoercion(new CoerceDateToDbfTimeAsString());
+              attr.setDataType(dataType);
+            } catch (Exception ex) {
+              // Do nothing
+            }
+          }
+          break;
+        case DBFTYPE_DATE:
+          if (handleDatesAsStrings) {
+            attr = featureType.add(dbfattr.getName(),
+                    DataTypes.STRING, dbfattr.getSize());
+            attr.setDefaultValue(null);
+            attr.setAllowNull(true);
+          } else {
+            attr = featureType.add(dbfattr.getName(),
+                    DataTypes.DATE
+            );
+            attr.setDefaultValue(null);
+            attr.setAllowNull(true);
+          }
+          break;
+        default:
+          throw new UnknownDataTypeException(
+                  dbfattr.getName(), String.valueOf(dbfattr.getType()),
+                  DBFStoreProvider.NAME
+          );
+      }
+    }
+    return featureType;
+  }

-        ((Buffer)buffer).position(0);
+  public static DbaseFileHeader fromFeatureType(FeatureType featureType)
+          throws DataException {
+    return fromFeatureType(featureType, null);
+  }

-        int r = buffer.remaining();
+  public static DbaseFileHeader fromFeatureType(FeatureType featureType, String charsetName)
+          throws DataException {
+    DbaseFileHeader header = new DbaseFileHeader();
+    Iterator iterator = featureType.iterator();
+    header.myLanguageID = DbaseCodepage.getLdid(charsetName);
+    header.charset = charsetName;
+    while (iterator.hasNext()) {
+      FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();

-        while ((r -= out.write(buffer)) > 0) {
-            ; // do nothing
-        }
-    }
+      if (descriptor.isComputed()) {
+        continue;
+      }
+      int type = descriptor.getType();
+      String colName = descriptor.getName();

-    public String getCharsetName() {
-        return getCharsetName(getLanguageID());
-    }
-
-    public String getCharsetName(int ldid) {
-        if (ldid!=0) {
-            charset = DbaseCodepage.getCharsetName(ldid);
-        }
-        if (charset!=null) {
-            // use charset otherwise
-            return charset;
-        }
-        // default
-        return "ISO-8859-1";
-    }
-
-    public String getOriginalCharset() {
-        return getCharsetName(this.origLanguageID);
-    }
+      int size = descriptor.getSize();
+      int scale = descriptor.getScale();
+      int precision = descriptor.getPrecision();
+      switch (type) {
+        case DataTypes.DECIMAL:
+          header.addColumn(colName, 'N', 0, precision, scale);
+          break;
+        case DataTypes.DOUBLE:
+        case DataTypes.FLOAT:
+          header.addColumn(colName, 'F', 0, precision, scale);
+          break;
+        case DataTypes.INT:
+          header.addColumn(colName, 'N', 0, precision, scale);
+          break;
+        case DataTypes.LONG:
+          header.addColumn(colName, 'N', 0, precision, scale);
+          break;
+        case DataTypes.DATE:
+          header.addColumn(colName, 'D', FieldFormatter.DATE_SIZE, 0, 0);
+          break;
+        case DataTypes.TIME:
+          header.addColumn(colName, 'C', FieldFormatter.TIME_SIZE, 0, 0);
+          break;
+        case DataTypes.TIMESTAMP:
+          header.addColumn(colName, 'C', TIMESTAMP_SIZE, 0, 0);
+          break;
+        case DataTypes.BOOLEAN:
+          header.addColumn(colName, 'L', 1, 0, 0);
+          break;
+        case DataTypes.STRING:
+          header.addColumn(colName, 'C', Math.min(254, size), 0, 0);
+          break;
+        case DataTypes.BYTE:
+          header.addColumn(colName, 'N', 0, precision, scale);
+          break;
+        default:
+          // Si no sabemos lo que es intentaremos guardarlo como un string
+          header.addColumn(colName, 'C', Math.min(254, size < 10 ? 10 : size), 0, 0);
+          break;
+      }

-    public String mappingEncoding(String dbfEnconding) {
-        if(encodingSupportedByString.contains(dbfEnconding))
-            return dbfEnconding;
-        else
-            return "UTF-8";
-    }
-
-    private String getUniqueFieldName(String fieldName, List fieldNames) {
-
-        int index = 0;
-        String tempFieldName = fieldName;
-        while(fieldNames.contains(tempFieldName) && index<1000){
-            index++;
-            String sufix = String.valueOf(index);
-            tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH-sufix.length())+sufix;
-        }
-        if(index>=1000){
-            throw new RuntimeException("Can't fix duplicated name for field '"+fieldName+"'.");
-        }
-        return tempFieldName;
     }
-
+    return header;
+  }
 }
