Statistics
| Revision:

svn-gvsig-desktop / trunk / org.gvsig.desktop / org.gvsig.desktop.compat.cdc / org.gvsig.fmap.dal / org.gvsig.fmap.dal.file / org.gvsig.fmap.dal.file.dbf / src / main / java / org / gvsig / fmap / dal / store / dbf / utils / DbaseFileHeader.java @ 44669

History | View | Annotate | Download (28.6 KB)

1
/**
2
 * gvSIG. Desktop Geographic Information System.
3
 *
4
 * Copyright (C) 2007-2013 gvSIG Association.
5
 *
6
 * This program is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU General Public License
8
 * as published by the Free Software Foundation; either version 3
9
 * of the License, or (at your option) any later version.
10
 *
11
 * This program is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
 * GNU General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU General Public License
17
 * along with this program; if not, write to the Free Software
18
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19
 * MA  02110-1301, USA.
20
 *
21
 * For any additional information, do not hesitate to contact us
22
 * at info AT gvsig.com, or visit our website www.gvsig.com.
23
 */
24
package org.gvsig.fmap.dal.store.dbf.utils;
25

    
26
import java.io.IOException;
27
import java.io.UnsupportedEncodingException;
28
import java.nio.Buffer;
29
import java.nio.ByteBuffer;
30
import java.nio.ByteOrder;
31
import java.nio.channels.FileChannel;
32
import java.nio.charset.Charset;
33
import java.util.ArrayList;
34
import java.util.Calendar;
35
import java.util.Date;
36
import java.util.Iterator;
37
import java.util.List;
38
import java.util.Set;
39
import java.util.SortedMap;
40

    
41
import org.gvsig.fmap.dal.DataTypes;
42
import org.gvsig.fmap.dal.exception.DataException;
43
import org.gvsig.fmap.dal.exception.UnsupportedVersionException;
44
import org.gvsig.fmap.dal.feature.EditableFeatureAttributeDescriptor;
45
import org.gvsig.fmap.dal.feature.EditableFeatureType;
46
import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor;
47
import org.gvsig.fmap.dal.feature.FeatureType;
48
import org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException;
49
import org.gvsig.fmap.dal.feature.exception.UnknownDataTypeException;
50
import org.gvsig.fmap.dal.store.dbf.DBFStoreProvider;
51
import org.gvsig.fmap.dal.store.dbf.DuplicatedFieldNameException;
52
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_BOOLEAN;
53
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_DATE;
54
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_FLOAT;
55
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_NUMBER;
56
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_STRING;
57
import org.gvsig.tools.ToolsLocator;
58
import org.gvsig.tools.dataTypes.DataType;
59
import org.gvsig.utils.bigfile.BigByteBuffer2;
60
import org.slf4j.Logger;
61
import org.slf4j.LoggerFactory;
62
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIMESTAMP_SIZE;
63
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIME_SIZE;
64

    
65
/**
66
 * Class to represent the header of a Dbase III file.
67
 */
68
@SuppressWarnings("UseSpecificCatch")
69
public class DbaseFileHeader {
70

    
71
  private static final Logger LOGGER = LoggerFactory.getLogger(DbaseFileHeader.class);
72

    
73
  // Constant for the size of a record
74
  private final int FILE_DESCRIPTOR_SIZE = 32;
75

    
76
  // type of the file, must be 03h
77
  private static final byte MAGIC = 0x03;
78

    
79
  private static final int MINIMUM_HEADER = 33;
80

    
81
  // type of the file, must be 03h
82
  private int myFileType = 0x03;
83

    
84
  // Date the file was last updated.
85
  private Date myUpdateDate = new Date();
86

    
87
  // Number of records in the datafile
88
  private int myNumRecords = 0;
89

    
90
  // Length of the header structure
91
  private int myHeaderLength;
92

    
93
  /**
94
   * Length of the records. Set to 1 as the default value as if there is not any
95
   * defined column, at least the deleted status initial byte is taken into
96
   * account.
97
   */
98
  private int myRecordLength = 1;
99

    
100
  // Number of fields in the record.
101
  private int myNumFields;
102

    
103
  // collection of header records.
104
  private DbaseFieldDescriptor[] myFieldDescriptions;
105

    
106
  private int myLanguageID = 0x00;
107
  /**
108
   * Only considered when myLanguageID = 0x00;
109
   */
110
  private String charset = null;
111

    
112
  private List<String> encodingSupportedByString = null;
113

    
114
  private int origLanguageID = 0x00;
115

    
116
  /**
117
   * Headers must always be encoded using ASCII/ISO-8859-1, regardless the
118
   * encoding of the records
119
   */
120
  private static final Charset HEADER_CHARSET = Charset.forName("ISO-8859-1");
121

    
122
  /**
   * DbaseFileHeader constructor: creates an empty header and caches the
   * charset names supported by the running JVM (used later to map DBF
   * encodings onto Java encodings).
   */
  public DbaseFileHeader() {
    super();
    // Same contents and order as iterating availableCharsets().keySet().
    encodingSupportedByString = new ArrayList<>(Charset.availableCharsets().keySet());
  }
136

    
137
  /**
138
   * Add a column to this DbaseFileHeader.The type is one of (C N L or D)
139
 character, number, logical(true/false), or date. The Field length is the
140
 total length in bytes reserved for this column. The decimal count only
141
 applies to numbers(N), and floating point values (F), and refers to the
142
 number of characters to reserve after the decimal point.
143
   *
144
   * @param fieldName DOCUMENT ME!
145
   * @param fieldType DOCUMENT ME!
146
   * @param fieldSize DOCUMENT ME!
147
   * @param fieldPrecision
148
   * @param fieldScale DOCUMENT ME!
149
   * @throws
150
   * org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException
151
   *
152
   */
153
  public void addColumn(String fieldName, char fieldType,
154
          int fieldSize, int fieldPrecision, int fieldScale)
155
          throws AttributeFeatureTypeNotSuportedException {
156
//    if (fieldSize <= 0) {
157
//      fieldSize = 1;
158
//    }
159

    
160
    if (myFieldDescriptions == null) {
161
      myFieldDescriptions = new DbaseFieldDescriptor[0];
162
    }
163

    
164
    int tempLength = 1; // the length is used for the offset, and there is a * for deleted as the first byte
165
    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
166
            + 1];
167

    
168
    for (int i = 0; i < myFieldDescriptions.length; i++) {
169
      myFieldDescriptions[i].setOffsetInRecord(tempLength);
170
      tempLength = tempLength + myFieldDescriptions[i].getSize();
171
      tempFieldDescriptors[i] = myFieldDescriptions[i];
172
    }
173

    
174
    tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
175
    tempFieldDescriptors[myFieldDescriptions.length].setType(fieldType);
176
    tempFieldDescriptors[myFieldDescriptions.length].setSize(fieldSize);
177
    tempFieldDescriptors[myFieldDescriptions.length].setPrecision(fieldPrecision);
178
    tempFieldDescriptors[myFieldDescriptions.length].setScale(fieldScale);
179
    tempFieldDescriptors[myFieldDescriptions.length].setOffsetInRecord(tempLength);
180

    
181
    // set the field name
182
    String tempFieldName = fieldName;
183

    
184
    if (tempFieldName == null) {
185
      tempFieldName = "NoName";
186
    }
187

    
188
    if (tempFieldName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
189
      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH);
190
      warn("FieldName " + fieldName
191
              + " is longer than " + DbaseFile.MAX_FIELD_NAME_LENGTH + " characters, truncating to "
192
              + tempFieldName);
193
    }
194

    
195
    tempFieldDescriptors[myFieldDescriptions.length].setName(tempFieldName);
196
    tempFieldDescriptors[myFieldDescriptions.length].setName_trim(tempFieldName.trim());
197

    
198
    // the length of a record
199
    tempLength += tempFieldDescriptors[myFieldDescriptions.length].getSize();
200

    
201
    // set the new fields.
202
    myFieldDescriptions = tempFieldDescriptors;
203
    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
204
    myNumFields = myFieldDescriptions.length;
205
    myRecordLength = tempLength;
206
  }
207

    
208
  /**
209
   * Remove a column from this DbaseFileHeader.
210
   *
211
   * @param inFieldName
212
   *
213
   * @return index of the removed column, -1 if no found
214
   */
215
  public int removeColumn(String inFieldName) {
216
    int retCol = -1;
217
    int tempLength = 1;
218
    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
219
            - 1];
220

    
221
    for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) {
222
      if (!inFieldName.equalsIgnoreCase(
223
              myFieldDescriptions[i].getName().trim())) {
224
        // if this is the last field and we still haven't found the
225
        // named field
226
        if ((i == j) && (i == (myFieldDescriptions.length - 1))) {
227
//                    System.err.println("Could not find a field named '" +
228
//                        inFieldName + "' for removal");
229

    
230
          return retCol;
231
        }
232

    
233
        tempFieldDescriptors[j] = myFieldDescriptions[i];
234
        tempFieldDescriptors[j].setOffsetInRecord(tempLength);
235
        tempLength += tempFieldDescriptors[j].getSize();
236

    
237
        // only increment j on non-matching fields
238
        j++;
239
      } else {
240
        retCol = i;
241
      }
242
    }
243

    
244
    // set the new fields.
245
    myFieldDescriptions = tempFieldDescriptors;
246
    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
247
    myNumFields = myFieldDescriptions.length;
248
    myRecordLength = tempLength;
249

    
250
    return retCol;
251
  }
252

    
253
  private void warn(String inWarn) {
254
    LOGGER.warn(inWarn);
255
  }
256

    
257
  /**
258
   * Return the Field Descriptor for the given field.
259
   *
260
   * @param index, the index of the requeted field description
261
   * @return the dbase field descriptor.
262
   */
263
  public DbaseFieldDescriptor getFieldDescription(int index) {
264
    return myFieldDescriptions[index];
265
  }
266

    
267
  public DbaseFieldDescriptor getFieldDescription(String name) {
268
    int index = this.getFieldIndex(name);
269
    return myFieldDescriptions[index];
270
  }
271

    
272
  public int getFieldIndex(String name) {
273
    for (int i = 0; i < myFieldDescriptions.length; i++) {
274
      if (myFieldDescriptions[i].getName_trim()
275
              .equalsIgnoreCase(name)) {
276
        return i;
277
      }
278
    }
279
    return -1;
280
  }
281

    
282
  /**
283
   * Return the date this file was last updated.
284
   *
285
   * @return
286
   */
287
  public Date getLastUpdateDate() {
288
    return myUpdateDate;
289
  }
290

    
291
  /**
292
   * Return the number of fields in the records.
293
   *
294
   * @return
295
   */
296
  public int getNumFields() {
297
    return myNumFields;
298
  }
299

    
300
  /**
301
   * Return the number of records in the file
302
   *
303
   * @return
304
   */
305
  public int getNumRecords() {
306
    return myNumRecords;
307
  }
308

    
309
  /**
310
   * Return the length of the records in bytes.
311
   *
312
   * @return
313
   */
314
  public int getRecordLength() {
315
    return myRecordLength;
316
  }
317

    
318
  /**
319
   * Return the length of the header
320
   *
321
   * @return
322
   */
323
  public int getHeaderLength() {
324
    return myHeaderLength;
325
  }
326

    
327
  /**
328
   * Read the header data from the DBF file.
329
   *
330
   * @param in
331
   * @param charsName
332
   * @param allowDuplicatedFieldNames
333
   * @throws UnsupportedVersionException
334
   * @throws AttributeFeatureTypeNotSuportedException
335
   *
336
   */
337
  public void read(BigByteBuffer2 in, String charsName, boolean allowDuplicatedFieldNames)
338
          throws UnsupportedVersionException, 
339
            AttributeFeatureTypeNotSuportedException {
340
    // type of file.
341
    myFileType = in.get();
342

    
343
    if (myFileType != 0x03) {
344
      throw new UnsupportedVersionException("DBF", Integer.toHexString(myFileType));
345
    }
346

    
347
    // parse the update date information.
348
    int tempUpdateYear = in.get();
349
    int tempUpdateMonth = in.get();
350
    int tempUpdateDay = in.get();
351
    tempUpdateYear = tempUpdateYear + 1900;
352

    
353
    Calendar c = Calendar.getInstance();
354
    c.set(Calendar.YEAR, tempUpdateYear);
355
    c.set(Calendar.MONTH, tempUpdateMonth - 1);
356
    c.set(Calendar.DATE, tempUpdateDay);
357
    myUpdateDate = c.getTime();
358

    
359
    // read the number of records.
360
    in.order(ByteOrder.LITTLE_ENDIAN);
361
    myNumRecords = in.getInt();
362

    
363
    // read the length of the header structure.
364
    myHeaderLength = in.getShort();
365

    
366
    // read the length of a record
367
    myRecordLength = in.getShort(); //posicon 0h
368

    
369
    in.order(ByteOrder.BIG_ENDIAN);
370

    
371
    // read the language bit (LDID) 
372
    in.position(29);
373
    origLanguageID = byteAsUnsigned(in.get());
374
    if (charsName != null) {
375
      // ignore the language bit, use the provided charset name
376
      myLanguageID = DbaseCodepage.getLdid(charsName);
377
      this.charset = charsName;
378
    } else {
379
      // use the read the language bit
380
      myLanguageID = origLanguageID;
381
      charsName = getCharsetName();
382
    }
383

    
384
    // Posicionamos para empezar a leer los campos.
385
    in.position(32);
386

    
387
    // calculate the number of Fields in the header
388
    myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;
389

    
390
    // read all of the header records
391
    myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
392
    int fieldOffset = 0;
393

    
394
    List fieldNames = new ArrayList<>();
395

    
396
    // FIXME: should field names be always read using ISO8859-1??
397
    for (int i = 0; i < myNumFields; i++) {
398
      myFieldDescriptions[i] = new DbaseFieldDescriptor();
399

    
400
      // read the field name
401
      byte[] buffer = new byte[11];
402
      in.get(buffer);
403
      String fieldName;
404
      fieldName = new String(buffer, HEADER_CHARSET);
405

    
406
      if (allowDuplicatedFieldNames) {
407
        fieldName = getUniqueFieldName(fieldName, fieldNames);
408
      }
409
      fieldNames.add(fieldName);
410

    
411
      myFieldDescriptions[i].setName(fieldName);
412

    
413
      myFieldDescriptions[i].setName_trim(myFieldDescriptions[i].getName().trim());
414

    
415
      // read the field type
416
      myFieldDescriptions[i].setType((char) in.get());
417

    
418
      // read the field data address, offset from the start of the record.
419
      myFieldDescriptions[i].setOffsetInRecord(in.getInt());
420

    
421
      // read the field length in bytes
422
      int tempLength = in.get();
423

    
424
      if (tempLength < 0) {
425
        tempLength = tempLength + 256;
426
      }
427

    
428
      myFieldDescriptions[i].setSize(tempLength);
429

    
430
      // read the field decimal count in bytes
431
      myFieldDescriptions[i].setScale(in.get());
432

    
433
      myFieldDescriptions[i].setOffsetInRecord(fieldOffset);
434
      
435
      myFieldDescriptions[i].calculateScaleAndPrecission();
436
      
437
      fieldOffset += tempLength;
438

    
439
      // read the reserved bytes.
440
      in.position(in.position() + 14);
441

    
442
    }
443

    
444
    // Last byte is a marker for the end of the field definitions.
445
    in.get();
446
  }
447

    
448
  /**
449
   * Set the number of records in the file
450
   *
451
   * @param inNumRecords
452
   */
453
  public void setNumRecords(int inNumRecords) {
454
    myNumRecords = inNumRecords;
455
  }
456

    
457
  /**
458
   * Returns the value of the unsigned byte as a short Bytes are always signed
459
   * in Java, so if we are reading a C unsigned byte with value > 128, it will
460
   * appear as a negative value.
461
   *
462
   * In this case, we need to get the original unsigned value and return it as
463
   * short or int, as byte will never correctly store the value in Java.
464
   *
465
   * @return
466
   */
467
  private int byteAsUnsigned(byte b) {
468
    int i;
469
    if (b < 0) {
470
      i = b & 0xFF;
471
    } else {
472
      i = b;
473
    }
474
    return i;
475
  }
476

    
477
  /**
478
   * Gets the Language driver IDs (code page) defined on the file header (or
479
   * guessed from the provided charset)
480
   *
481
   * Some examples: 01h        DOS USA        code page 437 02h        DOS Multilingual code page 850
482
   * 03h        Windows ANSI code page 1252 04h        Standard Macintosh 64h        EE MS-DOS code
483
   * page 852 65h        Nordic MS-DOS code page 865 66h        Russian MS-DOS code page 866
484
   * 67h        Icelandic MS-DOS 68h        Kamenicky (Czech) MS-DOS 69h        Mazovia (Polish)
485
   * MS-DOS 6Ah        Greek MS-DOS (437G) 6Bh        Turkish MS-DOS 96h        Russian Macintosh 97h
486
   * Eastern European Macintosh 98h        Greek Macintosh C8h        Windows EE        code page
487
   * 1250 C9h        Russian Windows CAh        Turkish Windows CBh        Greek Windows
488
   *
489
   * See the java equivalences in
490
   * {@link DbaseCodepage#dbfLdid} & {@link DbaseCodepage#ldidJava} objects.
491
   *
492
   * See some others here:
493
   * https://github.com/infused/dbf/blob/master/docs/supported_encodings.csv
494
   *
495
   * @return
496
   */
497
  public int getLanguageID() {
498

    
499
    return myLanguageID;
500
  }
501

    
502
  /**
503
   * Write the header data to the DBF file.
504
   *
505
   * @param out A channel to write to. If you have an OutputStream you can
506
   * obtain the correct channel by using
507
   * java.nio.Channels.newChannel(OutputStream out).
508
   *
509
   * @throws IOException If errors occur.
510
   */
511
  public void write(FileChannel out) throws IOException {
512
    // take care of the annoying case where no records have been added...
513
    if (myHeaderLength <= 0) {
514
      myHeaderLength = MINIMUM_HEADER;
515
    }
516

    
517
    // Desde el principio
518
    out.position(0);
519

    
520
    ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength);
521
    buffer.order(ByteOrder.LITTLE_ENDIAN);
522

    
523
    // write the output file type.
524
    buffer.put(MAGIC);
525

    
526
    // write the date stuff
527
    Calendar c = Calendar.getInstance();
528
    c.setTime(new Date());
529
    buffer.put((byte) (c.get(Calendar.YEAR) % 100));
530
    buffer.put((byte) (c.get(Calendar.MONTH) + 1));
531
    buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH)));
532

    
533
    // write the number of records in the datafile.
534
    buffer.putInt(myNumRecords);
535

    
536
    // write the length of the header structure.
537
    buffer.putShort((short) myHeaderLength);
538

    
539
    // write the length of a record
540
    buffer.putShort((short) myRecordLength);
541

    
542
    // write the reserved bytes in the header
543
    ((Buffer) buffer).position(((Buffer) buffer).position() + 17);
544

    
545
    // write the language id
546
    buffer.put((byte) getLanguageID());
547

    
548
    // write the reserved bytes in the header                
549
    ((Buffer) buffer).position(((Buffer) buffer).position() + 2);
550

    
551
    // write all of the header records
552
    int tempOffset = 0;
553

    
554
    if (myFieldDescriptions != null) {
555
      for (int i = 0; i < myFieldDescriptions.length; i++) {
556
        // write the field name
557
        for (int j = 0; j < DbaseFile.MAX_FIELD_NAME_LENGTH + 1; j++) {
558
          if (myFieldDescriptions[i].getName().length() > j) {
559
            buffer.put((byte) myFieldDescriptions[i].getName().charAt(j));
560
          } else {
561
            buffer.put((byte) 0);
562
          }
563
        }
564

    
565
        // write the field type
566
        buffer.put((byte) myFieldDescriptions[i].getType());
567

    
568
        // // write the field data address, offset from the start of the
569
        // record.
570
        buffer.putInt(tempOffset);
571
        tempOffset += myFieldDescriptions[i].getSize();
572

    
573
        // write the length of the field.
574
        buffer.put((byte) myFieldDescriptions[i].getSize());
575

    
576
        // write the decimal count.
577
        buffer.put((byte) myFieldDescriptions[i].getScale());
578

    
579
        // write the reserved bytes.
580
        // for (in j=0; jj<14; j++) out.writeByteLE(0);
581
        ((Buffer) buffer).position(((Buffer) buffer).position() + 14);
582
      }
583
    }
584
    // write the end of the field definitions marker
585
    buffer.put((byte) 0x0D);
586

    
587
    ((Buffer) buffer).position(0);
588

    
589
    int r = buffer.remaining();
590

    
591
    while ((r -= out.write(buffer)) > 0) {
592
      // do nothing
593
    }
594
  }
595

    
596
  public String getCharsetName() {
597
    return getCharsetName(getLanguageID());
598
  }
599

    
600
  public String getCharsetName(int ldid) {
601
    if (ldid != 0) {
602
      charset = DbaseCodepage.getCharsetName(ldid);
603
    }
604
    if (charset != null) {
605
      // use charset otherwise
606
      return charset;
607
    }
608
    // default
609
    return "ISO-8859-1";
610
  }
611

    
612
  public String getOriginalCharset() {
613
    return getCharsetName(this.origLanguageID);
614
  }
615

    
616
  public String mappingEncoding(String dbfEnconding) {
617
    if (encodingSupportedByString.contains(dbfEnconding)) {
618
      return dbfEnconding;
619
    } else {
620
      return "UTF-8";
621
    }
622
  }
623

    
624
  private String getUniqueFieldName(String fieldName, List fieldNames) {
625

    
626
    int index = 0;
627
    String tempFieldName = fieldName;
628
    while (fieldNames.contains(tempFieldName) && index < 1000) {
629
      index++;
630
      String sufix = String.valueOf(index);
631
      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH - sufix.length()) + sufix;
632
    }
633
    if (index >= 1000) {
634
      throw new RuntimeException("Can't fix duplicated name for field '" + fieldName + "'.");
635
    }
636
    return tempFieldName;
637
  }
638

    
639
  public EditableFeatureType toFeatureType(EditableFeatureType featureType, boolean handleDatesAsStrings) throws DataException {
640
    featureType.setHasOID(true);
641

    
642
    for (DbaseFieldDescriptor dbfattr : this.myFieldDescriptions) {
643
      if (featureType.get(dbfattr.getName()) != null) {
644
        throw new DuplicatedFieldNameException(dbfattr.getName());
645
      }
646
      EditableFeatureAttributeDescriptor attr;
647
      switch (dbfattr.getType()) {
648
        case DBFTYPE_BOOLEAN:
649
          attr = featureType.add(dbfattr.getName(), DataTypes.BOOLEAN);
650
          attr.setSize(0);
651
          attr.setDefaultValue(null);
652
          attr.setAllowNull(true);
653
          break;
654
        case DBFTYPE_FLOAT:
655
          if (dbfattr.getScale() > 0) {
656
            if (dbfattr.getPrecision() > FeatureAttributeDescriptor.DOUBLE_MAX_PRECISION
657
                    || dbfattr.getScale() > FeatureAttributeDescriptor.DOUBLE_MAX_PRECISION) {
658
              attr = featureType.add(dbfattr.getName(),
659
                      DataTypes.DECIMAL, dbfattr.getSize());
660
              attr.setScale(dbfattr.getScale());
661
              attr.setPrecision(dbfattr.getPrecision());
662
              attr.setDefaultValue(null);
663
              attr.setAllowNull(true);
664

    
665
            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.FLOAT_MAX_PRECISION
666
                    || dbfattr.getScale() > FeatureAttributeDescriptor.FLOAT_MAX_PRECISION) {
667
              attr = featureType.add(dbfattr.getName(),
668
                      DataTypes.DOUBLE, dbfattr.getSize());
669
              attr.setScale(dbfattr.getScale());
670
              attr.setPrecision(dbfattr.getPrecision());
671
              attr.setDefaultValue(null);
672
              attr.setAllowNull(true);
673
            } else {
674
              attr = featureType.add(dbfattr.getName(),
675
                      DataTypes.FLOAT, dbfattr.getSize());
676
              attr.setScale(dbfattr.getScale());
677
              attr.setPrecision(dbfattr.getPrecision());
678
              attr.setDefaultValue(null);
679
              attr.setAllowNull(true);
680
            }
681
          } else {
682
            if (dbfattr.getPrecision() > FeatureAttributeDescriptor.LONG_MAX_PRECISION) {
683
              attr = featureType.add(dbfattr.getName(),
684
                      DataTypes.DECIMAL, dbfattr.getSize());
685
              attr.setPrecision(dbfattr.getPrecision());
686
              attr.setScale(dbfattr.getScale());
687
              attr.setDefaultValue(null);
688
              attr.setAllowNull(true);
689
            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.INT_MAX_PRECISION) {
690
              attr = featureType.add(
691
                      dbfattr.getName(),
692
                      DataTypes.LONG
693
              );
694
              attr.setPrecision(dbfattr.getPrecision());
695
              attr.setScale(0);
696
              attr.setDefaultValue(null);
697
              attr.setAllowNull(true);
698
            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.BYTE_MAX_PRECISION) {
699
              attr = featureType.add(
700
                      dbfattr.getName(),
701
                      DataTypes.INT
702
              );
703
              attr.setPrecision(dbfattr.getPrecision());
704
              attr.setScale(0);
705
              attr.setDefaultValue(null);
706
              attr.setAllowNull(true);
707
            } else {
708
              attr = featureType.add(
709
                      dbfattr.getName(),
710
                      DataTypes.BYTE
711
              );
712
              attr.setPrecision(dbfattr.getPrecision());
713
              attr.setScale(0);
714
              attr.setDefaultValue(null);
715
              attr.setAllowNull(true);
716
            }
717
          }
718
          break;
719
        case DBFTYPE_NUMBER:
720
          if (dbfattr.getScale() > 0) {
721
            attr = featureType.add(dbfattr.getName(),
722
                    DataTypes.DECIMAL, dbfattr.getSize());
723
            attr.setPrecision(dbfattr.getPrecision());
724
            attr.setScale(dbfattr.getScale());
725
            attr.setDefaultValue(null);
726
            attr.setAllowNull(true);
727
          } else {
728
            if (dbfattr.getPrecision() > FeatureAttributeDescriptor.LONG_MAX_PRECISION) {
729
              attr = featureType.add(dbfattr.getName(),
730
                      DataTypes.DECIMAL, dbfattr.getSize());
731
              attr.setPrecision(dbfattr.getPrecision());
732
              attr.setScale(dbfattr.getScale());
733
              attr.setDefaultValue(null);
734
              attr.setAllowNull(true);
735
            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.INT_MAX_PRECISION) {
736
              attr = featureType.add(
737
                      dbfattr.getName(),
738
                      DataTypes.LONG
739
              );
740
              attr.setPrecision(dbfattr.getPrecision());
741
              attr.setScale(0);
742
              attr.setDefaultValue(null);
743
              attr.setAllowNull(true);
744
            } else if (dbfattr.getPrecision() > FeatureAttributeDescriptor.BYTE_MAX_PRECISION) {
745
              attr = featureType.add(
746
                      dbfattr.getName(),
747
                      DataTypes.INT
748
              );
749
              attr.setPrecision(dbfattr.getPrecision());
750
              attr.setScale(0);
751
              attr.setDefaultValue(null);
752
              attr.setAllowNull(true);
753
            } else {
754
              attr = featureType.add(
755
                      dbfattr.getName(),
756
                      DataTypes.BYTE
757
              );
758
              attr.setPrecision(dbfattr.getPrecision());
759
              attr.setScale(0);
760
              attr.setDefaultValue(null);
761
              attr.setAllowNull(true);
762
            }
763
          }
764
          break;
765
        case DBFTYPE_STRING:
766
          attr = featureType.add(dbfattr.getName(),
767
                  DataTypes.STRING, dbfattr.getSize());
768
          attr.setDefaultValue(null);
769
          attr.setAllowNull(true);
770
          if (dbfattr.getSize() == TIMESTAMP_SIZE) {
771
            try {
772
              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
773
              dataType.addCoercion(new CoerceDateToDbfTimestampAsString());
774
              attr.setDataType(dataType);
775
            } catch (Exception ex) {
776
              // Do nothing
777
            }
778
          } else if (dbfattr.getSize() == TIME_SIZE) {
779
            try {
780
              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
781
              dataType.addCoercion(new CoerceDateToDbfTimeAsString());
782
              attr.setDataType(dataType);
783
            } catch (Exception ex) {
784
              // Do nothing
785
            }
786
          }
787
          break;
788
        case DBFTYPE_DATE:
789
          if (handleDatesAsStrings) {
790
            attr = featureType.add(dbfattr.getName(),
791
                    DataTypes.STRING, dbfattr.getSize());
792
            attr.setDefaultValue(null);
793
            attr.setAllowNull(true);
794
          } else {
795
            attr = featureType.add(dbfattr.getName(),
796
                    DataTypes.DATE
797
            );
798
            attr.setDefaultValue(null);
799
            attr.setAllowNull(true);
800
          }
801
          break;
802
        default:
803
          throw new UnknownDataTypeException(
804
                  dbfattr.getName(), String.valueOf(dbfattr.getType()),
805
                  DBFStoreProvider.NAME
806
          );
807
      }
808
    }
809
    return featureType;
810
  }
811

    
812
  public static DbaseFileHeader fromFeatureType(FeatureType featureType)
813
          throws DataException {
814
    return fromFeatureType(featureType, null);
815
  }
816

    
817
  public static DbaseFileHeader fromFeatureType(FeatureType featureType, String charsetName)
818
          throws DataException {
819
    DbaseFileHeader header = new DbaseFileHeader();
820
    Iterator iterator = featureType.iterator();
821
    header.myLanguageID = DbaseCodepage.getLdid(charsetName);
822
    header.charset = charsetName;
823
    while (iterator.hasNext()) {
824
      FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();
825

    
826
      if (descriptor.isComputed()) {
827
        continue;
828
      }
829
      int type = descriptor.getType();
830
      String colName = descriptor.getName();
831

    
832
      int size = descriptor.getSize();
833
      int scale = descriptor.getScale();
834
      int precision = descriptor.getPrecision();
835
      switch (type) {
836
        case DataTypes.DECIMAL:
837
          header.addColumn(colName, 'N', 0, precision, scale);
838
          break;
839
        case DataTypes.DOUBLE:
840
        case DataTypes.FLOAT:
841
          header.addColumn(colName, 'F', 0, precision, scale);
842
          break;
843
        case DataTypes.INT:
844
          header.addColumn(colName, 'N', 0, precision, scale);
845
          break;
846
        case DataTypes.LONG:
847
          header.addColumn(colName, 'N', 0, precision, scale);
848
          break;
849
        case DataTypes.DATE:
850
          header.addColumn(colName, 'D', FieldFormatter.DATE_SIZE, 0, 0);
851
          break;
852
        case DataTypes.TIME:
853
          header.addColumn(colName, 'C', FieldFormatter.TIME_SIZE, 0, 0);
854
          break;
855
        case DataTypes.TIMESTAMP:
856
          header.addColumn(colName, 'C', TIMESTAMP_SIZE, 0, 0);
857
          break;
858
        case DataTypes.BOOLEAN:
859
          header.addColumn(colName, 'L', 1, 0, 0);
860
          break;
861
        case DataTypes.STRING:
862
          header.addColumn(colName, 'C', Math.min(254, size), 0, 0);
863
          break;
864
        case DataTypes.BYTE:
865
          header.addColumn(colName, 'N', 0, precision, scale);
866
          break;
867
        default:
868
          // Si no sabemos lo que es intentaremos guardarlo como un string
869
          header.addColumn(colName, 'C', Math.min(254, size < 10 ? 10 : size), 0, 0);
870
          break;
871
      }
872

    
873
    }
874
    return header;
875
  }
876
}