/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.commons.compress.archivers.tar;

import static java.nio.charset.StandardCharsets.UTF_8;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.time.Instant;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.zip.ZipEncoding;
import org.apache.commons.compress.archivers.zip.ZipEncodingHelper;
import org.apache.commons.compress.utils.FixedLengthBlockOutputStream;
import org.apache.commons.compress.utils.TimeUtils;
import org.apache.commons.io.Charsets;
import org.apache.commons.io.output.CountingOutputStream;

/**
 * The TarArchiveOutputStream writes a UNIX tar archive as an OutputStream. Methods are provided to put entries, and then write their contents by writing to
 * this stream using write().
 *
 * <p>
 * tar archives consist of a sequence of records of 512 bytes each that are grouped into blocks. Prior to Apache Commons Compress 1.14 it was possible to
 * configure a record size different from 512 bytes and arbitrary block sizes. Starting with Compress 1.15, 512 is the only valid record size and the block
 * size must be a multiple of 512. The default block size also changed from 10240 bytes prior to Compress 1.15 to 512 bytes with Compress 1.15.
 * </p>
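 *
 * <p>
 * A minimal usage sketch (the target file name and entry contents are illustrative):
 * </p>
 * <pre>{@code
 * try (TarArchiveOutputStream tarOut = new TarArchiveOutputStream(Files.newOutputStream(Paths.get("archive.tar")))) {
 *     TarArchiveEntry entry = new TarArchiveEntry("data.txt");
 *     byte[] content = "Hello".getBytes(StandardCharsets.UTF_8);
 *     entry.setSize(content.length);
 *     tarOut.putArchiveEntry(entry);
 *     tarOut.write(content);
 *     tarOut.closeArchiveEntry();
 *     tarOut.finish();
 * }
 * }</pre>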
 *
 * @NotThreadSafe
 */
public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveEntry> {

    /**
     * Fail if a long file name is required in the archive.
     */
    public static final int LONGFILE_ERROR = 0;

    /**
     * Long paths will be truncated in the archive.
     */
    public static final int LONGFILE_TRUNCATE = 1;

    /**
     * GNU tar extensions are used to store long file names in the archive.
     */
    public static final int LONGFILE_GNU = 2;

    /**
     * POSIX/PAX extensions are used to store long file names in the archive.
     */
    public static final int LONGFILE_POSIX = 3;

    /**
     * Fail if a big number (e.g. size &gt; 8 GiB) is required in the archive.
     */
    public static final int BIGNUMBER_ERROR = 0;

    /**
     * star/GNU tar/BSD tar extensions are used to store big numbers in the archive.
     */
    public static final int BIGNUMBER_STAR = 1;

    /**
     * POSIX/PAX extensions are used to store big numbers in the archive.
     */
    public static final int BIGNUMBER_POSIX = 2;
    private static final int RECORD_SIZE = 512;

    private static final ZipEncoding ASCII = ZipEncodingHelper.getZipEncoding(StandardCharsets.US_ASCII);

    private static final int BLOCK_SIZE_UNSPECIFIED = -511;
    private long currSize;
    private String currName;
    private long currBytes;
    private final byte[] recordBuf;
    private int longFileMode = LONGFILE_ERROR;
    private int bigNumberMode = BIGNUMBER_ERROR;

    private long recordsWritten;

    private final int recordsPerBlock;

    private boolean closed;

    /**
     * Indicates if putArchiveEntry has been called without closeArchiveEntry.
     */
    private boolean haveUnclosedEntry;

    /**
     * Indicates if this archive is finished.
     */
    private boolean finished;

    private final FixedLengthBlockOutputStream out;

    private final CountingOutputStream countingOut;

    private final ZipEncoding zipEncoding;

    /**
     * The provided encoding (for unit tests).
     */
    final String charsetName;

    private boolean addPaxHeadersForNonAsciiNames;

    /**
     * Constructs a new instance.
     *
     * <p>
     * Uses a block size of 512 bytes.
     * </p>
     *
     * @param os the output stream to use
     */
    public TarArchiveOutputStream(final OutputStream os) {
        this(os, BLOCK_SIZE_UNSPECIFIED);
    }

    /**
     * Constructs a new instance.
     *
     * @param os        the output stream to use
     * @param blockSize the block size to use. Must be a multiple of 512 bytes.
     */
    public TarArchiveOutputStream(final OutputStream os, final int blockSize) {
        this(os, blockSize, null);
    }

    /**
     * Constructs a new instance.
     *
     * @param os         the output stream to use
     * @param blockSize  the block size to use
     * @param recordSize the record size to use. Must be 512 bytes.
     * @deprecated recordSize must always be 512 bytes. An IllegalArgumentException will be thrown if any other value is used
     */
    @Deprecated
    public TarArchiveOutputStream(final OutputStream os, final int blockSize, final int recordSize) {
        this(os, blockSize, recordSize, null);
    }

    /**
     * Constructs a new instance.
     *
     * @param os         the output stream to use
     * @param blockSize  the block size to use. Must be a multiple of 512 bytes.
     * @param recordSize the record size to use. Must be 512 bytes.
     * @param encoding   name of the encoding to use for file names
     * @since 1.4
     * @deprecated recordSize must always be 512 bytes. An IllegalArgumentException will be thrown if any other value is used.
     */
    @Deprecated
    public TarArchiveOutputStream(final OutputStream os, final int blockSize, final int recordSize, final String encoding) {
        this(os, blockSize, encoding);
        if (recordSize != RECORD_SIZE) {
            throw new IllegalArgumentException("Tar record size must always be 512 bytes. Attempt to set size of " + recordSize);
        }
    }

    /**
     * Constructs a new instance.
     *
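     * <p>
     * For example, to write the classic 10240-byte blocking with UTF-8 file names (a usage sketch, {@code fileOut} being any OutputStream):
     * </p>
     * <pre>{@code
     * TarArchiveOutputStream tarOut = new TarArchiveOutputStream(fileOut, 10240, "UTF-8");
     * }</pre>
     *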
     * @param os        the output stream to use
     * @param blockSize the block size to use. Must be a multiple of 512 bytes.
     * @param encoding  name of the encoding to use for file names
     * @since 1.4
     */
    public TarArchiveOutputStream(final OutputStream os, final int blockSize, final String encoding) {
        final int realBlockSize;
        if (BLOCK_SIZE_UNSPECIFIED == blockSize) {
            realBlockSize = RECORD_SIZE;
        } else {
            realBlockSize = blockSize;
        }

        if (realBlockSize <= 0 || realBlockSize % RECORD_SIZE != 0) {
            throw new IllegalArgumentException("Block size must be a multiple of 512 bytes. Attempt to use block size of " + blockSize);
        }
        out = new FixedLengthBlockOutputStream(countingOut = new CountingOutputStream(os), RECORD_SIZE);
        this.charsetName = Charsets.toCharset(encoding).name();
        this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);

        this.recordBuf = new byte[RECORD_SIZE];
        this.recordsPerBlock = realBlockSize / RECORD_SIZE;
    }

    /**
     * Constructs a new instance.
     *
     * <p>
     * Uses a block size of 512 bytes.
     * </p>
     *
     * @param os       the output stream to use
     * @param encoding name of the encoding to use for file names
     * @since 1.4
     */
    public TarArchiveOutputStream(final OutputStream os, final String encoding) {
        this(os, BLOCK_SIZE_UNSPECIFIED, encoding);
    }

    private void addFileTimePaxHeader(final Map<String, String> paxHeaders, final String header, final FileTime value) {
        if (value != null) {
            final Instant instant = value.toInstant();
            final long seconds = instant.getEpochSecond();
            final int nanos = instant.getNano();
            if (nanos == 0) {
                paxHeaders.put(header, String.valueOf(seconds));
            } else {
                addInstantPaxHeader(paxHeaders, header, seconds, nanos);
            }
        }
    }

    private void addFileTimePaxHeaderForBigNumber(final Map<String, String> paxHeaders, final String header, final FileTime value, final long maxValue) {
        if (value != null) {
            final Instant instant = value.toInstant();
            final long seconds = instant.getEpochSecond();
            final int nanos = instant.getNano();
            if (nanos == 0) {
                addPaxHeaderForBigNumber(paxHeaders, header, seconds, maxValue);
            } else {
                addInstantPaxHeader(paxHeaders, header, seconds, nanos);
            }
        }
    }

    private void addInstantPaxHeader(final Map<String, String> paxHeaders, final String header, final long seconds, final int nanos) {
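        // Render the timestamp as "<seconds>.<fraction>" with the fractional part truncated
        // to 7 digits, e.g. seconds=1, nanos=500000000 yields "1.5000000".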
        final BigDecimal bdSeconds = BigDecimal.valueOf(seconds);
        final BigDecimal bdNanos = BigDecimal.valueOf(nanos).movePointLeft(9).setScale(7, RoundingMode.DOWN);
        final BigDecimal timestamp = bdSeconds.add(bdNanos);
        paxHeaders.put(header, timestamp.toPlainString());
    }

    private void addPaxHeaderForBigNumber(final Map<String, String> paxHeaders, final String header, final long value, final long maxValue) {
        if (value < 0 || value > maxValue) {
            paxHeaders.put(header, String.valueOf(value));
        }
    }

    private void addPaxHeadersForBigNumbers(final Map<String, String> paxHeaders, final TarArchiveEntry entry) {
        addPaxHeaderForBigNumber(paxHeaders, "size", entry.getSize(), TarConstants.MAXSIZE);
        addPaxHeaderForBigNumber(paxHeaders, "gid", entry.getLongGroupId(), TarConstants.MAXID);
        addFileTimePaxHeaderForBigNumber(paxHeaders, "mtime", entry.getLastModifiedTime(), TarConstants.MAXSIZE);
        addFileTimePaxHeader(paxHeaders, "atime", entry.getLastAccessTime());
        if (entry.getStatusChangeTime() != null) {
            addFileTimePaxHeader(paxHeaders, "ctime", entry.getStatusChangeTime());
        } else {
            // ctime is usually set from creation time on platforms where the real ctime is not available
            addFileTimePaxHeader(paxHeaders, "ctime", entry.getCreationTime());
        }
        addPaxHeaderForBigNumber(paxHeaders, "uid", entry.getLongUserId(), TarConstants.MAXID);
        // libarchive extensions
        addFileTimePaxHeader(paxHeaders, "LIBARCHIVE.creationtime", entry.getCreationTime());
        // star extensions by Jörg Schilling
        addPaxHeaderForBigNumber(paxHeaders, "SCHILY.devmajor", entry.getDevMajor(), TarConstants.MAXID);
        addPaxHeaderForBigNumber(paxHeaders, "SCHILY.devminor", entry.getDevMinor(), TarConstants.MAXID);
        // there is no PAX header for file mode
        failForBigNumber("mode", entry.getMode(), TarConstants.MAXID);
    }

    /**
     * Closes the underlying OutputStream.
     *
     * @throws IOException on error
     */
    @Override
    public void close() throws IOException {
        try {
            if (!finished) {
                finish();
            }
        } finally {
            if (!closed) {
                out.close();
                closed = true;
            }
        }
    }

    /**
     * Closes an entry. This method MUST be called for all file entries that contain data. The reason is that we must buffer data written to the stream in
     * order to satisfy the buffer's record-based writes. Thus, there may be data fragments still being assembled that must be written to the output stream
     * before this entry is closed and the next entry written.
     *
     * @throws IOException on error
     */
    @Override
    public void closeArchiveEntry() throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }
        if (!haveUnclosedEntry) {
            throw new IOException("No current entry to close");
        }
        out.flushBlock();
        if (currBytes < currSize) {
            throw new IOException(
                    "Entry '" + currName + "' closed at '" + currBytes + "' before the '" + currSize + "' bytes specified in the header were written");
        }
        recordsWritten += currSize / RECORD_SIZE;

        if (0 != currSize % RECORD_SIZE) {
            recordsWritten++;
        }
        haveUnclosedEntry = false;
    }

    @Override
    public TarArchiveEntry createArchiveEntry(final File inputFile, final String entryName) throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }
        return new TarArchiveEntry(inputFile, entryName);
    }

    @Override
    public TarArchiveEntry createArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }
        return new TarArchiveEntry(inputPath, entryName, options);
    }

    private byte[] encodeExtendedPaxHeadersContents(final Map<String, String> headers) {
        final StringWriter w = new StringWriter();
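        // Each PAX record has the form "<length> <key>=<value>\n" where <length> counts every
        // byte of the record including the length digits themselves,
        // e.g. "17 comment=hello\n" is exactly 17 bytes long.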
        headers.forEach((k, v) -> {
            int len = k.length() + v.length() + 3 /* blank, equals and newline */
                    + 2 /* guess 9 < actual length < 100 */;
            String line = len + " " + k + "=" + v + "\n";
            int actualLength = line.getBytes(UTF_8).length;
            while (len != actualLength) {
                // Adjust for cases where length < 10 or > 100
                // or where UTF-8 encoding isn't a single octet
                // per character.
                // Must be in loop as size may go from 99 to 100 in
                // first pass, so we'd need a second.
                len = actualLength;
                line = len + " " + k + "=" + v + "\n";
                actualLength = line.getBytes(UTF_8).length;
            }
            w.write(line);
        });
        return w.toString().getBytes(UTF_8);
    }

    private void failForBigNumber(final String field, final long value, final long maxValue) {
        failForBigNumber(field, value, maxValue, "");
    }

    private void failForBigNumber(final String field, final long value, final long maxValue, final String additionalMsg) {
        if (value < 0 || value > maxValue) {
            throw new IllegalArgumentException(field + " '" + value // NOSONAR
                    + "' is too big ( > " + maxValue + " )." + additionalMsg);
        }
    }

    private void failForBigNumbers(final TarArchiveEntry entry) {
        failForBigNumber("entry size", entry.getSize(), TarConstants.MAXSIZE);
        failForBigNumberWithPosixMessage("group id", entry.getLongGroupId(), TarConstants.MAXID);
        failForBigNumber("last modification time", TimeUtils.toUnixTime(entry.getLastModifiedTime()), TarConstants.MAXSIZE);
        failForBigNumber("user id", entry.getLongUserId(), TarConstants.MAXID);
        failForBigNumber("mode", entry.getMode(), TarConstants.MAXID);
        failForBigNumber("major device number", entry.getDevMajor(), TarConstants.MAXID);
        failForBigNumber("minor device number", entry.getDevMinor(), TarConstants.MAXID);
    }

    private void failForBigNumberWithPosixMessage(final String field, final long value, final long maxValue) {
        failForBigNumber(field, value, maxValue, " Use STAR or POSIX extensions to overcome this limit");
    }

    /**
     * Finishes the TAR archive without closing the underlying OutputStream.
     *
     * <p>
     * An archive consists of a series of file entries terminated by an end-of-archive entry, which consists of two 512-byte blocks of zero bytes. POSIX.1
     * requires two EOF records, like some other implementations.
     * </p>
     *
     * @throws IOException on error
     */
    @Override
    public void finish() throws IOException {
        if (finished) {
            throw new IOException("This archive has already been finished");
        }

        if (haveUnclosedEntry) {
            throw new IOException("This archive contains unclosed entries.");
        }
        writeEOFRecord();
        writeEOFRecord();
        padAsNeeded();
        out.flush();
        finished = true;
    }

    @Override
    public void flush() throws IOException {
        out.flush();
    }

    @Override
    public long getBytesWritten() {
        return countingOut.getByteCount();
    }

    @Deprecated
    @Override
    public int getCount() {
        return (int) getBytesWritten();
    }

    /**
     * Gets the record size being used by this stream.
     *
     * @return The record size, always 512 bytes.
     * @deprecated
     */
    @Deprecated
    public int getRecordSize() {
        return RECORD_SIZE;
    }

    /**
     * Handles long file or link names according to the longFileMode setting.
     *
     * <p>
     * I.e. if the given name is too long to be written to a plain tar header then
     * <ul>
     * <li>it creates a pax header whose name is given by the paxHeaderName parameter if longFileMode is POSIX</li>
     * <li>it creates a GNU longlink entry whose type is given by the linkType parameter if longFileMode is GNU</li>
     * <li>it throws an exception if longFileMode is ERROR</li>
     * <li>it truncates the name if longFileMode is TRUNCATE</li>
     * </ul>
     * </p>
     *
     * @param entry         entry the name belongs to
     * @param name          the name to write
     * @param paxHeaders    current map of pax headers
     * @param paxHeaderName name of the pax header to write
     * @param linkType      type of the GNU entry to write
     * @param fieldName     the name of the field
     * @throws IllegalArgumentException if the {@link TarArchiveOutputStream#longFileMode} equals {@link TarArchiveOutputStream#LONGFILE_ERROR} and the file
     *                                  name is too long
     * @return whether a pax header has been written.
     */
    private boolean handleLongName(final TarArchiveEntry entry, final String name, final Map<String, String> paxHeaders, final String paxHeaderName,
            final byte linkType, final String fieldName) throws IOException {
        final ByteBuffer encodedName = zipEncoding.encode(name);
        final int len = encodedName.limit() - encodedName.position();
        if (len >= TarConstants.NAMELEN) {

            if (longFileMode == LONGFILE_POSIX) {
                paxHeaders.put(paxHeaderName, name);
                return true;
            }
            if (longFileMode == LONGFILE_GNU) {
                // create a TarEntry for the LongLink, the contents
                // of which are the link's name
                final TarArchiveEntry longLinkEntry = new TarArchiveEntry(TarConstants.GNU_LONGLINK, linkType);

                longLinkEntry.setSize(len + 1L); // +1 for NUL
                transferModTime(entry, longLinkEntry);
                putArchiveEntry(longLinkEntry);
                write(encodedName.array(), encodedName.arrayOffset(), len);
                write(0); // NUL terminator
                closeArchiveEntry();
            } else if (longFileMode != LONGFILE_TRUNCATE) {
                throw new IllegalArgumentException(fieldName + " '" + name // NOSONAR
                        + "' is too long ( > " + TarConstants.NAMELEN + " bytes)");
            }
        }
        return false;
    }

    private void padAsNeeded() throws IOException {
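        // Pad with zero-filled records until the total record count is a multiple of
        // recordsPerBlock; this is a no-op for the default 512-byte block size (one record per block).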
        final int start = Math.toIntExact(recordsWritten % recordsPerBlock);
        if (start != 0) {
            for (int i = start; i < recordsPerBlock; i++) {
                writeEOFRecord();
            }
        }
    }

    /**
     * Puts an entry on the output stream. This writes the entry's header record and positions the output stream for writing the contents of the entry. Once
     * this method is called, the stream is ready for calls to write() to write the entry's contents. Once the contents are written, closeArchiveEntry()
     * <B>MUST</B> be called to ensure that all buffered data is completely written to the output stream.
     *
     * @param archiveEntry The TarArchiveEntry to be written to the archive.
     * @throws IOException              on error
     * @throws ClassCastException       if archiveEntry is not an instance of TarArchiveEntry
     * @throws IllegalArgumentException if the {@link TarArchiveOutputStream#longFileMode} equals {@link TarArchiveOutputStream#LONGFILE_ERROR} and the file
     *                                  name is too long
     * @throws IllegalArgumentException if the {@link TarArchiveOutputStream#bigNumberMode} equals {@link TarArchiveOutputStream#BIGNUMBER_ERROR} and one of the
     *                                  numeric values exceeds the limits of a traditional tar header.
     */
    @Override
    public void putArchiveEntry(final TarArchiveEntry archiveEntry) throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }
        if (archiveEntry.isGlobalPaxHeader()) {
            final byte[] data = encodeExtendedPaxHeadersContents(archiveEntry.getExtraPaxHeaders());
            archiveEntry.setSize(data.length);
            archiveEntry.writeEntryHeader(recordBuf, zipEncoding, bigNumberMode == BIGNUMBER_STAR);
            writeRecord(recordBuf);
            currSize = archiveEntry.getSize();
            currBytes = 0;
            this.haveUnclosedEntry = true;
            write(data);
            closeArchiveEntry();
        } else {
            final Map<String, String> paxHeaders = new HashMap<>();
            final String entryName = archiveEntry.getName();
            final boolean paxHeaderContainsPath = handleLongName(archiveEntry, entryName, paxHeaders, "path", TarConstants.LF_GNUTYPE_LONGNAME, "file name");
            final String linkName = archiveEntry.getLinkName();
            final boolean paxHeaderContainsLinkPath = linkName != null && !linkName.isEmpty()
                    && handleLongName(archiveEntry, linkName, paxHeaders, "linkpath", TarConstants.LF_GNUTYPE_LONGLINK, "link name");

            if (bigNumberMode == BIGNUMBER_POSIX) {
                addPaxHeadersForBigNumbers(paxHeaders, archiveEntry);
            } else if (bigNumberMode != BIGNUMBER_STAR) {
                failForBigNumbers(archiveEntry);
            }

            if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsPath && !ASCII.canEncode(entryName)) {
                paxHeaders.put("path", entryName);
            }

            if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsLinkPath && (archiveEntry.isLink() || archiveEntry.isSymbolicLink())
                    && !ASCII.canEncode(linkName)) {
                paxHeaders.put("linkpath", linkName);
            }
            paxHeaders.putAll(archiveEntry.getExtraPaxHeaders());

            if (!paxHeaders.isEmpty()) {
                writePaxHeaders(archiveEntry, entryName, paxHeaders);
            }

            archiveEntry.writeEntryHeader(recordBuf, zipEncoding, bigNumberMode == BIGNUMBER_STAR);
            writeRecord(recordBuf);

            currBytes = 0;

            if (archiveEntry.isDirectory()) {
                currSize = 0;
            } else {
                currSize = archiveEntry.getSize();
            }
            currName = entryName;
            haveUnclosedEntry = true;
        }
    }

    /**
     * Sets whether to add a PAX extension header for non-ASCII file names.
     *
     * @param b whether to add a PAX extension header for non-ASCII file names.
     * @since 1.4
     */
    public void setAddPaxHeadersForNonAsciiNames(final boolean b) {
        addPaxHeadersForNonAsciiNames = b;
    }

    /**
     * Sets the big number mode. This can be BIGNUMBER_ERROR(0), BIGNUMBER_STAR(1) or BIGNUMBER_POSIX(2). This specifies the treatment of big files (sizes &gt;
     * TarConstants.MAXSIZE) and other numeric values too big to fit into a traditional tar header. Default is BIGNUMBER_ERROR.
     *
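     * <p>
     * For example, to allow entry sizes above 8 GiB via PAX extension headers (a usage sketch, {@code tarOut} being an instance of this class):
     * </p>
     * <pre>{@code
     * tarOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
     * }</pre>
     *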
     * @param bigNumberMode the mode to use
     * @since 1.4
     */
    public void setBigNumberMode(final int bigNumberMode) {
        this.bigNumberMode = bigNumberMode;
    }

    /**
     * Sets the long file mode. This can be LONGFILE_ERROR(0), LONGFILE_TRUNCATE(1), LONGFILE_GNU(2) or LONGFILE_POSIX(3). This specifies the treatment of long
     * file names (names &gt;= TarConstants.NAMELEN). Default is LONGFILE_ERROR.
     *
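     * <p>
     * For example, to store file names of 100 bytes or more via PAX extension headers (a usage sketch, {@code tarOut} being an instance of this class):
     * </p>
     * <pre>{@code
     * tarOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
     * }</pre>
     *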
     * @param longFileMode the mode to use
     */
    public void setLongFileMode(final int longFileMode) {
        this.longFileMode = longFileMode;
    }

    /**
     * Tests whether the character could lead to problems when used inside a TarArchiveEntry name for a PAX header.
     *
     * @return true if the character could lead to problems when used inside a TarArchiveEntry name for a PAX header.
     */
    private boolean shouldBeReplaced(final char c) {
        return c == 0 // would be read as Trailing null
                || c == '/' // when used as last character TAE will consider the PAX header a directory
                || c == '\\'; // same as '/' as slashes get "normalized" on Windows
    }

    private String stripTo7Bits(final String name) {
        final int length = name.length();
        final StringBuilder result = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            final char stripped = (char) (name.charAt(i) & 0x7F);
            if (shouldBeReplaced(stripped)) {
                result.append("_");
            } else {
                result.append(stripped);
            }
        }
        return result.toString();
    }

    private void transferModTime(final TarArchiveEntry from, final TarArchiveEntry to) {
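        // Copy the mod time to the generated helper entry, falling back to 0 when the value does not
        // fit the traditional octal mtime field (negative or larger than TarConstants.MAXSIZE).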
        long fromModTimeSeconds = TimeUtils.toUnixTime(from.getLastModifiedTime());
        if (fromModTimeSeconds < 0 || fromModTimeSeconds > TarConstants.MAXSIZE) {
            fromModTimeSeconds = 0;
        }
        to.setLastModifiedTime(TimeUtils.unixTimeToFileTime(fromModTimeSeconds));
    }

    /**
     * Writes bytes to the current tar archive entry. This method is aware of the current entry and will throw an exception if you attempt to write bytes past
     * the length specified for the current entry.
     *
     * @param wBuf       The buffer to write to the archive.
     * @param wOffset    The offset in the buffer from which to get bytes.
     * @param numToWrite The number of bytes to write.
     * @throws IOException on error
     */
    @Override
    public void write(final byte[] wBuf, final int wOffset, final int numToWrite) throws IOException {
        if (!haveUnclosedEntry) {
            throw new IllegalStateException("No current tar entry");
        }
        if (currBytes + numToWrite > currSize) {
            throw new IOException(
                    "Request to write '" + numToWrite + "' bytes exceeds size in header of '" + currSize + "' bytes for entry '" + currName + "'");
        }
        out.write(wBuf, wOffset, numToWrite);
        currBytes += numToWrite;
    }

    /**
     * Writes an EOF (end of archive) record to the tar archive. An EOF record consists of a record of all zeros.
     */
    private void writeEOFRecord() throws IOException {
        Arrays.fill(recordBuf, (byte) 0);
        writeRecord(recordBuf);
    }

    /**
     * Writes a PAX extended header with the given map as contents.
     *
     * @since 1.4
     */
    void writePaxHeaders(final TarArchiveEntry entry, final String entryName, final Map<String, String> headers) throws IOException {
        String name = "./PaxHeaders.X/" + stripTo7Bits(entryName);
        if (name.length() >= TarConstants.NAMELEN) {
            name = name.substring(0, TarConstants.NAMELEN - 1);
        }
        final TarArchiveEntry pex = new TarArchiveEntry(name, TarConstants.LF_PAX_EXTENDED_HEADER_LC);
        transferModTime(entry, pex);

        final byte[] data = encodeExtendedPaxHeadersContents(headers);
        pex.setSize(data.length);
        putArchiveEntry(pex);
        write(data);
        closeArchiveEntry();
    }

    /**
     * Writes an archive record to the archive.
     *
     * @param record The record data to write to the archive.
     * @throws IOException on error
     */
    private void writeRecord(final byte[] record) throws IOException {
        if (record.length != RECORD_SIZE) {
            throw new IOException("Record to write has length '" + record.length + "' which is not the record size of '" + RECORD_SIZE + "'");
        }

        out.write(record);
        recordsWritten++;
    }
}