Blake3.java

  1. /*
  2.  * Licensed to the Apache Software Foundation (ASF) under one or more
  3.  * contributor license agreements.  See the NOTICE file distributed with
  4.  * this work for additional information regarding copyright ownership.
  5.  * The ASF licenses this file to You under the Apache License, Version 2.0
  6.  * (the "License"); you may not use this file except in compliance with
  7.  * the License.  You may obtain a copy of the License at
  8.  *
  9.  *      https://www.apache.org/licenses/LICENSE-2.0
  10.  *
  11.  * Unless required by applicable law or agreed to in writing, software
  12.  * distributed under the License is distributed on an "AS IS" BASIS,
  13.  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14.  * See the License for the specific language governing permissions and
  15.  * limitations under the License.
  16.  */
  17. package org.apache.commons.codec.digest;

  18. import java.util.Arrays;
  19. import java.util.Objects;

  20. /**
  21.  * Implements the Blake3 algorithm providing a {@linkplain #initHash() hash function} with extensible output (XOF), a
  22.  * {@linkplain #initKeyedHash(byte[]) keyed hash function} (MAC, PRF), and a
  23.  * {@linkplain #initKeyDerivationFunction(byte[]) key derivation function} (KDF). Blake3 has a 128-bit security level
 * and a default output length of 256 bits (32 bytes) which can be extended up to 2<sup>64</sup> bytes.
  25.  * <h2>Hashing</h2>
 * <p>Hash mode calculates the same output hash given the same input bytes and can be used as both a message digest and
 * an extensible output function.</p>
  28.  * <pre>{@code
  29.  *      Blake3 hasher = Blake3.initHash();
  30.  *      hasher.update("Hello, world!".getBytes(StandardCharsets.UTF_8));
  31.  *      byte[] hash = new byte[32];
  32.  *      hasher.doFinalize(hash);
  33.  * }</pre>
  34.  * <h2>Keyed Hashing</h2>
 * <p>Keyed hashes take a 32-byte secret key and calculate a message authentication code on some input bytes. These
  36.  * also work as pseudo-random functions (PRFs) with extensible output similar to the extensible hash output. Note that
  37.  * Blake3 keyed hashes have the same performance as plain hashes; the key is used in initialization in place of a
  38.  * standard initialization vector used for plain hashing.</p>
  39.  * <pre>{@code
  40.  *      SecureRandom random = SecureRandom.getInstanceStrong();
  41.  *      byte[] key = new byte[32];
  42.  *      random.nextBytes(key);
  43.  *      Blake3 hasher = Blake3.initKeyedHash(key);
  44.  *      hasher.update("Hello, Alice!".getBytes(StandardCharsets.UTF_8));
  45.  *      byte[] mac = new byte[32];
  46.  *      hasher.doFinalize(mac);
  47.  * }</pre>
  48.  * <h2>Key Derivation</h2>
  49.  * <p>A specific hash mode for deriving session keys and other derived keys in a unique key derivation context
  50.  * identified by some sequence of bytes. These context strings should be unique but do not need to be kept secret.
  51.  * Additional input data is hashed for key material which can be finalized to derive subkeys.</p>
  52.  * <pre>{@code
  53.  *      String context = "org.apache.commons.codec.digest.Blake3Example";
  54.  *      byte[] sharedSecret = ...;
  55.  *      byte[] senderId = ...;
  56.  *      byte[] recipientId = ...;
  57.  *      Blake3 kdf = Blake3.initKeyDerivationFunction(context.getBytes(StandardCharsets.UTF_8));
  58.  *      kdf.update(sharedSecret);
  59.  *      kdf.update(senderId);
  60.  *      kdf.update(recipientId);
  61.  *      byte[] txKey = new byte[32];
  62.  *      byte[] rxKey = new byte[32];
  63.  *      kdf.doFinalize(txKey);
  64.  *      kdf.doFinalize(rxKey);
  65.  * }</pre>
  66.  * <p>
  67.  * Adapted from the ISC-licensed O(1) Cryptography library by Matt Sicker and ported from the reference public domain
  68.  * implementation by Jack O'Connor.
  69.  * </p>
  70.  *
  71.  * @see <a href="https://github.com/BLAKE3-team/BLAKE3">BLAKE3 hash function</a>
  72.  * @since 1.16
  73.  */
public final class Blake3 {

    /**
     * Absorbs up to one chunk (1024 bytes) of input, compressing each full 64-byte block as it
     * fills and carrying the resulting chaining value forward into the next block.
     */
    private static final class ChunkState {

        // Chaining value carried between block compressions; initially the key words.
        private int[] chainingValue;
        // Position of this chunk within the overall input; fed to the compression counter.
        private final long chunkCounter;
        // Domain flags (KEYED_HASH, DERIVE_KEY_*, ...) applied to every compression in this chunk.
        private final int flags;

        // Buffer for the 64-byte block currently being filled.
        private final byte[] block = new byte[BLOCK_LEN];
        private int blockLength;
        private int blocksCompressed;

        private ChunkState(final int[] key, final long chunkCounter, final int flags) {
            chainingValue = key;
            this.chunkCounter = chunkCounter;
            this.flags = flags;
        }

        // Total number of input bytes absorbed into this chunk so far.
        private int length() {
            return BLOCK_LEN * blocksCompressed + blockLength;
        }

        // Captures the pending (final) block of this chunk as an Output node; CHUNK_END marks it
        // as the last compression of the chunk.
        private Output output() {
            final int[] blockWords = unpackInts(block, BLOCK_INTS);
            final int outputFlags = flags | startFlag() | CHUNK_END;
            return new Output(chainingValue, blockWords, chunkCounter, blockLength, outputFlags);
        }

        // CHUNK_START is set only while compressing the first block of a chunk.
        private int startFlag() {
            return blocksCompressed == 0 ? CHUNK_START : 0;
        }

        private void update(final byte[] input, int offset, int length) {
            while (length > 0) {
                if (blockLength == BLOCK_LEN) {
                    // If the block buffer is full, compress it and clear it. More
                    // input is coming, so this compression is not CHUNK_END.
                    final int[] blockWords = unpackInts(block, BLOCK_INTS);
                    chainingValue = Arrays.copyOf(
                            compress(chainingValue, blockWords, BLOCK_LEN, chunkCounter, flags | startFlag()),
                            CHAINING_VALUE_INTS);
                    blocksCompressed++;
                    blockLength = 0;
                    Arrays.fill(block, (byte) 0);
                }

                // Copy as much of the input as fits into the partially filled block buffer.
                final int want = BLOCK_LEN - blockLength;
                final int take = Math.min(want, length);
                System.arraycopy(input, offset, block, blockLength, take);
                blockLength += take;
                offset += take;
                length -= take;
            }
        }
    }

    /**
     * Tracks the overall hash tree: the chunk currently being absorbed plus a stack of subtree
     * chaining values along the right edge of the tree (Section 5.1.2 of the BLAKE3 spec).
     */
    private static final class EngineState {
        private final int[] key;
        private final int flags;
        // Space for 54 subtree chaining values: 2^54 * CHUNK_LEN = 2^64
        // No more than 54 entries can ever be added to this stack (after updating 2^64 bytes and not finalizing any)
        // so we preallocate the stack here. This can be smaller in environments where the data limit is expected to
        // be much lower.
        private final int[][] cvStack = new int[54][];
        private int stackLen;
        private ChunkState state;

        private EngineState(final int[] key, final int flags) {
            this.key = key;
            this.flags = flags;
            state = new ChunkState(key, 0, flags);
        }

        // Section 5.1.2 of the BLAKE3 spec explains this algorithm in more detail.
        private void addChunkCV(final int[] firstCV, final long totalChunks) {
            // This chunk might complete some subtrees. For each completed subtree,
            // its left child will be the current top entry in the CV stack, and
            // its right child will be the current value of `newCV`. Pop each left
            // child off the stack, merge it with `newCV`, and overwrite `newCV`
            // with the result. After all these merges, push the final value of
            // `newCV` onto the stack. The number of completed subtrees is given
            // by the number of trailing 0-bits in the new total number of chunks.
            int[] newCV = firstCV;
            long chunkCounter = totalChunks;
            while ((chunkCounter & 1) == 0) {
                newCV = parentChainingValue(popCV(), newCV, key, flags);
                chunkCounter >>= 1;
            }
            pushCV(newCV);
        }

        private void inputData(final byte[] in, int offset, int length) {
            while (length > 0) {
                // If the current chunk is complete, finalize it and reset the
                // chunk state. More input is coming, so this chunk is not ROOT.
                if (state.length() == CHUNK_LEN) {
                    final int[] chunkCV = state.output().chainingValue();
                    final long totalChunks = state.chunkCounter + 1;
                    addChunkCV(chunkCV, totalChunks);
                    state = new ChunkState(key, totalChunks, flags);
                }

                // Compress input bytes into the current chunk state.
                final int want = CHUNK_LEN - state.length();
                final int take = Math.min(want, length);
                state.update(in, offset, take);
                offset += take;
                length -= take;
            }
        }

        private void outputHash(final byte[] out, final int offset, final int length) {
            // Starting with the Output from the current chunk, compute all the
            // parent chaining values along the right edge of the tree, until we
            // have the root Output.
            Output output = state.output();
            int parentNodesRemaining = stackLen;
            while (parentNodesRemaining-- > 0) {
                final int[] parentCV = cvStack[parentNodesRemaining];
                output = parentOutput(parentCV, output.chainingValue(), key, flags);
            }
            output.rootOutputBytes(out, offset, length);
        }

        private int[] popCV() {
            return cvStack[--stackLen];
        }

        private void pushCV(final int[] cv) {
            cvStack[stackLen++] = cv;
        }

        // Restores the engine to its freshly constructed state; the key and flags are retained.
        private void reset() {
            stackLen = 0;
            Arrays.fill(cvStack, null);
            state = new ChunkState(key, 0, flags);
        }
    }

    /**
     * Represents the state just prior to either producing an eight word chaining value or any number of output bytes
     * when the ROOT flag is set.
     */
    private static final class Output {

        private final int[] inputChainingValue;
        private final int[] blockWords;
        private final long counter;
        private final int blockLength;
        private final int flags;

        private Output(final int[] inputChainingValue, final int[] blockWords, final long counter, final int blockLength, final int flags) {
            this.inputChainingValue = inputChainingValue;
            this.blockWords = blockWords;
            this.counter = counter;
            this.blockLength = blockLength;
            this.flags = flags;
        }

        // Compresses this node and keeps only the first eight state words as its chaining value.
        private int[] chainingValue() {
            return Arrays.copyOf(compress(inputChainingValue, blockWords, blockLength, counter, flags), CHAINING_VALUE_INTS);
        }

        private void rootOutputBytes(final byte[] out, int offset, int length) {
            // NOTE(review): the output block counter restarts at 0 on every call, so invoking this
            // again on an unchanged engine state reproduces the same byte stream rather than
            // continuing it — confirm this matches the intended extensible-output semantics.
            int outputBlockCounter = 0;
            while (length > 0) {
                // Each root compression yields up to 64 bytes (16 words) of output.
                int chunkLength = Math.min(OUT_LEN * 2, length);
                length -= chunkLength;
                final int[] words = compress(inputChainingValue, blockWords, blockLength, outputBlockCounter++, flags | ROOT);
                int wordCounter = 0;
                while (chunkLength > 0) {
                    // The final word of a request may be written only partially.
                    final int wordLength = Math.min(Integer.BYTES, chunkLength);
                    packInt(words[wordCounter++], out, offset, wordLength);
                    offset += wordLength;
                    chunkLength -= wordLength;
                }
            }
        }
    }

    // Core BLAKE3 sizes, in bytes and in 32-bit words.
    private static final int BLOCK_LEN = 64;
    private static final int BLOCK_INTS = BLOCK_LEN / Integer.BYTES;
    private static final int KEY_LEN = 32;
    private static final int KEY_INTS = KEY_LEN / Integer.BYTES;
    private static final int OUT_LEN = 32;
    private static final int CHUNK_LEN = 1024;
    private static final int CHAINING_VALUE_INTS = 8;

    /**
     * Standard hash key used for plain hashes; same initialization vector as Blake2s.
     */
    private static final int[] IV = { 0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19 };

    // domain flags
    private static final int CHUNK_START = 1;
    private static final int CHUNK_END = 1 << 1;
    private static final int PARENT = 1 << 2;
    private static final int ROOT = 1 << 3;
    private static final int KEYED_HASH = 1 << 4;
    private static final int DERIVE_KEY_CONTEXT = 1 << 5;
    private static final int DERIVE_KEY_MATERIAL = 1 << 6;

    /**
     * Pre-permuted for all 7 rounds; the second row (2,6,3,...) indicates the base permutation.
     */
    // @formatter:off
    private static final byte[][] MSG_SCHEDULE = {
            { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
            { 2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8 },
            { 3, 4, 10, 12, 13, 2, 7, 14, 6, 5, 9, 0, 11, 15, 8, 1 },
            { 10, 7, 12, 9, 14, 3, 13, 15, 4, 0, 11, 2, 5, 8, 1, 6 },
            { 12, 13, 9, 11, 15, 10, 14, 8, 7, 2, 5, 3, 0, 1, 6, 4 },
            { 9, 14, 11, 5, 8, 12, 15, 1, 13, 3, 0, 10, 2, 6, 4, 7 },
            { 11, 15, 5, 0, 1, 9, 8, 6, 14, 10, 2, 12, 3, 4, 7, 13 }
    };
    // @formatter:on

    // Validates a (buffer, offset, length) triple; the subtraction form avoids int overflow that
    // a naive offset + length > bufferLength check would allow.
    private static void checkBufferArgs(final byte[] buffer, final int offset, final int length) {
        Objects.requireNonNull(buffer);
        if (offset < 0) {
            throw new IndexOutOfBoundsException("Offset must be non-negative");
        }
        if (length < 0) {
            throw new IndexOutOfBoundsException("Length must be non-negative");
        }
        final int bufferLength = buffer.length;
        if (offset > bufferLength - length) {
            throw new IndexOutOfBoundsException("Offset " + offset + " and length " + length + " out of bounds with buffer length " + bufferLength);
        }
    }

    // The BLAKE3 compression function: mixes one 64-byte block into an 8-word chaining value and
    // returns the full 16-word output state (callers truncate to 8 words for a chaining value).
    private static int[] compress(final int[] chainingValue, final int[] blockWords, final int blockLength, final long counter, final int flags) {
        // State layout: words 0-7 chaining value, 8-11 IV constants, 12-13 the 64-bit counter
        // (low word first), 14 block length, 15 flags.
        final int[] state = Arrays.copyOf(chainingValue, BLOCK_INTS);
        System.arraycopy(IV, 0, state, 8, 4);
        state[12] = (int) counter;
        state[13] = (int) (counter >> Integer.SIZE);
        state[14] = blockLength;
        state[15] = flags;
        for (int i = 0; i < 7; i++) {
            final byte[] schedule = MSG_SCHEDULE[i];
            round(state, blockWords, schedule);
        }
        // Output feed-forward: fold the two halves together per the BLAKE3 finalization step.
        for (int i = 0; i < state.length / 2; i++) {
            state[i] ^= state[i + 8];
            state[i + 8] ^= chainingValue[i];
        }
        return state;
    }

    /**
     * The mixing function, G, which mixes either a column or a diagonal.
     */
    private static void g(final int[] state, final int a, final int b, final int c, final int d, final int mx, final int my) {
        state[a] += state[b] + mx;
        state[d] = Integer.rotateRight(state[d] ^ state[a], 16);
        state[c] += state[d];
        state[b] = Integer.rotateRight(state[b] ^ state[c], 12);
        state[a] += state[b] + my;
        state[d] = Integer.rotateRight(state[d] ^ state[a], 8);
        state[c] += state[d];
        state[b] = Integer.rotateRight(state[b] ^ state[c], 7);
    }

    /**
     * Calculates the Blake3 hash of the provided data.
     *
     * @param data source array to absorb data from
     * @return 32-byte hash squeezed from the provided data
     * @throws NullPointerException if data is null
     */
    public static byte[] hash(final byte[] data) {
        return Blake3.initHash().update(data).doFinalize(OUT_LEN);
    }

    /**
     * Constructs a fresh Blake3 hash function. The instance returned functions as an arbitrary length message digest.
     *
     * @return fresh Blake3 instance in hashed mode
     */
    public static Blake3 initHash() {
        return new Blake3(IV, 0);
    }

    /**
     * Constructs a fresh Blake3 key derivation function using the provided key derivation context byte string.
     * The instance returned functions as a key-derivation function which can further absorb additional context data
     * before squeezing derived key data.
     *
     * @param kdfContext a globally unique key-derivation context byte string to separate key derivation contexts from each other
     * @return fresh Blake3 instance in key derivation mode
     * @throws NullPointerException if kdfContext is null
     */
    public static Blake3 initKeyDerivationFunction(final byte[] kdfContext) {
        Objects.requireNonNull(kdfContext);
        // First stage: hash the context string with DERIVE_KEY_CONTEXT to produce a context key,
        // which then keys the second-stage (DERIVE_KEY_MATERIAL) instance returned to the caller.
        final EngineState kdf = new EngineState(IV, DERIVE_KEY_CONTEXT);
        kdf.inputData(kdfContext, 0, kdfContext.length);
        final byte[] key = new byte[KEY_LEN];
        kdf.outputHash(key, 0, key.length);
        return new Blake3(unpackInts(key, KEY_INTS), DERIVE_KEY_MATERIAL);
    }

    /**
     * Constructs a fresh Blake3 keyed hash function. The instance returned functions as a pseudorandom function (PRF) or as a
     * message authentication code (MAC).
     *
     * @param key 32-byte secret key
     * @return fresh Blake3 instance in keyed mode using the provided key
     * @throws NullPointerException     if key is null
     * @throws IllegalArgumentException if key is not 32 bytes
     */
    public static Blake3 initKeyedHash(final byte[] key) {
        Objects.requireNonNull(key);
        if (key.length != KEY_LEN) {
            throw new IllegalArgumentException("Blake3 keys must be 32 bytes");
        }
        // The key words replace the standard IV as the initial chaining value.
        return new Blake3(unpackInts(key, KEY_INTS), KEYED_HASH);
    }

    /**
     * Calculates the Blake3 keyed hash (MAC) of the provided data.
     *
     * @param key  32-byte secret key
     * @param data source array to absorb data from
     * @return 32-byte mac squeezed from the provided data
     * @throws NullPointerException if key or data are null
     */
    public static byte[] keyedHash(final byte[] key, final byte[] data) {
        return Blake3.initKeyedHash(key).update(data).doFinalize(OUT_LEN);
    }

    // Writes the low {@code len} bytes of {@code value} into {@code dst} in little-endian order.
    private static void packInt(final int value, final byte[] dst, final int off, final int len) {
        for (int i = 0; i < len; i++) {
            dst[off + i] = (byte) (value >>> i * Byte.SIZE);
        }
    }

    // Merges two child chaining values into their parent's chaining value.
    private static int[] parentChainingValue(final int[] leftChildCV, final int[] rightChildCV, final int[] key, final int flags) {
        return parentOutput(leftChildCV, rightChildCV, key, flags).chainingValue();
    }

    // Builds a parent node whose 16-word block is the concatenation of the two child chaining
    // values; parent nodes always compress a full block with the PARENT flag and counter 0.
    private static Output parentOutput(final int[] leftChildCV, final int[] rightChildCV, final int[] key, final int flags) {
        final int[] blockWords = Arrays.copyOf(leftChildCV, BLOCK_INTS);
        System.arraycopy(rightChildCV, 0, blockWords, 8, CHAINING_VALUE_INTS);
        return new Output(key.clone(), blockWords, 0, BLOCK_LEN, flags | PARENT);
    }

    // One round of the compression function; `schedule` selects which message words feed each G.
    private static void round(final int[] state, final int[] msg, final byte[] schedule) {
        // Mix the columns.
        g(state, 0, 4, 8, 12, msg[schedule[0]], msg[schedule[1]]);
        g(state, 1, 5, 9, 13, msg[schedule[2]], msg[schedule[3]]);
        g(state, 2, 6, 10, 14, msg[schedule[4]], msg[schedule[5]]);
        g(state, 3, 7, 11, 15, msg[schedule[6]], msg[schedule[7]]);

        // Mix the diagonals.
        g(state, 0, 5, 10, 15, msg[schedule[8]], msg[schedule[9]]);
        g(state, 1, 6, 11, 12, msg[schedule[10]], msg[schedule[11]]);
        g(state, 2, 7, 8, 13, msg[schedule[12]], msg[schedule[13]]);
        g(state, 3, 4, 9, 14, msg[schedule[14]], msg[schedule[15]]);
    }

    // Reads a 32-bit little-endian word from buf starting at off.
    private static int unpackInt(final byte[] buf, final int off) {
        return buf[off] & 0xFF | (buf[off + 1] & 0xFF) << 8 | (buf[off + 2] & 0xFF) << 16 | (buf[off + 3] & 0xFF) << 24;
    }

    // Reads nrInts consecutive little-endian words from the start of buf.
    private static int[] unpackInts(final byte[] buf, final int nrInts) {
        final int[] values = new int[nrInts];
        for (int i = 0, off = 0; i < nrInts; i++, off += Integer.BYTES) {
            values[i] = unpackInt(buf, off);
        }
        return values;
    }

    // All hashing state; the public API below delegates to it after argument validation.
    private final EngineState engineState;

    private Blake3(final int[] key, final int flags) {
        engineState = new EngineState(key, flags);
    }

    /**
     * Finalizes hash output data that depends on the sequence of updated bytes preceding this invocation and any
     * previously finalized bytes. Note that this can finalize up to 2<sup>64</sup> bytes per instance.
     *
     * @param out destination array to finalize bytes into
     * @return {@code this} instance.
     * @throws NullPointerException if out is null
     */
    public Blake3 doFinalize(final byte[] out) {
        return doFinalize(out, 0, out.length);
    }

    /**
     * Finalizes an arbitrary number of bytes into the provided output array that depends on the sequence of previously
     * updated and finalized bytes. Note that this can finalize up to 2<sup>64</sup> bytes per instance.
     *
     * @param out    destination array to finalize bytes into
     * @param offset where in the array to begin writing bytes to
     * @param length number of bytes to finalize
     * @return {@code this} instance.
     * @throws NullPointerException      if out is null
     * @throws IndexOutOfBoundsException if offset or length are negative or if offset + length is greater than the
     *                                   length of the provided array
     */
    public Blake3 doFinalize(final byte[] out, final int offset, final int length) {
        checkBufferArgs(out, offset, length);
        engineState.outputHash(out, offset, length);
        return this;
    }

    /**
     * Squeezes and returns an arbitrary number of bytes dependent on the sequence of previously absorbed and squeezed bytes.
     *
     * @param nrBytes number of bytes to finalize
     * @return requested number of finalized bytes
     * @throws IllegalArgumentException if nrBytes is negative
     */
    public byte[] doFinalize(final int nrBytes) {
        if (nrBytes < 0) {
            throw new IllegalArgumentException("Requested bytes must be non-negative");
        }
        final byte[] hash = new byte[nrBytes];
        doFinalize(hash);
        return hash;
    }

    /**
     * Resets this instance back to its initial state when it was first constructed.
     * @return {@code this} instance.
     */
    public Blake3 reset() {
        engineState.reset();
        return this;
    }

    /**
     * Updates this hash state using the provided bytes.
     *
     * @param in source array to update data from
     * @return {@code this} instance.
     * @throws NullPointerException if in is null
     */
    public Blake3 update(final byte[] in) {
        return update(in, 0, in.length);
    }

    /**
     * Updates this hash state using the provided bytes at an offset.
     *
     * @param in     source array to update data from
     * @param offset where in the array to begin reading bytes
     * @param length number of bytes to update
     * @return {@code this} instance.
     * @throws NullPointerException      if in is null
     * @throws IndexOutOfBoundsException if offset or length are negative or if offset + length is greater than the
     *                                   length of the provided array
     */
    public Blake3 update(final byte[] in, final int offset, final int length) {
        checkBufferArgs(in, offset, length);
        engineState.inputData(in, offset, length);
        return this;
    }

}