mirror of https://github.com/taigrr/arc
synced 2025-01-18 04:33:13 -08:00

initial import (commit 0075ef607f)

FORMAT.md (new file, 90 lines)
# arc - disk format

arc archives are tar archives compressed with gzip and then encrypted
with the XChaCha20 + Poly1305 authenticated encryption mode using a
key derived in one of three ways:

1. from a password using the Argon2 KDF
2. from a static-ephemeral ECDH key exchange
3. from a random key split into n shards

The on-disk format begins with a 1-byte disk format version V followed
by a 1-byte archive type T, then a type-specific number of bytes, a
16-byte Poly1305 authentication tag, a 24-byte cryptographically secure
random nonce, and finally the encrypted data.

All numbers are stored in little-endian format.
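As an illustration of this framing, the sketch below reads the version, type,
type-specific bytes, tag, and nonce with plain `io.ReadFull` calls. It is not
arc's source: the names are illustrative, the 56-byte size assumed for the
X448 ephemeral public key is not stated in this document, and the shard length
for type 3 is left unhandled because the document does not fix it.

```go
// Sketch only: parse the common arc header framing described above.
package arc

import (
	"fmt"
	"io"
)

type Header struct {
	Version byte     // disk format version V
	Type    byte     // archive type T: 1 password, 2 curve448, 3 shard
	Params  []byte   // type-specific bytes (see the sections below)
	Tag     [16]byte // Poly1305 authentication tag
	Nonce   [24]byte // 24-byte random XChaCha20 nonce
}

// ReadHeader reads everything that precedes the encrypted data stream.
func ReadHeader(r io.Reader) (*Header, error) {
	var h Header
	var vt [2]byte
	if _, err := io.ReadFull(r, vt[:]); err != nil {
		return nil, err
	}
	h.Version, h.Type = vt[0], vt[1]

	var paramLen int
	switch h.Type {
	case 1: // password: I (uint32 LE) + M (uint32 LE) + 32-byte Argon2 salt
		paramLen = 4 + 4 + 32
	case 2: // curve448: ephemeral X448 public key (56 bytes, an assumption)
		paramLen = 56
	case 3: // shard: 1-byte shard number + shard; length not fixed here
		return nil, fmt.Errorf("shard param size not covered by this sketch")
	default:
		return nil, fmt.Errorf("unknown archive type %d", h.Type)
	}
	h.Params = make([]byte, paramLen)
	if _, err := io.ReadFull(r, h.Params); err != nil {
		return nil, err
	}
	if _, err := io.ReadFull(r, h.Tag[:]); err != nil {
		return nil, err
	}
	if _, err := io.ReadFull(r, h.Nonce[:]); err != nil {
		return nil, err
	}
	return &h, nil // the rest of r is the encrypted gzip'd tar stream
}
```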
## Password Archive Format

The 32-byte XChaCha20Poly1305 key is generated by applying the Argon2
KDF to a user-supplied password and 32 bytes of cryptographically
secure random salt.

T = 1
I = uint32 number of iterations
M = uint32 memory usage

┌─┬─┬────┬────┬───────────────────────────────┐
│V│T│I   │M   │Salt                           │
├─┴─┴────┴────┴─┬───────────────────────┬─────┴─────────────┐
│Tag            │Nonce                  │Data···············│
├───────────────┴───────────────────────┴───────────────────┤
│···························································│
└───────────────────────────────────────────────────────────┘
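A minimal sketch of this derivation using golang.org/x/crypto/argon2 follows.
The Argon2 variant (`argon2.Key`, i.e. Argon2i), a parallelism of 1, and the
unit of the memory parameter are assumptions; this document only records I, M,
and the salt.

```go
// Sketch only: derive the 32-byte archive key from a password as above.
package arc

import (
	"crypto/rand"

	"golang.org/x/crypto/argon2"
)

// deriveKey returns the XChaCha20Poly1305 key plus the freshly generated
// 32-byte salt that is stored in the archive header alongside I and M.
func deriveKey(password []byte, iterations, memory uint32) (key, salt []byte, err error) {
	salt = make([]byte, 32)
	if _, err = rand.Read(salt); err != nil {
		return nil, nil, err
	}
	// x/crypto/argon2 interprets memory in KiB; whether arc's M field uses
	// the same unit is an assumption.
	key = argon2.Key(password, salt, iterations, memory, 1, 32)
	return key, salt, nil
}
```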
## Curve448 Archive Format

The 32-byte XChaCha20Poly1305 key results from applying BLAKE2b to the
shared secret derived from an X448 ECDH key exchange with an ephemeral
private key and a static public key. The corresponding ephemeral public
key is embedded in the archive.

T = 2

┌─┬─┬───────────────────────────────────────────────────────┐
│V│T│Ephemeral Public Key                                   │
├─┴─┴───────────┬───────────────────────┬───────────────────┤
│Tag            │Nonce                  │Data···············│
├───────────────┴───────────────────────┴───────────────────┤
│···························································│
└───────────────────────────────────────────────────────────┘
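The sender-side derivation might look like the sketch below, using
github.com/cloudflare/circl/dh/x448 and golang.org/x/crypto/blake2b as
stand-ins for the x448 and blake2b packages credited in arc's NOTICE; the
256-bit BLAKE2b output length is also an assumption.

```go
// Sketch of the sender-side key agreement described above; library choices
// and the BLAKE2b-256 output size are assumptions, not arc's exact code.
package arc

import (
	"crypto/rand"
	"errors"

	"github.com/cloudflare/circl/dh/x448"
	"golang.org/x/crypto/blake2b"
)

// senderKey returns the 32-byte archive key and the ephemeral public key
// that is embedded in the header so the key holder can repeat the exchange.
func senderKey(staticPub x448.Key) (key [32]byte, ephPub x448.Key, err error) {
	var ephPriv x448.Key
	if _, err = rand.Read(ephPriv[:]); err != nil {
		return
	}
	x448.KeyGen(&ephPub, &ephPriv)

	var shared x448.Key
	if !x448.Shared(&shared, &ephPriv, &staticPub) {
		err = errors.New("x448: invalid static public key")
		return
	}
	key = blake2b.Sum256(shared[:]) // hash the shared secret down to 32 bytes
	return
}
```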
## Shard Archive Format

The 32-byte XChaCha20Poly1305 key is cryptographically secure random
bytes split into n shards using Shamir's Secret Sharing algorithm.
One archive is generated for each of the n shards, and k archives must
be present to recreate the key and decrypt the archive.

T = 3
n = shard number

┌─┬─┬─┬───────────────────────────────┐
│V│T│n│Shard                          │
├─┴─┴─┴─────────┬─────────────────────┴─┬───────────────────┐
│Tag            │Nonce                  │Data···············│
├───────────────┴───────────────────────┴───────────────────┤
│···························································│
└───────────────────────────────────────────────────────────┘
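For illustration, generating and splitting the key could look like the sketch
below, using github.com/codahale/sss, the Shamir implementation credited in
arc's NOTICE; how arc encodes each shard into the header is not restated here.

```go
// Sketch only: generate the random archive key and split it into n shards,
// any k of which recreate it; arc's own wiring may differ.
package arc

import (
	"crypto/rand"

	"github.com/codahale/sss"
)

// shardKey returns the 32-byte archive key and its shards keyed by the
// shard number n that is written into each archive's header.
func shardKey(n, k byte) (key []byte, shards map[byte][]byte, err error) {
	key = make([]byte, 32)
	if _, err = rand.Read(key); err != nil {
		return nil, nil, err
	}
	shards, err = sss.Split(n, k, key)
	return key, shards, err
}
```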
## Curve448 Key Format

arc curve448 public and private keys are encrypted with XChaCha20 +
Poly1305 using a key derived by applying the Argon2 KDF to a
password and 32 bytes of cryptographically secure random salt.

T = public = 1, private = 2
I = uint32 number of iterations
M = uint32 memory usage

┌─┬─┬────┬────┬───────────────────────────────┐
│V│T│I   │M   │Salt                           │
├─┴─┴────┴────┴─┬───────────────────────┬─────┴─────────────┐
│Tag            │Nonce                  │Key················│
├───────────────┴───────────────────────┴───────────────────┤
│···························································│
└───────────────────────────────────────────────────────────┘

Private keys use a user-supplied password while public keys use an
empty string.
LICENSE (new file, 621 lines)
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
NOTICE (new file, 268 lines)
|
||||
arc - Copyright (C) 2016 - Will Glozer
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: chacha20, poly1305 ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
To the extent possible under law, Yawning Angel has waived all copyright
|
||||
and related or neighboring rights to chacha20, using the Creative
|
||||
Commons "CC0" public domain dedication. See LICENSE or
|
||||
<http://creativecommons.org/publicdomain/zero/1.0/> for full details.
|
||||
|
||||
To the extent possible under law, Yawning Angel waived all copyright
|
||||
and related or neighboring rights to poly1305, using the creative
|
||||
commons "CC0" public domain dedication. See LICENSE or
|
||||
<http://creativecommons.org/publicdomain/zero/1.0/> for full details.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: x448 ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2011 Stanford University.
|
||||
Copyright (c) 2014-2015 Cryptography Research, Inc.
|
||||
Copyright (c) 2015 Yawning Angel.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: blake2b ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
Written in 2012 by Dmitry Chestnykh.
|
||||
|
||||
To the extent possible under law, the author have dedicated all copyright
|
||||
and related and neighboring rights to this software to the public domain
|
||||
worldwide. This software is distributed without any warranty.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: sss ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Coda Hale
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: argon2 ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
Copyright © 2015 Andrew Ekstedt
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: compress ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: cpuid ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Klaus Post
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: crc32 ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
Copyright (c) 2015 Klaus Post
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: go-flags ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
Copyright (c) 2012 Jesse van den Kieboom. All rights reserved.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ NOTICE: golang.org/x/crypto/ssh/terminal ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
||||
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ END OF NOTICE ┃
|
||||
┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
README.md (new file, 144 lines)
# arc - secure file archiver

arc is a file archiver designed to manage secure and stable archives
suitable for storage and transmission. arc archives are standard tar
archives compressed with gzip and then encrypted with the XChaCha20 +
Poly1305 authenticated encryption mode.

arc is distributed as open source code and static executables with
no external dependencies.

## Security

arc archives are designed for secure storage and transmission and
must not allow decryption or tampering by an attacker without the
appropriate encryption key.

However, arc has not been subject to peer review and the specific
algorithm combinations used have not been standardized.

arc archives are encrypted with XChaCha20 + Poly1305 using the
same algorithm as NaCl's secretbox but substituting XChaCha20 for
XSalsa20. This algorithm is similar to the ChaCha20 + Poly1305
AEAD mode defined in RFC 7539 but uses a longer random nonce and
does not include lengths in the authentication tag computation.
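To make the secretbox comparison concrete, this is what the sealing step looks
like with NaCl's actual secretbox from golang.org/x/crypto/nacl/secretbox. It
shows the shape of the construction only: arc substitutes XChaCha20 for
XSalsa20 as noted above, so this snippet does not produce arc-compatible
output.

```go
// Illustration of the secretbox-style construction arc follows; x/crypto's
// secretbox uses XSalsa20 where arc uses XChaCha20.
package arc

import (
	"crypto/rand"

	"golang.org/x/crypto/nacl/secretbox"
)

// seal encrypts plaintext under key with a fresh random 24-byte nonce.
// The output is the 16-byte Poly1305 tag followed by the ciphertext.
func seal(key *[32]byte, plaintext []byte) (nonce [24]byte, boxed []byte, err error) {
	if _, err = rand.Read(nonce[:]); err != nil {
		return
	}
	boxed = secretbox.Seal(nil, plaintext, &nonce, key)
	return
}
```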
The XChaCha20 + Poly1305 key is derived in one of three ways:
|
||||
|
||||
1. from a password using the Argon2 KDF
|
||||
2. from a static-ephemeral ECDH key exchange
|
||||
3. from a random key split into n shards
|
||||
|
||||
See the Archive sections below for details of each.

## Stability

arc archives are designed for long-term storage and their content
should be extractable using hardware and software that did not
exist at the time the archive was created.

A decrypted archive is a standard gzip-compressed tar archive for
which there exists a wide variety of open source tools & libraries
capable of reading and extracting its contents. Should that fail,
the tar and gzip formats are well documented and reasonably simple
to implement.
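
For example, a decrypted archive can be listed with nothing but the Go
standard library; the file name below stands in for a hypothetical,
already-decrypted stream:

```go
// Sketch: walk an already-decrypted arc archive, which at this point is an
// ordinary gzip-compressed tar stream.
package main

import (
    "archive/tar"
    "compress/gzip"
    "fmt"
    "io"
    "os"
)

func main() {
    f, err := os.Open("archive.tar.gz") // hypothetical decrypted stream
    if err != nil {
        panic(err)
    }
    defer f.Close()

    gz, err := gzip.NewReader(f)
    if err != nil {
        panic(err)
    }

    tr := tar.NewReader(gz)
    for {
        hdr, err := tr.Next()
        if err == io.EOF {
            break
        }
        if err != nil {
            panic(err)
        }
        fmt.Println(hdr.Name, hdr.Size)
    }
}
```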

Decryption is more difficult due to rapid advances in the state of
the art and arc's desire for strong security. However, portable open
source C implementations of each algorithm are available, and the
implementations arc uses are written in Go, a language designed for
long-term backwards compatibility.

See the Compatibility section which follows for important caveats
and read FORMAT for the specific disk format arc uses as a header
for the encrypted tar+gzip stream.

## Compatibility

arc releases follow the semantic versioning scheme and the major
version will be incremented when the on-disk format changes.

Each release of arc will support a single version of the on-disk
format and any security flaws will cause a new release with the
version incremented and support for the flawed method dropped.

This means future versions of arc may not be capable of extracting
old archives, so copies of arc in binary and/or source form should
be kept alongside the archives themselves.

## Password Archives

A password, cost parameters, and cryptographically secure random salt
are used as input to the Argon2 password hashing function to derive
the encryption key used to encrypt & decrypt the archive.
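
A minimal sketch of that derivation, using the same argon2.Key call arc
makes in archive.go; the iteration and memory values here are
illustrative, not a recommendation:

```go
// Sketch: derive a 32-byte archive key from a password and a fresh 32-byte
// salt with Argon2, mirroring the call made in archive.go. Parameter values
// are illustrative only.
package main

import (
    "crypto/rand"
    "fmt"

    "github.com/magical/argon2"
)

func main() {
    password := []byte("correct horse battery staple")

    salt := make([]byte, 32)
    if _, err := rand.Read(salt); err != nil {
        panic(err)
    }

    // argon2.Key(password, salt, iterations, parallelism, memory, keyLen)
    key, err := argon2.Key(password, salt, 3, 1, 16, 32)
    if err != nil {
        panic(err)
    }
    fmt.Printf("%x\n", key)
}
```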

## Curve448 Archives

A Curve448 key pair is generated via arc's --keygen option.

Encryption uses the public key and an ephemeral private key as input
to the X448 ECDH key exchange function and the resulting shared secret
is hashed with BLAKE2b to derive the encryption key. The corresponding
ephemeral public key is embedded in the archive and used with the
static private key to decrypt the archive.
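
A minimal sketch of the sender-side derivation, written as a fragment of
package main against the GenerateKeypair, ComputeSharedKey, and KeySize
names used elsewhere in this commit (see archive.go); it is not a
standalone program:

```go
// Sketch: derive the archive key the way Curve448Archive.Writer does. The
// returned ephemeral public key is what ends up in the archive header so the
// recipient can repeat the exchange with their static private key.
func deriveSenderKey(recipient *PublicKey) ([]byte, *PublicKey, error) {
    // Fresh ephemeral key pair, used for this archive only.
    ephemeralPublic, ephemeralPrivate, err := GenerateKeypair()
    if err != nil {
        return nil, nil, err
    }
    defer ephemeralPrivate.Zero()

    // X448 shared secret hashed with BLAKE2b down to a 32-byte key.
    key, err := ComputeSharedKey(recipient, ephemeralPrivate, KeySize)
    if err != nil {
        return nil, nil, err
    }

    return key, ephemeralPublic, nil
}
```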

This method is suitable for transmitting archives to another party or
for use on a system that may become compromised after the archive is
created.

## Shard Archives

The encryption key is cryptographically secure random bytes that are
split into n shards using Shamir's Secret Sharing algorithm. One
archive is generated for each shard and k shards must be present to
recreate the key and decrypt the archive.
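
A minimal sketch of the split and recombine steps, using the same
github.com/codahale/sss calls arc makes in archive.go; n = 3 and k = 2
are arbitrary example values:

```go
// Sketch: split a random 32-byte key into 3 shares with threshold 2, then
// recover it from any 2 of them.
package main

import (
    "bytes"
    "crypto/rand"
    "fmt"

    "github.com/codahale/sss"
)

func main() {
    key := make([]byte, 32)
    if _, err := rand.Read(key); err != nil {
        panic(err)
    }

    shares, err := sss.Split(3, 2, key) // n = 3 shares, k = 2 threshold
    if err != nil {
        panic(err)
    }

    // Pretend share 2 was lost; shares 1 and 3 are enough.
    subset := map[byte][]byte{1: shares[1], 3: shares[3]}

    recovered := sss.Combine(subset)
    fmt.Println("key recovered:", bytes.Equal(recovered, key))
}
```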

This method is most suitable for small archives that will be stored
or transmitted via multiple channels where up to k - 1 shards can be
compromised with no loss in archive security.

## License

Copyright (C) 2016 Will Glozer.

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

## Acknowledgments

arc contains code from a number of open source projects including
Yawning Angel's chacha20, poly1305, and x448 libraries, Dmitry
Chestnykh's blake2b, Coda Hale's sss, Andrew Ekstedt's argon2, Klaus
Post's optimized compression packages, and Jesse van den Kieboom's
go-flags. See NOTICE for license details.

## Cryptography Notice

This distribution includes cryptographic software. The country in
which you currently reside may have restrictions on the import,
possession, use, and/or re-export to another country, of encryption
software. BEFORE using any encryption software, please check your
country's laws, regulations and policies concerning the import,
possession, or use, and re-export of encryption software, to see if
this is permitted. See <http://www.wassenaar.org/> for more
information.

The U.S. Government Department of Commerce, Bureau of Industry and
Security (BIS), has classified this software as Export Commodity
Control Number (ECCN) 5D002.C.1, which includes information security
software using or performing cryptographic functions with asymmetric
algorithms. The form and manner of this distribution makes it
eligible for export under the License Exception ENC Technology
Software Unrestricted (TSU) exception (see the BIS Export
Administration Regulations, Section 740.13) for both object code and
source code.
379
archive.go
Normal file
@ -0,0 +1,379 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/rand"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"github.com/codahale/sss"
|
||||
"github.com/magical/argon2"
|
||||
"github.com/wg/arc/archive"
|
||||
"github.com/wg/arc/binary"
|
||||
)
|
||||
|
||||
const (
|
||||
Version = 0x01
|
||||
Password = 0x01
|
||||
Curve448 = 0x02
|
||||
Shard = 0x03
|
||||
KeySize = archive.KeySize
|
||||
)
|
||||
|
||||
type Archiver interface {
|
||||
Reader() (*Reader, error)
|
||||
Writer() (*Writer, error)
|
||||
}
|
||||
|
||||
type Reader struct {
|
||||
buffer *bufio.Reader
|
||||
files []File
|
||||
*archive.Reader
|
||||
}
|
||||
|
||||
type Writer struct {
|
||||
buffer *bufio.Writer
|
||||
files []File
|
||||
tagAt int64
|
||||
*archive.Writer
|
||||
}
|
||||
|
||||
var (
|
||||
ErrInvalidArchive = errors.New("archive: verify failed")
|
||||
ErrInvalidVersion = errors.New("archive: unsupported version")
|
||||
ErrPasswordArchive = errors.New("archive: password archive")
|
||||
ErrCurve448Archive = errors.New("archive: curve448 archive")
|
||||
ErrShardArchive = errors.New("archive: shard archive")
|
||||
)
|
||||
|
||||
// A PasswordArchive is encrypted with a key derived from a password,
|
||||
// cost parameters, and cryptographically secure random salt using the
|
||||
// Argon2 password hashing function.
|
||||
type PasswordArchive struct {
|
||||
Version byte
|
||||
Type byte
|
||||
Iterations uint32
|
||||
Memory uint32
|
||||
Salt [32]byte
|
||||
Password []byte
|
||||
File File
|
||||
}
|
||||
|
||||
func NewPasswordArchive(password []byte, iterations, memory uint32, file File) *PasswordArchive {
|
||||
return &PasswordArchive{
|
||||
Version: Version,
|
||||
Type: Password,
|
||||
Iterations: iterations,
|
||||
Memory: memory,
|
||||
Password: password,
|
||||
File: file,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *PasswordArchive) Reader() (*Reader, error) {
|
||||
err := binary.Read(a.File, binary.LE, a)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch {
|
||||
case a.Version != Version:
|
||||
return nil, ErrInvalidVersion
|
||||
case a.Type == Curve448:
|
||||
return nil, ErrCurve448Archive
|
||||
case a.Type == Shard:
|
||||
return nil, ErrShardArchive
|
||||
}
|
||||
|
||||
key, err := a.Key()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newArchiveReader(key, a.File, a.File)
|
||||
}
|
||||
|
||||
func (a *PasswordArchive) Writer() (*Writer, error) {
|
||||
_, err := rand.Read(a.Salt[:])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = binary.Write(a.File, binary.LE, a)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
key, err := a.Key()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newArchiveWriter(key, a.File, a.File)
|
||||
}
|
||||
|
||||
func (a *PasswordArchive) Key() ([]byte, error) {
|
||||
var (
|
||||
password = a.Password
|
||||
salt = a.Salt[:]
|
||||
iterations = int(a.Iterations)
|
||||
memory = int64(a.Memory)
|
||||
)
|
||||
return argon2.Key(password, salt, iterations, 1, memory, KeySize)
|
||||
}
|
||||
|
||||
// A Curve448Archive is encrypted with a key derived from applying
|
||||
// BLAKE2b to the shared secret derived from an X448 ECDH key exchange
|
||||
// with an ephemeral private key and static public key.
|
||||
type Curve448Archive struct {
|
||||
Version byte
|
||||
Type byte
|
||||
Ephemeral PublicKey
|
||||
PublicKey *PublicKey
|
||||
PrivateKey *PrivateKey
|
||||
File File
|
||||
}
|
||||
|
||||
func NewCurve448Archive(public *PublicKey, private *PrivateKey, file File) *Curve448Archive {
|
||||
return &Curve448Archive{
|
||||
Version: Version,
|
||||
Type: Curve448,
|
||||
PublicKey: public,
|
||||
PrivateKey: private,
|
||||
File: file,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *Curve448Archive) Reader() (*Reader, error) {
|
||||
err := binary.Read(a.File, binary.LE, a)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch {
|
||||
case a.Version != Version:
|
||||
return nil, ErrInvalidVersion
|
||||
case a.Type == Password:
|
||||
return nil, ErrPasswordArchive
|
||||
case a.Type == Shard:
|
||||
return nil, ErrShardArchive
|
||||
}
|
||||
|
||||
key, err := ComputeSharedKey(&a.Ephemeral, a.PrivateKey, KeySize)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newArchiveReader(key, a.File, a.File)
|
||||
}
|
||||
|
||||
func (a *Curve448Archive) Writer() (*Writer, error) {
|
||||
ephemeralPublicKey, ephemeralPrivateKey, err := GenerateKeypair()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer ephemeralPrivateKey.Zero()
|
||||
|
||||
key, err := ComputeSharedKey(a.PublicKey, ephemeralPrivateKey, KeySize)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
a.Ephemeral = *ephemeralPublicKey
|
||||
err = binary.Write(a.File, binary.LE, a)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newArchiveWriter(key, a.File, a.File)
|
||||
}
|
||||
|
||||
// A ShardArchive is encrypted with a key consisting of cryptographically
|
||||
// secure random bytes. That key is split into n shards using Shamir's
|
||||
// Secret Sharing algorithm and one archive is generated for each shard.
|
||||
// k shards must be present to recreate the key.
|
||||
type ShardArchive struct {
|
||||
Version byte
|
||||
Type byte
|
||||
ID byte
|
||||
Share [KeySize]byte
|
||||
Threshold int
|
||||
File File
|
||||
Shards []*ShardArchive
|
||||
}
|
||||
|
||||
func NewShardArchive(threshold int, files []File) *ShardArchive {
|
||||
shards := make([]*ShardArchive, len(files))
|
||||
|
||||
for i, file := range files {
|
||||
shards[i] = &ShardArchive{
|
||||
Version: Version,
|
||||
Type: Shard,
|
||||
Threshold: threshold,
|
||||
File: file,
|
||||
Shards: shards,
|
||||
}
|
||||
}
|
||||
|
||||
return shards[0]
|
||||
}
|
||||
|
||||
func (a *ShardArchive) Reader() (*Reader, error) {
|
||||
shares := make(map[byte][]byte, len(a.Shards))
|
||||
|
||||
for _, shard := range a.Shards {
|
||||
err := binary.Read(shard.File, binary.LE, shard)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch {
|
||||
case shard.Version != Version:
|
||||
return nil, ErrInvalidVersion
|
||||
case shard.Type == Password:
|
||||
return nil, ErrPasswordArchive
|
||||
case shard.Type == Curve448:
|
||||
return nil, ErrCurve448Archive
|
||||
}
|
||||
|
||||
shares[shard.ID] = shard.Share[:]
|
||||
}
|
||||
|
||||
key := sss.Combine(shares)
|
||||
|
||||
return newArchiveReader(key, a.File, a.Files()...)
|
||||
}
|
||||
|
||||
func (a *ShardArchive) Writer() (*Writer, error) {
|
||||
var key [32]byte
|
||||
|
||||
_, err := rand.Read(key[:])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
n := byte(len(a.Shards))
|
||||
k := byte(a.Threshold)
|
||||
|
||||
shares, err := sss.Split(n, k, key[:])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
writers := make([]io.Writer, len(a.Shards))
|
||||
for id, share := range shares {
|
||||
index := id - 1
|
||||
shard := a.Shards[index]
|
||||
|
||||
shard.ID = id
|
||||
copy(shard.Share[:], share)
|
||||
|
||||
err = binary.Write(shard.File, binary.LE, shard)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
writers[index] = shard.File
|
||||
}
|
||||
w := io.MultiWriter(writers...)
|
||||
|
||||
return newArchiveWriter(key[:], w, a.Files()...)
|
||||
}
|
||||
|
||||
func (a *ShardArchive) Files() []File {
|
||||
files := make([]File, len(a.Shards))
|
||||
for i, shard := range a.Shards {
|
||||
files[i] = shard.File
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
func newArchiveReader(key []byte, raw io.Reader, files ...File) (*Reader, error) {
|
||||
switch valid, err := verify(key, files[0]); {
|
||||
case err != nil:
|
||||
return nil, err
|
||||
case !valid:
|
||||
return nil, ErrInvalidArchive
|
||||
}
|
||||
|
||||
buffer := bufio.NewReader(raw)
|
||||
r, err := archive.NewReader(buffer, key)
|
||||
|
||||
return &Reader{
|
||||
Reader: r,
|
||||
buffer: buffer,
|
||||
files: files,
|
||||
}, err
|
||||
}
|
||||
|
||||
func newArchiveWriter(key []byte, raw io.Writer, files ...File) (*Writer, error) {
|
||||
tagAt, err := files[0].Seek(0, 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buffer := bufio.NewWriter(raw)
|
||||
w, err := archive.NewWriter(buffer, key)
|
||||
|
||||
return &Writer{
|
||||
Writer: w,
|
||||
buffer: buffer,
|
||||
files: files,
|
||||
tagAt: tagAt,
|
||||
}, err
|
||||
}
|
||||
|
||||
func verify(key []byte, file File) (bool, error) {
|
||||
p, err := file.Seek(0, 1)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
defer file.Seek(p, 0)
|
||||
buffer := bufio.NewReader(file)
|
||||
return archive.Verify(buffer, key)
|
||||
}
|
||||
|
||||
func (r *Reader) Close() error {
|
||||
for _, f := range r.files {
|
||||
err := f.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *Writer) Close() error {
|
||||
tag, err := w.Finish()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = w.buffer.Flush()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, f := range w.files {
|
||||
_, err := f.WriteAt(tag, w.tagAt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = f.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type File interface {
|
||||
io.Reader
|
||||
io.Writer
|
||||
io.WriterAt
|
||||
io.Seeker
|
||||
io.Closer
|
||||
}
|
76
archive/archive.go
Normal file
@ -0,0 +1,76 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package archive
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/subtle"
|
||||
"io"
|
||||
|
||||
"github.com/wg/ecies/xchacha20poly1305"
|
||||
)
|
||||
|
||||
const KeySize = xchacha20poly1305.KeySize
|
||||
|
||||
type Archive struct {
|
||||
xchacha20poly1305.XChaCha20Poly1305
|
||||
tag [xchacha20poly1305.TagSize]byte
|
||||
io.Reader
|
||||
io.Writer
|
||||
}
|
||||
|
||||
func NewArchiveFromReader(r io.Reader, key []byte) (*Archive, error) {
|
||||
var nonce [xchacha20poly1305.NonceSize]byte
|
||||
a := &Archive{Reader: r}
|
||||
|
||||
if _, err := io.ReadFull(r, a.tag[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := io.ReadFull(r, nonce[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err := a.Init(key, nonce[:])
|
||||
return a, err
|
||||
}
|
||||
|
||||
func NewArchiveForWriter(w io.Writer, key []byte) (*Archive, error) {
|
||||
var nonce [xchacha20poly1305.NonceSize]byte
|
||||
a := &Archive{Writer: w}
|
||||
|
||||
if _, err := rand.Read(nonce[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := a.Init(key, nonce[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := w.Write(a.tag[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := w.Write(nonce[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return a, nil
|
||||
}
|
||||
|
||||
func (a *Archive) Read(b []byte) (int, error) {
|
||||
n, err := a.Reader.Read(b)
|
||||
a.Decrypt(b[:n], b[:n])
|
||||
return n, err
|
||||
}
|
||||
|
||||
func (a *Archive) Write(b []byte) (int, error) {
|
||||
a.Encrypt(b, b)
|
||||
return a.Writer.Write(b)
|
||||
}
|
||||
|
||||
func (a *Archive) Verify() bool {
|
||||
var tag [xchacha20poly1305.TagSize]byte
|
||||
a.Tag(tag[:0])
|
||||
return subtle.ConstantTimeCompare(a.tag[:], tag[:]) == 1
|
||||
}
|
170
archive/archive_test.go
Normal file
@ -0,0 +1,170 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package archive
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCreateArchive(t *testing.T) {
|
||||
entries := []*tar.Header{
|
||||
{Name: "foo", Size: 0},
|
||||
{Name: "bar", Size: 1<<16 - 1},
|
||||
{Name: "baz", Size: 64},
|
||||
}
|
||||
key := randomKey()
|
||||
|
||||
buf, dat, err := createArchive(key, entries)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
r, err := NewReader(buf, key)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for i, e := range entries {
|
||||
switch next, err := r.Next(); {
|
||||
case err != nil:
|
||||
t.Fatal(err)
|
||||
case e.Name != next.Name:
|
||||
t.Fatalf("expected entry name %s got %s", e.Name, next.Name)
|
||||
case e.Size != next.Size:
|
||||
t.Fatalf("expected entry size %d got %d", e.Size, next.Size)
|
||||
}
|
||||
|
||||
switch b, err := ioutil.ReadAll(r); {
|
||||
case err != nil:
|
||||
t.Fatal(err)
|
||||
case int(e.Size) != len(b):
|
||||
t.Fatalf("expected to read %d bytes got %d", e.Size, len(b))
|
||||
case !bytes.Equal(b, dat[i]):
|
||||
t.Fatalf("expected content '%v' got '%v'", b, dat[i])
|
||||
}
|
||||
}
|
||||
|
||||
if !r.Verify() {
|
||||
t.Fatal("archive verify failed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifyArchive(t *testing.T) {
|
||||
entries := []*tar.Header{
|
||||
{Name: "foo", Size: 32},
|
||||
{Name: "bar", Size: 64},
|
||||
}
|
||||
key := randomKey()
|
||||
|
||||
buf, _, err := createArchive(key, entries)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if valid, _ := Verify(buf, key); !valid {
|
||||
t.Fatal("archive verify failed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifyFailWrongKey(t *testing.T) {
|
||||
entries := []*tar.Header{
|
||||
{Name: "foo", Size: 32},
|
||||
{Name: "bar", Size: 64},
|
||||
}
|
||||
key := randomKey()
|
||||
|
||||
buf, _, err := createArchive(key, entries)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
key[0] = ^key[0]
|
||||
|
||||
if valid, _ := Verify(buf, key); valid {
|
||||
t.Fatal("verified invalid archive")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifyFailByteFlip(t *testing.T) {
|
||||
entries := []*tar.Header{
|
||||
{Name: "foo", Size: 32},
|
||||
{Name: "bar", Size: 64},
|
||||
}
|
||||
key := randomKey()
|
||||
|
||||
buf, _, err := createArchive(key, entries)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
archive := buf.Bytes()
|
||||
for i, b := range archive {
|
||||
archive[i] = ^archive[i]
|
||||
|
||||
r := bytes.NewReader(archive)
|
||||
if valid, _ := Verify(r, key); valid {
|
||||
t.Fatal("verified invalid archive at", i)
|
||||
}
|
||||
|
||||
archive[i] = b
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriterInvariants(t *testing.T) {
|
||||
_, _, err := createArchive(make([]byte, 31), nil)
|
||||
if err == nil {
|
||||
t.Fatalf("created archive with 31 byte key")
|
||||
}
|
||||
}
|
||||
|
||||
func createArchive(key []byte, entries []*tar.Header) (*Buffer, [][]byte, error) {
|
||||
buf := &Buffer{}
|
||||
|
||||
arc, err := NewWriter(buf, key)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
dat := make([][]byte, len(entries))
|
||||
|
||||
for i, e := range entries {
|
||||
err := arc.Add(e)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
dat[i] = make([]byte, e.Size)
|
||||
_, err = rand.Read(dat[i])
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
err = arc.Copy(bytes.NewReader(dat[i]), e.Size)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
}
|
||||
|
||||
tag, _ := arc.Finish()
|
||||
copy(buf.Bytes()[0:16], tag)
|
||||
|
||||
return buf, dat, nil
|
||||
}
|
||||
|
||||
type Buffer struct {
|
||||
bytes.Buffer
|
||||
}
|
||||
|
||||
func randomKey() []byte {
|
||||
key := make([]byte, 32)
|
||||
_, err := io.ReadFull(rand.Reader, key)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return key
|
||||
}
|
47
archive/reader.go
Normal file
@ -0,0 +1,47 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package archive
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"compress/gzip"
|
||||
"io"
|
||||
)
|
||||
|
||||
type Reader struct {
|
||||
archiver *tar.Reader
|
||||
compressor *gzip.Reader
|
||||
archive *Archive
|
||||
}
|
||||
|
||||
func NewReader(r io.Reader, key []byte) (*Reader, error) {
|
||||
archive, err := NewArchiveFromReader(r, key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
compressor, err := gzip.NewReader(archive)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
archiver := tar.NewReader(compressor)
|
||||
|
||||
return &Reader{
|
||||
archiver: archiver,
|
||||
compressor: compressor,
|
||||
archive: archive,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *Reader) Next() (*tar.Header, error) {
|
||||
return r.archiver.Next()
|
||||
}
|
||||
|
||||
func (r *Reader) Read(b []byte) (int, error) {
|
||||
return r.archiver.Read(b)
|
||||
}
|
||||
|
||||
func (r *Reader) Verify() bool {
|
||||
return r.archive.Verify()
|
||||
}
|
22
archive/verify.go
Normal file
@ -0,0 +1,22 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package archive
|
||||
|
||||
import (
|
||||
"io"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
func Verify(r io.Reader, key []byte) (bool, error) {
|
||||
archive, err := NewArchiveFromReader(r, key)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
_, err = io.Copy(ioutil.Discard, archive)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return archive.Verify(), nil
|
||||
}
|
63
archive/writer.go
Normal file
@ -0,0 +1,63 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package archive
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"github.com/klauspost/compress/gzip"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrShortCopy = errors.New("archive: short copy")
|
||||
)
|
||||
|
||||
type Writer struct {
|
||||
archiver *tar.Writer
|
||||
compressor *gzip.Writer
|
||||
archive *Archive
|
||||
}
|
||||
|
||||
func NewWriter(w io.Writer, key []byte) (*Writer, error) {
|
||||
archive, err := NewArchiveForWriter(w, key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
compressor := gzip.NewWriter(archive)
|
||||
archiver := tar.NewWriter(compressor)
|
||||
|
||||
return &Writer{
|
||||
archiver: archiver,
|
||||
compressor: compressor,
|
||||
archive: archive,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (w *Writer) Add(header *tar.Header) error {
|
||||
return w.archiver.WriteHeader(header)
|
||||
}
|
||||
|
||||
func (w *Writer) Copy(r io.Reader, size int64) error {
|
||||
switch n, err := io.Copy(w.archiver, r); {
|
||||
case err != nil:
|
||||
return err
|
||||
case n < size:
|
||||
return ErrShortCopy
|
||||
}
|
||||
return w.archiver.Flush()
|
||||
}
|
||||
|
||||
func (w *Writer) Finish() ([]byte, error) {
|
||||
if err := w.archiver.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := w.compressor.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return w.archive.Tag(nil), nil
|
||||
}
|
398
archive_test.go
Normal file
@ -0,0 +1,398 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"encoding/binary"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
|
||||
"github.com/codahale/sss"
|
||||
"github.com/magical/argon2"
|
||||
"github.com/wg/arc/archive"
|
||||
)
|
||||
|
||||
var entries = []*tar.Header{
|
||||
{Name: "foo", Size: 0},
|
||||
{Name: "bar", Size: 1<<16 - 1},
|
||||
{Name: "baz", Size: 64},
|
||||
}
|
||||
|
||||
func TestPasswordArchive(t *testing.T) {
|
||||
arc := NewPasswordArchive([]byte("secret"), 1, 8, &Buffer{})
|
||||
dat := createArchive(t, arc)
|
||||
verifyArchive(t, arc, dat)
|
||||
}
|
||||
|
||||
func TestPasswordArchiveKey(t *testing.T) {
|
||||
var (
|
||||
password = []byte("secret")
|
||||
iterations = 1
|
||||
memory = 8
|
||||
)
|
||||
|
||||
buf := &Buffer{}
|
||||
arc := NewPasswordArchive(password, uint32(iterations), uint32(memory), buf)
|
||||
createArchive(t, arc)
|
||||
|
||||
buf.Rewind()
|
||||
buf.Seek(2+4+4+32, 0)
|
||||
|
||||
key, err := argon2.Key(password, arc.Salt[:], int(iterations), 1, int64(memory), KeySize)
|
||||
if err != nil {
|
||||
t.Fatal("password key derivation failed", err)
|
||||
}
|
||||
|
||||
if valid, err := archive.Verify(buf, key); !valid || err != nil {
|
||||
t.Fatal("password archive key incorrect")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPasswordArchiveFormat(t *testing.T) {
|
||||
buf := &Buffer{}
|
||||
arc := NewPasswordArchive([]byte("secret"), 2, 16, buf)
|
||||
createArchive(t, arc)
|
||||
|
||||
if binary.LittleEndian.Uint32(buf.buffer[2:6]) != arc.Iterations {
|
||||
t.Fatal("serialized iterations incorrect")
|
||||
}
|
||||
|
||||
if binary.LittleEndian.Uint32(buf.buffer[6:10]) != arc.Memory {
|
||||
t.Fatal("serialized memory incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.buffer[10:42], arc.Salt[:]) {
|
||||
t.Fatal("serialized salt incorrect")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrongPassword(t *testing.T) {
|
||||
arc := NewPasswordArchive([]byte("secret"), 1, 8, &Buffer{})
|
||||
createArchive(t, arc)
|
||||
arc.Password = []byte("terces")
|
||||
ensureInvalid(t, arc)
|
||||
}
|
||||
|
||||
func TestCurve448Archive(t *testing.T) {
|
||||
public, private := keypair(t)
|
||||
arc := NewCurve448Archive(public, private, &Buffer{})
|
||||
dat := createArchive(t, arc)
|
||||
verifyArchive(t, arc, dat)
|
||||
}
|
||||
|
||||
func TestCurve448ArchiveKey(t *testing.T) {
|
||||
public, private := keypair(t)
|
||||
buf := &Buffer{}
|
||||
arc := NewCurve448Archive(public, nil, buf)
|
||||
createArchive(t, arc)
|
||||
|
||||
buf.Rewind()
|
||||
buf.Seek(2+56, 0)
|
||||
|
||||
key, err := ComputeSharedKey(&arc.Ephemeral, private, KeySize)
|
||||
if err != nil {
|
||||
t.Fatal("curve448 key derivation failed", err)
|
||||
}
|
||||
|
||||
if valid, err := archive.Verify(buf, key); !valid || err != nil {
|
||||
t.Fatal("curve448 archive key incorrect")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCurve448ArchiveFormat(t *testing.T) {
|
||||
public, private := keypair(t)
|
||||
buf := &Buffer{}
|
||||
arc := NewCurve448Archive(public, private, buf)
|
||||
createArchive(t, arc)
|
||||
|
||||
if !bytes.Equal(buf.buffer[2:58], arc.Ephemeral[:]) {
|
||||
t.Fatal("serialized ephemeral public key incorrect")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrongPrivateKey(t *testing.T) {
|
||||
public, _ := keypair(t)
|
||||
_, private := keypair(t)
|
||||
arc := NewCurve448Archive(public, private, &Buffer{})
|
||||
createArchive(t, arc)
|
||||
ensureInvalid(t, arc)
|
||||
}
|
||||
|
||||
func TestShardArchive(t *testing.T) {
|
||||
arc := NewShardArchive(2, buffers(3))
|
||||
dat := createArchive(t, arc)
|
||||
verifyArchive(t, arc, dat)
|
||||
}
|
||||
|
||||
func TestShardArchiveKey(t *testing.T) {
|
||||
arc := NewShardArchive(2, buffers(3))
|
||||
createArchive(t, arc)
|
||||
|
||||
shares := map[byte][]byte{}
|
||||
for _, shard := range arc.Shards {
|
||||
shares[shard.ID] = shard.Share[:]
|
||||
}
|
||||
key := sss.Combine(shares)
|
||||
|
||||
for _, shard := range arc.Shards {
|
||||
buf := shard.File.(*Buffer)
|
||||
buf.Rewind()
|
||||
buf.Seek(2+1+KeySize, 0)
|
||||
|
||||
if valid, err := archive.Verify(buf, key); !valid || err != nil {
|
||||
t.Fatal("shard archive key incorrect")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestShardArchiveFormat(t *testing.T) {
|
||||
arc := NewShardArchive(2, buffers(3))
|
||||
createArchive(t, arc)
|
||||
|
||||
for _, shard := range arc.Shards {
|
||||
buf := shard.File.(*Buffer)
|
||||
|
||||
if buf.buffer[2] != shard.ID {
|
||||
t.Fatal("serialized shard ID incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.buffer[3:3+KeySize], shard.Share[:]) {
|
||||
t.Fatal("serialized shard share incorrect")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMissingShard(t *testing.T) {
|
||||
arc := NewShardArchive(2, buffers(2))
|
||||
createArchive(t, arc)
|
||||
arc.Shards = arc.Shards[:1]
|
||||
ensureInvalid(t, arc)
|
||||
}
|
||||
|
||||
func TestArchiveHeader(t *testing.T) {
|
||||
public, private := keypair(t)
|
||||
var (
|
||||
password = NewPasswordArchive([]byte("secret"), 1, 8, &Buffer{})
|
||||
curve448 = NewCurve448Archive(public, private, &Buffer{})
|
||||
shard = NewShardArchive(2, buffers(2))
|
||||
)
|
||||
|
||||
createArchive(t, password)
|
||||
createArchive(t, curve448)
|
||||
createArchive(t, shard)
|
||||
|
||||
switch {
|
||||
case password.File.(*Buffer).buffer[0] != Version:
|
||||
t.Fatal("wrong version in password archive")
|
||||
case password.File.(*Buffer).buffer[1] != Password:
|
||||
t.Fatal("wrong type in password archive")
|
||||
case curve448.File.(*Buffer).buffer[0] != Version:
|
||||
t.Fatal("wrong version in curve448 archive")
|
||||
case curve448.File.(*Buffer).buffer[1] != Curve448:
|
||||
t.Fatal("wrong type in curve448 archive")
|
||||
}
|
||||
|
||||
for _, s := range shard.Shards {
|
||||
switch {
|
||||
case s.File.(*Buffer).buffer[0] != Version:
|
||||
t.Fatal("wrong version in shard archive")
|
||||
case s.File.(*Buffer).buffer[1] != Shard:
|
||||
t.Fatal("wrong type in shard archive")
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrongArchiveType(t *testing.T) {
|
||||
public, private := keypair(t)
|
||||
var (
|
||||
password = NewPasswordArchive([]byte("secret"), 1, 8, &Buffer{})
|
||||
curve448 = NewCurve448Archive(public, private, &Buffer{})
|
||||
shard = NewShardArchive(2, buffers(2))
|
||||
)
|
||||
|
||||
createArchive(t, password)
|
||||
createArchive(t, curve448)
|
||||
createArchive(t, shard)
|
||||
|
||||
ensureInvalidType(t, NewPasswordArchive([]byte("secret"), 1, 8, curve448.File))
|
||||
ensureInvalidType(t, NewPasswordArchive([]byte("secret"), 1, 8, shard.Shards[0].File))
|
||||
ensureInvalidType(t, NewCurve448Archive(public, private, password.File))
|
||||
ensureInvalidType(t, NewCurve448Archive(public, private, shard.Shards[0].File))
|
||||
ensureInvalidType(t, NewShardArchive(2, []File{password.File}))
|
||||
ensureInvalidType(t, NewShardArchive(2, []File{curve448.File}))
|
||||
}
|
||||
|
||||
func createArchive(t *testing.T, a Archiver) [][]byte {
|
||||
dat := make([][]byte, len(entries))
|
||||
|
||||
writer, err := a.Writer()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for i, e := range entries {
|
||||
err := writer.Add(e)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
dat[i] = make([]byte, e.Size)
|
||||
_, err = rand.Read(dat[i])
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
err = writer.Copy(bytes.NewReader(dat[i]), e.Size)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
writer.Close()
|
||||
|
||||
switch a := a.(type) {
|
||||
case *PasswordArchive:
|
||||
a.File.(*Buffer).Rewind()
|
||||
case *Curve448Archive:
|
||||
a.File.(*Buffer).Rewind()
|
||||
case *ShardArchive:
|
||||
for _, s := range a.Shards {
|
||||
s.File.(*Buffer).Rewind()
|
||||
}
|
||||
}
|
||||
|
||||
return dat
|
||||
}
|
||||
|
||||
func verifyArchive(t *testing.T, a Archiver, dat [][]byte) {
|
||||
reader, err := a.Reader()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for i, e := range entries {
|
||||
switch next, err := reader.Next(); {
|
||||
case err != nil:
|
||||
t.Fatal(err)
|
||||
case e.Name != next.Name:
|
||||
t.Fatalf("expected entry name %s got %s", e.Name, next.Name)
|
||||
case e.Size != next.Size:
|
||||
t.Fatalf("expected entry size %d got %d", e.Size, next.Size)
|
||||
}
|
||||
|
||||
switch b, err := ioutil.ReadAll(reader); {
|
||||
case err != nil:
|
||||
t.Fatal(err)
|
||||
case int(e.Size) != len(b):
|
||||
t.Fatalf("expected to read %d bytes got %d", e.Size, len(b))
|
||||
case !bytes.Equal(b, dat[i]):
|
||||
t.Fatalf("expected content '%v' got '%v'", b, dat[i])
|
||||
}
|
||||
}
|
||||
|
||||
if !reader.Verify() {
|
||||
t.Fatalf("archive verify failed")
|
||||
}
|
||||
}
|
||||
|
||||
func ensureInvalid(t *testing.T, a Archiver) {
|
||||
switch _, err := a.Reader(); {
|
||||
case err != nil && err != ErrInvalidArchive:
|
||||
t.Fatal("error validating archive", err)
|
||||
case err == nil:
|
||||
t.Fatal("invalid archive verified")
|
||||
}
|
||||
}
|
||||
|
||||
func ensureInvalidType(t *testing.T, a Archiver) {
|
||||
switch _, err := a.Reader(); {
|
||||
case err == ErrPasswordArchive:
|
||||
case err == ErrCurve448Archive:
|
||||
case err == ErrShardArchive:
|
||||
case err != nil:
|
||||
t.Fatal("error checking archive type", err)
|
||||
case err == nil:
|
||||
t.Fatal("invalid archive type accepted")
|
||||
}
|
||||
|
||||
switch a := a.(type) {
|
||||
case *PasswordArchive:
|
||||
a.File.(*Buffer).Rewind()
|
||||
case *Curve448Archive:
|
||||
a.File.(*Buffer).Rewind()
|
||||
case *ShardArchive:
|
||||
for _, s := range a.Shards {
|
||||
s.File.(*Buffer).Rewind()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func keypair(t *testing.T) (*PublicKey, *PrivateKey) {
|
||||
public, private, err := GenerateKeypair()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return public, private
|
||||
}
|
||||
|
||||
func buffers(n int) []File {
|
||||
files := make([]File, n)
|
||||
for i := range files {
|
||||
files[i] = &Buffer{}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
type Buffer struct {
|
||||
buffer []byte
|
||||
offset int
|
||||
}
|
||||
|
||||
func (b *Buffer) Read(p []byte) (int, error) {
|
||||
s := b.buffer[b.offset:]
|
||||
if len(s) == 0 {
|
||||
return 0, io.EOF
|
||||
}
|
||||
n := copy(p, s)
|
||||
b.offset += n
|
||||
return n, nil
|
||||
}
|
||||
|
||||
func (b *Buffer) Write(p []byte) (int, error) {
|
||||
n := len(p)
|
||||
b.buffer = append(b.buffer, p...)
|
||||
b.offset += n
|
||||
return n, nil
|
||||
}
|
||||
|
||||
func (b *Buffer) WriteAt(p []byte, off int64) (int, error) {
|
||||
n := len(p)
|
||||
m := int(off)
|
||||
copy(b.buffer[m:m+n], p)
|
||||
return n, nil
|
||||
}
|
||||
|
||||
func (b *Buffer) Seek(offset int64, whence int) (int64, error) {
|
||||
switch whence {
|
||||
case 0:
|
||||
b.offset = int(offset)
|
||||
case 1:
|
||||
b.offset += int(offset)
|
||||
case 2:
|
||||
b.offset = len(b.buffer) - int(offset)
|
||||
}
|
||||
return int64(b.offset), nil
|
||||
}
|
||||
|
||||
func (b *Buffer) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *Buffer) Rewind() {
|
||||
b.offset = 0
|
||||
}
|
296
args.go
Normal file
@ -0,0 +1,296 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/jessevdk/go-flags"
|
||||
|
||||
"golang.org/x/crypto/ssh/terminal"
|
||||
)
|
||||
|
||||
type Args struct {
|
||||
OperationMode `group:"Archive Operation Mode"`
|
||||
OperationModifier `group:"Archive Operation Modifiers"`
|
||||
KeyManagementMode `group:"Key Management Mode"`
|
||||
SecurityOptions `group:"Archive Security Options"`
|
||||
PasswordOptions `group:"Password Options"`
|
||||
KeyManagementOptions `group:"Key Generation Options"`
|
||||
MiscOpts `group:"Misc Options"`
|
||||
Positional `positional-args:"true" required:"0"`
|
||||
}
|
||||
|
||||
type OperationMode struct {
|
||||
Create bool `short:"c" long:"create" description:"create new archive"`
|
||||
List bool `short:"t" long:"list" description:"list archive contents"`
|
||||
Extract bool `short:"x" long:"extract" description:"extract from archive"`
|
||||
}
|
||||
|
||||
type OperationModifier struct {
|
||||
File string `short:"f" long:"file" description:"archive file"`
|
||||
Shards []string ` long:"shard" description:"archive shard"`
|
||||
}
|
||||
|
||||
type SecurityOptions struct {
|
||||
Password bool `long:"password" description:"derive key from password"`
|
||||
Key string `long:"key" description:"derive key from ECDH exchange"`
|
||||
Threshold int `long:"threshold" description:"random key with SSS threshold"`
|
||||
}
|
||||
|
||||
type KeyManagementMode struct {
|
||||
Keygen bool `long:"keygen" description:"generate key pair"`
|
||||
}
|
||||
|
||||
type KeyManagementOptions struct {
|
||||
Private string `long:"private" description:"private key file"`
|
||||
Public string `long:"public" description:"public key file"`
|
||||
}
|
||||
|
||||
type PasswordOptions struct {
|
||||
Iterations uint32 `long:"iterations" description:"argon2 iterations"`
|
||||
Memory uint32 `long:"memory" description:"argon2 memory use"`
|
||||
}
|
||||
|
||||
type MiscOpts struct {
|
||||
Help bool `short:"h" long:"help" description:"show this help message"`
|
||||
Verbose []bool `short:"v" long:"verbose" description:"generate verbose output"`
|
||||
}
|
||||
|
||||
type Positional struct {
|
||||
Names []string `positional-arg-name:"names"`
|
||||
}
|
||||
|
||||
func NewCommand() (*Cmd, error) {
|
||||
args, err := ParseArgs(os.Args[1:]...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
c := &Cmd{
|
||||
Verbose: len(args.Verbose),
|
||||
Names: args.Names,
|
||||
}
|
||||
|
||||
var mode int
|
||||
switch {
|
||||
case args.Create:
|
||||
c.Op = c.Create
|
||||
mode = os.O_EXCL | os.O_CREATE | os.O_WRONLY
|
||||
case args.List:
|
||||
c.Op = c.List
|
||||
mode = os.O_RDONLY
|
||||
case args.Extract:
|
||||
c.Op = c.Extract
|
||||
mode = os.O_RDONLY
|
||||
case args.Keygen:
|
||||
c.Op = c.Keygen
|
||||
}
|
||||
|
||||
switch {
|
||||
case args.Password:
|
||||
c.Archiver, err = args.PreparePasswordArchive(mode)
|
||||
case args.Key != "":
|
||||
c.Archiver, err = args.PrepareCurve448Archive(mode)
|
||||
case len(args.Shards) > 0:
|
||||
c.Archiver, err = args.PrepareShardArchive(mode)
|
||||
case args.Keygen:
|
||||
c.Public, c.Private, err = args.PrepareKeygen()
|
||||
}
|
||||
|
||||
return c, err
|
||||
}
|
||||
|
||||
func ParseArgs(arg ...string) (*Args, error) {
|
||||
args := &Args{
|
||||
PasswordOptions: PasswordOptions{
|
||||
Iterations: 3,
|
||||
Memory: 16,
|
||||
},
|
||||
}
|
||||
|
||||
parser := flags.NewParser(args, flags.PassDoubleDash)
|
||||
parser.Usage = "[OPTIONS]"
|
||||
|
||||
if _, err := parser.Parse(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if args.Help {
|
||||
b := bytes.Buffer{}
|
||||
parser.WriteHelp(&b)
|
||||
return nil, errors.New(b.String())
|
||||
}
|
||||
|
||||
err := args.Validate()
|
||||
return args, err
|
||||
}
|
||||
|
||||
func (a *Args) Validate() error {
|
||||
switch {
|
||||
case !a.Create && !a.List && !a.Extract && !a.Keygen:
|
||||
return fmt.Errorf("must specify one of -c, -t, -x, --keygen")
|
||||
|
||||
case a.Create && (a.List || a.Extract || a.Keygen):
|
||||
return fmt.Errorf("can't combine -c, --create with other operations")
|
||||
case a.List && (a.Create || a.Extract || a.Keygen):
|
||||
return fmt.Errorf("can't combine -t, --list with other operations")
|
||||
case a.Extract && (a.Create || a.List || a.Keygen):
|
||||
return fmt.Errorf("can't combine -x, --extract with other operations")
|
||||
case a.Keygen && (a.Create || a.Extract || a.List):
|
||||
return fmt.Errorf("can't combine --keygen with other operations")
|
||||
|
||||
case a.Create && !a.Password && a.Key == "" && len(a.Shards) == 0:
|
||||
return fmt.Errorf("create requires --password, --key, or --shard")
|
||||
case a.List && !a.Password && a.Key == "" && len(a.Shards) == 0:
|
||||
return fmt.Errorf("list requires --password, --key, or --shard")
|
||||
case a.Extract && !a.Password && a.Key == "" && len(a.Shards) == 0:
|
||||
return fmt.Errorf("extract requires --password, --key, or --shard")
|
||||
|
||||
case a.Password && a.Key != "":
|
||||
return fmt.Errorf("can't combine --password with --key")
|
||||
case a.Password && len(a.Shards) > 0:
|
||||
return fmt.Errorf("can't combine --password with --shard")
|
||||
case a.Key != "" && len(a.Shards) > 0:
|
||||
return fmt.Errorf("can't combine --key with --shard")
|
||||
|
||||
case len(a.Shards) > 255:
|
||||
return fmt.Errorf("can't use more than 255 shards")
|
||||
case a.Create && len(a.Shards) > 0 && len(a.Shards) < 2:
|
||||
return fmt.Errorf("can't use less than 2 shards")
|
||||
case a.Create && len(a.Shards) > 0 && a.Threshold <= 1:
|
||||
return fmt.Errorf("--threshold must be > 1")
|
||||
case a.Create && len(a.Shards) > 0 && a.Threshold > len(a.Shards):
|
||||
return fmt.Errorf("--threshold must be <= %d", len(a.Shards))
|
||||
|
||||
case !a.Keygen && (a.Password || a.Key != "") && a.File == "":
|
||||
return fmt.Errorf("must provide -f, --file")
|
||||
case !a.Keygen && !a.Password && a.Key == "" && a.File == "" && len(a.Shards) == 0:
|
||||
return fmt.Errorf("must provide -f, --file or --shard")
|
||||
case !a.Keygen && a.File != "" && len(a.Shards) > 0:
|
||||
return fmt.Errorf("can't combine -f, --file and --shard")
|
||||
|
||||
case a.Keygen && (a.Public == "" || a.Private == ""):
|
||||
return fmt.Errorf("keygen requires --public and --private")
|
||||
|
||||
case a.Create && len(a.Names) == 0:
|
||||
return fmt.Errorf("no files or directories specified")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *Args) PreparePasswordArchive(mode int) (Archiver, error) {
|
||||
file, err := os.OpenFile(a.File, mode, 0600)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
password, err := ReadPassword()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return NewPasswordArchive(password, a.Iterations, a.Memory, file), nil
|
||||
}
|
||||
|
||||
func (a *Args) PrepareCurve448Archive(mode int) (Archiver, error) {
|
||||
var publicKey PublicKey
|
||||
var privateKey PrivateKey
|
||||
var err error
|
||||
|
||||
if mode&os.O_CREATE == os.O_CREATE {
|
||||
err = a.LoadPublicKey(&publicKey)
|
||||
} else {
|
||||
err = a.LoadPrivateKey(&privateKey)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("file %s: %s", a.Key, err)
|
||||
}
|
||||
|
||||
file, err := os.OpenFile(a.File, mode, 0600)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return NewCurve448Archive(&publicKey, &privateKey, file), nil
|
||||
}
|
||||
|
||||
func (a *Args) PrepareShardArchive(mode int) (Archiver, error) {
|
||||
files := make([]File, len(a.Shards))
|
||||
for i, path := range a.Shards {
|
||||
file, err := os.OpenFile(path, mode, 0600)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files[i] = file
|
||||
}
|
||||
return NewShardArchive(a.Threshold, files), nil
|
||||
}
|
||||
|
||||
func (a *Args) PrepareKeygen() (public *KeyContainer, private *KeyContainer, err error) {
|
||||
mode := os.O_EXCL | os.O_CREATE | os.O_WRONLY
|
||||
|
||||
public, err = a.OpenPublicKeyContainer(a.Public, mode)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("can't create public key: %s", err)
|
||||
}
|
||||
|
||||
private, err = a.OpenPrivateKeyContainer(a.Private, mode)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("can't create private key: %s", err)
|
||||
}
|
||||
|
||||
return public, private, err
|
||||
}
|
||||
|
||||
func (a *Args) LoadPublicKey(key *PublicKey) error {
|
||||
c, err := a.OpenPublicKeyContainer(a.Key, os.O_RDONLY)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer c.Close()
|
||||
return c.ReadPublicKey(key)
|
||||
}
|
||||
|
||||
func (a *Args) LoadPrivateKey(key *PrivateKey) error {
|
||||
c, err := a.OpenPrivateKeyContainer(a.Key, os.O_RDONLY)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer c.Close()
|
||||
return c.ReadPrivateKey(key)
|
||||
}
|
||||
|
||||
func (a *Args) OpenPublicKeyContainer(path string, mode int) (*KeyContainer, error) {
|
||||
file, err := os.OpenFile(path, mode, 0600)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return NewKeyContainer(file, []byte(""), 1, 8), nil
|
||||
}
|
||||
|
||||
func (a *Args) OpenPrivateKeyContainer(path string, mode int) (*KeyContainer, error) {
|
||||
file, err := os.OpenFile(path, mode, 0600)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
password, err := ReadPassword()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return NewKeyContainer(file, password, a.Iterations, a.Memory), nil
|
||||
}
|
||||
|
||||
func ReadPassword() ([]byte, error) {
|
||||
fmt.Print("password: ")
|
||||
b, err := terminal.ReadPassword(int(os.Stdin.Fd()))
|
||||
fmt.Print("\n")
|
||||
return b, err
|
||||
}
|
140
binary/binary.go
Normal file
@ -0,0 +1,140 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package binary
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"io"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
type ByteOrder binary.ByteOrder
|
||||
|
||||
var (
|
||||
BE ByteOrder = binary.BigEndian
|
||||
LE ByteOrder = binary.LittleEndian
|
||||
)
|
||||
|
||||
func Write(w io.Writer, order ByteOrder, data interface{}) error {
|
||||
v := reflect.Indirect(reflect.ValueOf(data))
|
||||
t := v.Type()
|
||||
|
||||
out := make([]byte, size(v, t))
|
||||
buf := out
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
v := v.Field(i)
|
||||
t := v.Type()
|
||||
|
||||
if skip(v, t) {
|
||||
continue
|
||||
}
|
||||
|
||||
switch t.Kind() {
|
||||
case reflect.Int8:
|
||||
buf[0] = byte(v.Int())
|
||||
case reflect.Uint8:
|
||||
buf[0] = byte(v.Uint())
|
||||
case reflect.Int16:
|
||||
order.PutUint16(buf, uint16(v.Int()))
|
||||
case reflect.Uint16:
|
||||
order.PutUint16(buf, uint16(v.Uint()))
|
||||
case reflect.Int32:
|
||||
order.PutUint32(buf, uint32(v.Int()))
|
||||
case reflect.Uint32:
|
||||
order.PutUint32(buf, uint32(v.Uint()))
|
||||
case reflect.Int64:
|
||||
order.PutUint64(buf, uint64(v.Int()))
|
||||
case reflect.Uint64:
|
||||
order.PutUint64(buf, uint64(v.Uint()))
|
||||
case reflect.Array:
|
||||
copy(buf, v.Slice(0, v.Len()).Bytes())
|
||||
}
|
||||
|
||||
buf = buf[t.Size():]
|
||||
}
|
||||
|
||||
_, err := w.Write(out)
|
||||
return err
|
||||
}
|
||||
|
||||
func Read(r io.Reader, order ByteOrder, data interface{}) error {
|
||||
v := reflect.Indirect(reflect.ValueOf(data))
|
||||
t := v.Type()
|
||||
|
||||
buf := make([]byte, size(v, t))
|
||||
|
||||
_, err := io.ReadFull(r, buf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
v := v.Field(i)
|
||||
t := v.Type()
|
||||
|
||||
if skip(v, t) {
|
||||
continue
|
||||
}
|
||||
|
||||
switch t.Kind() {
|
||||
case reflect.Int8:
|
||||
v.SetInt(int64(buf[0]))
|
||||
case reflect.Uint8:
|
||||
v.SetUint(uint64(buf[0]))
|
||||
case reflect.Int16:
|
||||
v.SetInt(int64(order.Uint16(buf)))
|
||||
case reflect.Uint16:
|
||||
v.SetUint(uint64(order.Uint16(buf)))
|
||||
case reflect.Int32:
|
||||
v.SetInt(int64(order.Uint32(buf)))
|
||||
case reflect.Uint32:
|
||||
v.SetUint(uint64(order.Uint32(buf)))
|
||||
case reflect.Int64:
|
||||
v.SetInt(int64(order.Uint64(buf)))
|
||||
case reflect.Uint64:
|
||||
v.SetUint(uint64(order.Uint64(buf)))
|
||||
case reflect.Array:
|
||||
reflect.Copy(v, reflect.ValueOf(buf))
|
||||
}
|
||||
|
||||
buf = buf[t.Size():]
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func size(v reflect.Value, t reflect.Type) uintptr {
|
||||
size := uintptr(0)
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
v := v.Field(i)
|
||||
t := v.Type()
|
||||
if !skip(v, t) {
|
||||
size += t.Size()
|
||||
}
|
||||
}
|
||||
|
||||
return size
|
||||
}
|
||||
|
||||
func skip(v reflect.Value, t reflect.Type) bool {
|
||||
if !v.CanSet() {
|
||||
return true
|
||||
}
|
||||
|
||||
switch t.Kind() {
|
||||
case reflect.Int8, reflect.Uint8:
|
||||
return false
|
||||
case reflect.Int16, reflect.Uint16:
|
||||
return false
|
||||
case reflect.Int32, reflect.Uint32:
|
||||
return false
|
||||
case reflect.Int64, reflect.Uint64:
|
||||
return false
|
||||
case reflect.Array:
|
||||
return t.Elem().Size() != 1
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
168
binary/binary_test.go
Normal file
@ -0,0 +1,168 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package binary
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"math"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestBinaryArray(t *testing.T) {
|
||||
type Values struct {
|
||||
Byte byte
|
||||
Array [3]byte
|
||||
Int64 int64
|
||||
}
|
||||
|
||||
in := &Values{1, [3]byte{2, 3, 4}, 0xAC00BD00}
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
out := &Values{}
|
||||
|
||||
if err := Write(buf, binary.LittleEndian, in); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := Read(buf, binary.LittleEndian, out); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(in, out) {
|
||||
t.Fatalf("round trip serialization failed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestBinaryMinMax(t *testing.T) {
|
||||
type Values struct {
|
||||
MinInt8 int8
|
||||
MaxInt8 int8
|
||||
MaxUint8 uint8
|
||||
MinInt16 int16
|
||||
MaxInt16 int16
|
||||
MaxUint16 uint16
|
||||
MinInt32 int32
|
||||
MaxInt32 int32
|
||||
MaxUint32 uint32
|
||||
MinInt64 int64
|
||||
MaxInt64 int64
|
||||
MaxUint64 uint64
|
||||
}
|
||||
|
||||
in := &Values{
|
||||
MinInt8: math.MinInt8,
|
||||
MaxInt8: math.MaxInt8,
|
||||
MaxUint8: 1<<8 - 1,
|
||||
MinInt16: math.MinInt16,
|
||||
MaxInt16: math.MaxInt16,
|
||||
MaxUint16: 1<<16 - 1,
|
||||
MinInt32: math.MinInt32,
|
||||
MaxInt32: math.MaxInt32,
|
||||
MaxUint32: 1<<32 - 1,
|
||||
MinInt64: math.MinInt64,
|
||||
MaxInt64: math.MaxInt64,
|
||||
MaxUint64: 1<<64 - 1,
|
||||
}
|
||||
|
||||
out := &Values{}
|
||||
buf := &bytes.Buffer{}
|
||||
|
||||
if err := Write(buf, binary.LittleEndian, in); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := Read(buf, binary.LittleEndian, out); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(in, out) {
|
||||
t.Fatalf("round trip serialization failed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestBinaryByteOrder(t *testing.T) {
|
||||
type Values struct {
|
||||
Uint16 uint16
|
||||
Uint32 uint32
|
||||
Uint64 uint64
|
||||
}
|
||||
|
||||
in := &Values{
|
||||
Uint16: 0x1234,
|
||||
Uint32: 0x12345678,
|
||||
Uint64: 0x1234567890ABCDEF,
|
||||
}
|
||||
|
||||
little := []byte{0xEF, 0xCD, 0xAB, 0x90, 0x78, 0x56, 0x34, 0x12}
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
|
||||
if err := Write(buf, binary.LittleEndian, in); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.Bytes()[0:2], little[6:8]) {
|
||||
t.Fatalf("uint16 little endian incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.Bytes()[2:6], little[4:8]) {
|
||||
t.Fatalf("uint32 little endian incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.Bytes()[6:14], little[0:8]) {
|
||||
t.Fatalf("uint64 little endian incorrect")
|
||||
}
|
||||
buf.Reset()
|
||||
|
||||
big := []byte{0x12, 0x34, 0x56, 0x78, 0x90, 0xAB, 0xCD, 0xEF}
|
||||
|
||||
if err := Write(buf, binary.BigEndian, in); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.Bytes()[0:2], big[0:2]) {
|
||||
t.Fatalf("uint16 big endian incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.Bytes()[2:6], big[0:4]) {
|
||||
t.Fatalf("uint32 big endian incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(buf.Bytes()[6:14], big[0:8]) {
|
||||
t.Fatalf("uint64 big endian incorrect")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestBinarySkip(t *testing.T) {
|
||||
type Values struct {
|
||||
A byte
|
||||
B string
|
||||
C int32
|
||||
D []byte
|
||||
e byte
|
||||
}
|
||||
|
||||
in := &Values{1, "foo", 0xABCDEF, []byte("bar"), 2}
|
||||
|
||||
out := &Values{}
|
||||
buf := &bytes.Buffer{}
|
||||
|
||||
if err := Write(buf, binary.LittleEndian, in); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := Read(buf, binary.LittleEndian, out); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
in.B = ""
|
||||
in.D = nil
|
||||
in.e = 0
|
||||
|
||||
if !reflect.DeepEqual(in, out) {
|
||||
t.Fatal("round trip serialization failed")
|
||||
}
|
||||
}
|
43
bytesize.go
Normal file
@ -0,0 +1,43 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import "fmt"
|
||||
|
||||
type ByteSize float64
|
||||
|
||||
const (
|
||||
_ = iota // ignore first value by assigning to blank identifier
|
||||
KB ByteSize = 1 << (10 * iota)
|
||||
MB
|
||||
GB
|
||||
TB
|
||||
PB
|
||||
EB
|
||||
ZB
|
||||
YB
|
||||
)
|
||||
|
||||
func (b ByteSize) String() string {
|
||||
switch {
|
||||
case b >= YB:
|
||||
return fmt.Sprintf("%.2fY", b/YB)
|
||||
case b >= ZB:
|
||||
return fmt.Sprintf("%.2fZ", b/ZB)
|
||||
case b >= EB:
|
||||
return fmt.Sprintf("%.2fE", b/EB)
|
||||
case b >= PB:
|
||||
return fmt.Sprintf("%.2fP", b/PB)
|
||||
case b >= TB:
|
||||
return fmt.Sprintf("%.2fT", b/TB)
|
||||
case b >= GB:
|
||||
return fmt.Sprintf("%.2fG", b/GB)
|
||||
case b >= MB:
|
||||
return fmt.Sprintf("%.2fM", b/MB)
|
||||
case b >= KB:
|
||||
return fmt.Sprintf("%.2fK", b/KB)
|
||||
}
|
||||
return fmt.Sprintf("%.2fB", b)
|
||||
}
|
123
create.go
Normal file
@ -0,0 +1,123 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/wg/arc/archive"
|
||||
)
|
||||
|
||||
func (c *Cmd) Create(arc *archive.Writer, names ...string) error {
|
||||
headers, errors := Scan(names)
|
||||
for header := range headers {
|
||||
name := header.Name
|
||||
size := header.Size
|
||||
|
||||
err := arc.Add(header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if header.Typeflag == tar.TypeReg {
|
||||
r, err := os.Open(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = arc.Copy(r, size)
|
||||
r.Close()
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if c.Verbose > 0 {
|
||||
fmt.Println("a", name)
|
||||
}
|
||||
}
|
||||
|
||||
select {
|
||||
case err := <-errors:
|
||||
return err
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func Scan(names []string) (<-chan *tar.Header, <-chan error) {
|
||||
headers := make(chan *tar.Header, 64)
|
||||
errors := make(chan error)
|
||||
|
||||
go func() {
|
||||
err := scan(names, headers)
|
||||
close(headers)
|
||||
if err != nil {
|
||||
errors <- err
|
||||
}
|
||||
}()
|
||||
|
||||
return headers, errors
|
||||
}
|
||||
|
||||
func scan(names []string, headers chan<- *tar.Header) error {
|
||||
for _, name := range names {
|
||||
info, err := os.Lstat(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
mode := info.Mode()
|
||||
link := ""
|
||||
|
||||
if !mode.IsRegular() && mode&(os.ModeDir|os.ModeSymlink) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
if mode&os.ModeSymlink != 0 {
|
||||
link, err = os.Readlink(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
header, err := tar.FileInfoHeader(info, link)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
header.Name = name
|
||||
headers <- header
|
||||
|
||||
if mode.IsDir() {
|
||||
dir, err := os.Open(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for err == nil {
|
||||
names, err = dir.Readdirnames(64)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
|
||||
for i, n := range names {
|
||||
names[i] = filepath.Join(name, n)
|
||||
}
|
||||
|
||||
err = scan(names, headers)
|
||||
}
|
||||
dir.Close()
|
||||
|
||||
if err != nil && err != io.EOF {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
90
extract.go
Normal file
@ -0,0 +1,90 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/wg/arc/archive"
|
||||
)
|
||||
|
||||
func (c *Cmd) Extract(arc *RegexFilter) error {
|
||||
mtimes := map[string]time.Time{}
|
||||
|
||||
for arc.Next() {
|
||||
h := arc.Header
|
||||
|
||||
name := h.Name
|
||||
mode := os.FileMode(h.Mode)
|
||||
|
||||
var err error
|
||||
switch h.Typeflag {
|
||||
case tar.TypeReg, tar.TypeRegA:
|
||||
err = extract(name, mode, h.Size, arc)
|
||||
case tar.TypeDir:
|
||||
err = os.Mkdir(name, mode)
|
||||
case tar.TypeSymlink:
|
||||
err = os.Symlink(h.Linkname, name)
|
||||
}
|
||||
|
||||
var action string
|
||||
switch {
|
||||
case os.IsExist(err):
|
||||
action = "-"
|
||||
case err != nil:
|
||||
return err
|
||||
default:
|
||||
action = "x"
|
||||
}
|
||||
|
||||
if c.Verbose > 0 {
|
||||
fmt.Println(action, name)
|
||||
}
|
||||
|
||||
mtimes[name] = h.ModTime
|
||||
}
|
||||
|
||||
switch {
|
||||
case arc.Error != nil:
|
||||
return arc.Error
|
||||
case !arc.Verify():
|
||||
return ErrVerifyFailed
|
||||
}
|
||||
|
||||
ctime := time.Now()
|
||||
for name, mtime := range mtimes {
|
||||
err := os.Chtimes(name, ctime, mtime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func extract(path string, mode os.FileMode, size int64, r io.Reader) error {
|
||||
err := os.MkdirAll(filepath.Dir(path), 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
f, err := os.OpenFile(path, os.O_EXCL|os.O_CREATE|os.O_WRONLY, mode)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
switch n, err := io.Copy(f, r); {
|
||||
case err != nil:
|
||||
return err
|
||||
case n < size:
|
||||
return archive.ErrShortCopy
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
42
filter.go
Normal file
@ -0,0 +1,42 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/wg/arc/archive"
|
||||
)
|
||||
|
||||
type RegexFilter struct {
|
||||
Header *tar.Header
|
||||
Error error
|
||||
regex *regexp.Regexp
|
||||
*archive.Reader
|
||||
}
|
||||
|
||||
func NewRegexFilter(r *archive.Reader, paths ...string) (*RegexFilter, error) {
|
||||
regex, err := regexp.Compile(strings.Join(paths, "|"))
|
||||
return &RegexFilter{
|
||||
regex: regex,
|
||||
Reader: r,
|
||||
}, err
|
||||
}
|
||||
|
||||
func (f *RegexFilter) Next() bool {
|
||||
for {
|
||||
switch header, err := f.Reader.Next(); {
|
||||
case err == io.EOF:
|
||||
return false
|
||||
case err != nil:
|
||||
f.Error = err
|
||||
return false
|
||||
case f.regex.MatchString(header.Name):
|
||||
f.Header = header
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
170
key.go
Normal file
@ -0,0 +1,170 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/subtle"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"github.com/dchest/blake2b"
|
||||
"github.com/magical/argon2"
|
||||
"github.com/wg/arc/binary"
|
||||
"github.com/wg/ecies"
|
||||
"github.com/wg/ecies/xchacha20poly1305"
|
||||
)
|
||||
|
||||
const (
|
||||
Public = 0x01
|
||||
Private = 0x02
|
||||
NonSize = xchacha20poly1305.NonceSize
|
||||
TagSize = xchacha20poly1305.TagSize
|
||||
)
|
||||
|
||||
type (
|
||||
PublicKey [56]byte
|
||||
PrivateKey [56]byte
|
||||
)
|
||||
|
||||
type KeyContainer struct {
|
||||
Version byte
|
||||
Type byte
|
||||
Iterations uint32
|
||||
Memory uint32
|
||||
Salt [32]byte
|
||||
Tag [TagSize]byte
|
||||
Nonce [NonSize]byte
|
||||
Key [56]byte
|
||||
Password []byte
|
||||
File io.ReadWriteCloser
|
||||
}
|
||||
|
||||
var (
|
||||
ErrInvalidPublicKey = errors.New("invalid public key")
|
||||
ErrInvalidPrivateKey = errors.New("invalid private key")
|
||||
)
|
||||
|
||||
func GenerateKeypair() (*PublicKey, *PrivateKey, error) {
|
||||
var public, private [56]byte
|
||||
err := ecies.GenerateCurve448Key(rand.Reader, &public, &private)
|
||||
return (*PublicKey)(&public), (*PrivateKey)(&private), err
|
||||
}
|
||||
|
||||
func ComputeSharedKey(public *PublicKey, private *PrivateKey, size uint8) ([]byte, error) {
|
||||
hash, err := blake2b.New(&blake2b.Config{Size: size})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var secret [56]byte
|
||||
err = ecies.X448(&secret, (*[56]byte)(public), (*[56]byte)(private))
|
||||
hash.Write(secret[:])
|
||||
|
||||
for i := range secret {
|
||||
secret[i] = 0
|
||||
}
|
||||
|
||||
return hash.Sum(nil), err
|
||||
}
|
||||
|
||||
func (private *PrivateKey) Zero() {
|
||||
for i := range private {
|
||||
private[i] = 0
|
||||
}
|
||||
}
|
||||
|
||||
func NewKeyContainer(file io.ReadWriteCloser, password []byte, iterations, memory uint32) *KeyContainer {
|
||||
return &KeyContainer{
|
||||
Version: 1,
|
||||
Iterations: iterations,
|
||||
Memory: memory,
|
||||
Password: password,
|
||||
File: file,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *KeyContainer) ReadPublicKey(key *PublicKey) error {
|
||||
return c.read(Public, (*[56]byte)(key))
|
||||
}
|
||||
|
||||
func (c *KeyContainer) WritePublicKey(key *PublicKey) error {
|
||||
return c.write(Public, (*[56]byte)(key))
|
||||
}
|
||||
|
||||
func (c *KeyContainer) ReadPrivateKey(key *PrivateKey) error {
|
||||
return c.read(Private, (*[56]byte)(key))
|
||||
}
|
||||
|
||||
func (c *KeyContainer) WritePrivateKey(key *PrivateKey) error {
|
||||
return c.write(Private, (*[56]byte)(key))
|
||||
}
|
||||
|
||||
func (c *KeyContainer) Close() error {
|
||||
return c.File.Close()
|
||||
}
|
||||
|
||||
func (c *KeyContainer) read(t byte, key *[56]byte) error {
|
||||
switch err := binary.Read(c.File, binary.LE, c); {
|
||||
case err != nil:
|
||||
return err
|
||||
case c.Type != t && t == Public:
|
||||
return ErrInvalidPublicKey
|
||||
case c.Type != t && t == Private:
|
||||
return ErrInvalidPrivateKey
|
||||
}
|
||||
|
||||
var tag [TagSize]byte
|
||||
x, err := c.cipher()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
x.Decrypt(key[:], c.Key[:])
|
||||
x.Tag(tag[:0])
|
||||
|
||||
if subtle.ConstantTimeCompare(c.Tag[:], tag[:]) != 1 {
|
||||
return ErrInvalidPrivateKey
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *KeyContainer) write(t byte, key *[56]byte) error {
|
||||
c.Type = t
|
||||
|
||||
if _, err := rand.Read(c.Salt[:]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := rand.Read(c.Nonce[:]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
x, err := c.cipher()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
x.Encrypt(c.Key[:], key[:])
|
||||
x.Tag(c.Tag[:0])
|
||||
|
||||
return binary.Write(c.File, binary.LE, c)
|
||||
}
|
||||
|
||||
func (c *KeyContainer) cipher() (*xchacha20poly1305.XChaCha20Poly1305, error) {
|
||||
var (
|
||||
salt = c.Salt[:]
|
||||
iterations = int(c.Iterations)
|
||||
memory = int64(c.Memory)
|
||||
keySize = xchacha20poly1305.KeySize
|
||||
)
|
||||
|
||||
key, err := argon2.Key(c.Password, salt, iterations, 1, memory, keySize)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
x := &xchacha20poly1305.XChaCha20Poly1305{}
|
||||
return x, x.Init(key[:], c.Nonce[:])
|
||||
}
|
152
key_test.go
Normal file
@ -0,0 +1,152 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/magical/argon2"
|
||||
"github.com/wg/arc/binary"
|
||||
"github.com/wg/ecies/xchacha20poly1305"
|
||||
)
|
||||
|
||||
func TestPublicKeyFormat(t *testing.T) {
|
||||
p, _ := keypair(t)
|
||||
b, c := StorePublicKey(t, p)
|
||||
|
||||
if b.buffer[1] != Public {
|
||||
t.Fatal("serialized type incorrect")
|
||||
}
|
||||
|
||||
CheckKeyFormat(t, (*[56]byte)(p), b, c)
|
||||
}
|
||||
|
||||
func TestPrivateKeyFormat(t *testing.T) {
|
||||
_, p := keypair(t)
|
||||
b, c := StorePrivateKey(t, p)
|
||||
|
||||
if b.buffer[1] != Private {
|
||||
t.Fatal("serialized type incorrect")
|
||||
}
|
||||
|
||||
CheckKeyFormat(t, (*[56]byte)(p), b, c)
|
||||
}
|
||||
|
||||
func CheckKeyFormat(t *testing.T, k *[56]byte, b *Buffer, c *KeyContainer) {
|
||||
key, err := argon2.Key(c.Password, c.Salt[:], int(c.Iterations), 1, int64(c.Memory), KeySize)
|
||||
if err != nil {
|
||||
t.Fatal("password key derivation failed", err)
|
||||
}
|
||||
|
||||
x := xchacha20poly1305.XChaCha20Poly1305{}
|
||||
if err := x.Init(key, c.Nonce[:]); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
dst := [56]byte{}
|
||||
tag := [TagSize]byte{}
|
||||
|
||||
x.Decrypt(dst[:], c.Key[:])
|
||||
x.Tag(tag[:0])
|
||||
|
||||
if !bytes.Equal(k[:], dst[:]) {
|
||||
t.Fatal("decrypted key incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(tag[:], c.Tag[:]) {
|
||||
t.Fatal("authentication tag incorrect")
|
||||
}
|
||||
|
||||
if binary.LE.Uint32(b.buffer[2:6]) != c.Iterations {
|
||||
t.Fatal("serialized iterations incorrect")
|
||||
}
|
||||
|
||||
if binary.LE.Uint32(b.buffer[6:10]) != c.Memory {
|
||||
t.Fatal("serialized memory incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(b.buffer[10:42], c.Salt[:]) {
|
||||
t.Fatal("serialized salt incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(b.buffer[42:58], tag[:]) {
|
||||
t.Fatal("serialized tag incorrect")
|
||||
}
|
||||
|
||||
if !bytes.Equal(b.buffer[58:82], c.Nonce[:]) {
|
||||
t.Fatal("serialized nonce incorrect")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPublicPrivateKeypair(t *testing.T) {
|
||||
pub, priv := keypair(t)
|
||||
|
||||
shared0, err := ComputeSharedKey(pub, priv, KeySize)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
_, puc := StorePublicKey(t, pub)
|
||||
_, prc := StorePrivateKey(t, priv)
|
||||
|
||||
priv.Zero()
|
||||
|
||||
if err := puc.ReadPublicKey(pub); err != nil {
|
||||
t.Fatal("failed to load public key", err)
|
||||
}
|
||||
|
||||
if err := prc.ReadPrivateKey(priv); err != nil {
|
||||
t.Fatal("failed to load private key", err)
|
||||
}
|
||||
|
||||
shared1, err := ComputeSharedKey(pub, priv, KeySize)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(shared0, shared1) {
|
||||
t.Fatal("serialized keys incorrect")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrongKeyType(t *testing.T) {
|
||||
pub, priv := keypair(t)
|
||||
|
||||
_, pubc := StorePublicKey(t, pub)
|
||||
_, privc := StorePrivateKey(t, priv)
|
||||
|
||||
if err := pubc.ReadPrivateKey(priv); err != ErrInvalidPrivateKey {
|
||||
t.Fatal("loaded public key as private key")
|
||||
}
|
||||
|
||||
if err := privc.ReadPublicKey(pub); err != ErrInvalidPublicKey {
|
||||
t.Fatal("loaded private key as public key")
|
||||
}
|
||||
}
|
||||
|
||||
func StorePublicKey(t *testing.T, key *PublicKey) (*Buffer, *KeyContainer) {
|
||||
b := &Buffer{}
|
||||
c := NewKeyContainer(b, []byte(""), 1, 8)
|
||||
|
||||
if err := c.WritePublicKey(key); err != nil {
|
||||
t.Fatal("failed to store public key", err)
|
||||
}
|
||||
|
||||
b.Rewind()
|
||||
|
||||
return b, c
|
||||
}
|
||||
|
||||
func StorePrivateKey(t *testing.T, key *PrivateKey) (*Buffer, *KeyContainer) {
|
||||
b := &Buffer{}
|
||||
c := NewKeyContainer(b, []byte("secret"), 1, 8)
|
||||
|
||||
if err := c.WritePrivateKey(key); err != nil {
|
||||
t.Fatal("failed to store private key", err)
|
||||
}
|
||||
|
||||
b.Rewind()
|
||||
|
||||
return b, c
|
||||
}
|
20
keygen.go
Normal file
@ -0,0 +1,20 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
func (c *Cmd) Keygen(puc *KeyContainer, prc *KeyContainer) error {
|
||||
public, private, err := GenerateKeypair()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = puc.WritePublicKey(public); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = prc.WritePrivateKey(private); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
72
list.go
Normal file
@ -0,0 +1,72 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrVerifyFailed = errors.New("archive: verify failed")
|
||||
ErrNoEntryFound = errors.New("archive: no entry found")
|
||||
)
|
||||
|
||||
func (c *Cmd) List(arc *RegexFilter) error {
|
||||
matches := 0
|
||||
|
||||
for arc.Next() {
|
||||
h := arc.Header
|
||||
switch {
|
||||
case c.Verbose > 0:
|
||||
const layout = "%s %-6d %-6d %8s %s %s\n"
|
||||
mode := mode(h)
|
||||
size := size(h)
|
||||
date := h.ModTime.Format("2006-01-02 15:04")
|
||||
name := name(h)
|
||||
fmt.Printf(layout, mode, h.Uid, h.Gid, size, date, name)
|
||||
default:
|
||||
fmt.Println(h.Name)
|
||||
}
|
||||
matches++
|
||||
}
|
||||
|
||||
switch {
|
||||
case arc.Error != nil:
|
||||
return arc.Error
|
||||
case !arc.Verify():
|
||||
return ErrVerifyFailed
|
||||
case matches == 0:
|
||||
return ErrNoEntryFound
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func mode(h *tar.Header) string {
|
||||
mode := os.FileMode(h.Mode)
|
||||
switch h.Typeflag {
|
||||
case tar.TypeDir:
|
||||
mode |= os.ModeDir
|
||||
case tar.TypeSymlink:
|
||||
mode |= os.ModeSymlink
|
||||
}
|
||||
return mode.String()
|
||||
}
|
||||
|
||||
func size(h *tar.Header) string {
|
||||
if h.Size == 0 {
|
||||
return "0"
|
||||
}
|
||||
return ByteSize(h.Size).String()
|
||||
}
|
||||
|
||||
func name(h *tar.Header) string {
|
||||
name := h.Name
|
||||
if h.Typeflag == tar.TypeSymlink {
|
||||
name += " -> " + h.Linkname
|
||||
}
|
||||
return name
|
||||
}
|
73
main.go
Normal file
@ -0,0 +1,73 @@
|
||||
// Copyright (C) 2016 - Will Glozer. All rights reserved.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/wg/arc/archive"
|
||||
)
|
||||
|
||||
type Cmd struct {
|
||||
Op interface{}
|
||||
Archiver Archiver
|
||||
Verbose int
|
||||
Names []string
|
||||
Private *KeyContainer
|
||||
Public *KeyContainer
|
||||
}
|
||||
|
||||
func main() {
|
||||
c, err := NewCommand()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
switch op := c.Op.(type) {
|
||||
case func(*archive.Writer, ...string) error:
|
||||
arc := c.createArchive()
|
||||
err = op(arc.Writer, c.Names...)
|
||||
defer arc.Close()
|
||||
case func(*RegexFilter) error:
|
||||
arc, filter := c.filterArchive()
|
||||
err = op(filter)
|
||||
defer arc.Close()
|
||||
case func(*KeyContainer, *KeyContainer) error:
|
||||
err = op(c.Public, c.Private)
|
||||
defer c.Public.Close()
|
||||
defer c.Private.Close()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
c.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Cmd) createArchive() *Writer {
|
||||
arc, err := c.Archiver.Writer()
|
||||
if err != nil {
|
||||
c.Fatal(err)
|
||||
}
|
||||
return arc
|
||||
}
|
||||
|
||||
func (c *Cmd) filterArchive() (*Reader, *RegexFilter) {
|
||||
arc, err := c.Archiver.Reader()
|
||||
if err != nil {
|
||||
c.Fatal(err)
|
||||
}
|
||||
|
||||
f, err := NewRegexFilter(arc.Reader, c.Names...)
|
||||
if err != nil {
|
||||
c.Fatal(err)
|
||||
}
|
||||
|
||||
return arc, f
|
||||
}
|
||||
|
||||
func (c *Cmd) Fatal(v ...interface{}) {
|
||||
fmt.Println(v...)
|
||||
os.Exit(1)
|
||||
}
|
21
vendor/github.com/codahale/sss/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Coda Hale
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
11
vendor/github.com/codahale/sss/README.md
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
# sss (Shamir's Secret Sharing)
|
||||
|
||||
[](https://travis-ci.org/codahale/sss)
|
||||
|
||||
A pure Go implementation of
|
||||
[Shamir's Secret Sharing algorithm](http://en.wikipedia.org/wiki/Shamir's_Secret_Sharing)
|
||||
over GF(2^8).
|
||||
|
||||
Inspired by @hbs's [Python implementation](https://github.com/hbs/PySSSS).
|
||||
|
||||
For documentation, check [godoc](http://godoc.org/github.com/codahale/sss).
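
A minimal usage sketch, assuming only the `Split` and `Combine` functions shown in `sss.go` below and the vendored import path `github.com/codahale/sss`: split a secret into 5 shares with a threshold of 3, then recombine any 3 of them.

    package main

    import (
        "fmt"

        "github.com/codahale/sss"
    )

    func main() {
        secret := []byte("too many secrets")

        // Split into n=5 shares; any k=3 of them recover the secret.
        shares, err := sss.Split(5, 3, secret)
        if err != nil {
            panic(err)
        }

        // Recombine a threshold-sized subset (share IDs 1, 2 and 4).
        subset := map[byte][]byte{1: shares[1], 2: shares[2], 4: shares[4]}
        fmt.Println(string(sss.Combine(subset)))
    }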
|
81
vendor/github.com/codahale/sss/gf256.go
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
package sss
|
||||
|
||||
func mul(e, a byte) byte {
|
||||
if e == 0 || a == 0 {
|
||||
return 0
|
||||
}
|
||||
return exp[(int(log[e])+int(log[a]))%255]
|
||||
}
|
||||
|
||||
func div(e, a byte) byte {
|
||||
if a == 0 {
|
||||
panic("div by zero")
|
||||
}
|
||||
|
||||
if e == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
p := (int(log[e]) - int(log[a])) % 255
|
||||
if p < 0 {
|
||||
p += 255
|
||||
}
|
||||
|
||||
return exp[p]
|
||||
}
|
||||
|
||||
const (
|
||||
fieldSize = 256 // 2^8
|
||||
)
|
||||
|
||||
var (
|
||||
// 0x11b prime polynomial and 0x03 as generator
|
||||
exp = [fieldSize]byte{
|
||||
0x01, 0x03, 0x05, 0x0f, 0x11, 0x33, 0x55, 0xff, 0x1a, 0x2e, 0x72, 0x96,
|
||||
0xa1, 0xf8, 0x13, 0x35, 0x5f, 0xe1, 0x38, 0x48, 0xd8, 0x73, 0x95, 0xa4,
|
||||
0xf7, 0x02, 0x06, 0x0a, 0x1e, 0x22, 0x66, 0xaa, 0xe5, 0x34, 0x5c, 0xe4,
|
||||
0x37, 0x59, 0xeb, 0x26, 0x6a, 0xbe, 0xd9, 0x70, 0x90, 0xab, 0xe6, 0x31,
|
||||
0x53, 0xf5, 0x04, 0x0c, 0x14, 0x3c, 0x44, 0xcc, 0x4f, 0xd1, 0x68, 0xb8,
|
||||
0xd3, 0x6e, 0xb2, 0xcd, 0x4c, 0xd4, 0x67, 0xa9, 0xe0, 0x3b, 0x4d, 0xd7,
|
||||
0x62, 0xa6, 0xf1, 0x08, 0x18, 0x28, 0x78, 0x88, 0x83, 0x9e, 0xb9, 0xd0,
|
||||
0x6b, 0xbd, 0xdc, 0x7f, 0x81, 0x98, 0xb3, 0xce, 0x49, 0xdb, 0x76, 0x9a,
|
||||
0xb5, 0xc4, 0x57, 0xf9, 0x10, 0x30, 0x50, 0xf0, 0x0b, 0x1d, 0x27, 0x69,
|
||||
0xbb, 0xd6, 0x61, 0xa3, 0xfe, 0x19, 0x2b, 0x7d, 0x87, 0x92, 0xad, 0xec,
|
||||
0x2f, 0x71, 0x93, 0xae, 0xe9, 0x20, 0x60, 0xa0, 0xfb, 0x16, 0x3a, 0x4e,
|
||||
0xd2, 0x6d, 0xb7, 0xc2, 0x5d, 0xe7, 0x32, 0x56, 0xfa, 0x15, 0x3f, 0x41,
|
||||
0xc3, 0x5e, 0xe2, 0x3d, 0x47, 0xc9, 0x40, 0xc0, 0x5b, 0xed, 0x2c, 0x74,
|
||||
0x9c, 0xbf, 0xda, 0x75, 0x9f, 0xba, 0xd5, 0x64, 0xac, 0xef, 0x2a, 0x7e,
|
||||
0x82, 0x9d, 0xbc, 0xdf, 0x7a, 0x8e, 0x89, 0x80, 0x9b, 0xb6, 0xc1, 0x58,
|
||||
0xe8, 0x23, 0x65, 0xaf, 0xea, 0x25, 0x6f, 0xb1, 0xc8, 0x43, 0xc5, 0x54,
|
||||
0xfc, 0x1f, 0x21, 0x63, 0xa5, 0xf4, 0x07, 0x09, 0x1b, 0x2d, 0x77, 0x99,
|
||||
0xb0, 0xcb, 0x46, 0xca, 0x45, 0xcf, 0x4a, 0xde, 0x79, 0x8b, 0x86, 0x91,
|
||||
0xa8, 0xe3, 0x3e, 0x42, 0xc6, 0x51, 0xf3, 0x0e, 0x12, 0x36, 0x5a, 0xee,
|
||||
0x29, 0x7b, 0x8d, 0x8c, 0x8f, 0x8a, 0x85, 0x94, 0xa7, 0xf2, 0x0d, 0x17,
|
||||
0x39, 0x4b, 0xdd, 0x7c, 0x84, 0x97, 0xa2, 0xfd, 0x1c, 0x24, 0x6c, 0xb4,
|
||||
0xc7, 0x52, 0xf6, 0x01,
|
||||
}
|
||||
log = [fieldSize]byte{
|
||||
0x00, 0x00, 0x19, 0x01, 0x32, 0x02, 0x1a, 0xc6, 0x4b, 0xc7, 0x1b, 0x68,
|
||||
0x33, 0xee, 0xdf, 0x03, 0x64, 0x04, 0xe0, 0x0e, 0x34, 0x8d, 0x81, 0xef,
|
||||
0x4c, 0x71, 0x08, 0xc8, 0xf8, 0x69, 0x1c, 0xc1, 0x7d, 0xc2, 0x1d, 0xb5,
|
||||
0xf9, 0xb9, 0x27, 0x6a, 0x4d, 0xe4, 0xa6, 0x72, 0x9a, 0xc9, 0x09, 0x78,
|
||||
0x65, 0x2f, 0x8a, 0x05, 0x21, 0x0f, 0xe1, 0x24, 0x12, 0xf0, 0x82, 0x45,
|
||||
0x35, 0x93, 0xda, 0x8e, 0x96, 0x8f, 0xdb, 0xbd, 0x36, 0xd0, 0xce, 0x94,
|
||||
0x13, 0x5c, 0xd2, 0xf1, 0x40, 0x46, 0x83, 0x38, 0x66, 0xdd, 0xfd, 0x30,
|
||||
0xbf, 0x06, 0x8b, 0x62, 0xb3, 0x25, 0xe2, 0x98, 0x22, 0x88, 0x91, 0x10,
|
||||
0x7e, 0x6e, 0x48, 0xc3, 0xa3, 0xb6, 0x1e, 0x42, 0x3a, 0x6b, 0x28, 0x54,
|
||||
0xfa, 0x85, 0x3d, 0xba, 0x2b, 0x79, 0x0a, 0x15, 0x9b, 0x9f, 0x5e, 0xca,
|
||||
0x4e, 0xd4, 0xac, 0xe5, 0xf3, 0x73, 0xa7, 0x57, 0xaf, 0x58, 0xa8, 0x50,
|
||||
0xf4, 0xea, 0xd6, 0x74, 0x4f, 0xae, 0xe9, 0xd5, 0xe7, 0xe6, 0xad, 0xe8,
|
||||
0x2c, 0xd7, 0x75, 0x7a, 0xeb, 0x16, 0x0b, 0xf5, 0x59, 0xcb, 0x5f, 0xb0,
|
||||
0x9c, 0xa9, 0x51, 0xa0, 0x7f, 0x0c, 0xf6, 0x6f, 0x17, 0xc4, 0x49, 0xec,
|
||||
0xd8, 0x43, 0x1f, 0x2d, 0xa4, 0x76, 0x7b, 0xb7, 0xcc, 0xbb, 0x3e, 0x5a,
|
||||
0xfb, 0x60, 0xb1, 0x86, 0x3b, 0x52, 0xa1, 0x6c, 0xaa, 0x55, 0x29, 0x9d,
|
||||
0x97, 0xb2, 0x87, 0x90, 0x61, 0xbe, 0xdc, 0xfc, 0xbc, 0x95, 0xcf, 0xcd,
|
||||
0x37, 0x3f, 0x5b, 0xd1, 0x53, 0x39, 0x84, 0x3c, 0x41, 0xa2, 0x6d, 0x47,
|
||||
0x14, 0x2a, 0x9e, 0x5d, 0x56, 0xf2, 0xd3, 0xab, 0x44, 0x11, 0x92, 0xd9,
|
||||
0x23, 0x20, 0x2e, 0x89, 0xb4, 0x7c, 0xb8, 0x26, 0x77, 0x99, 0xe3, 0xa5,
|
||||
0x67, 0x4a, 0xed, 0xde, 0xc5, 0x31, 0xfe, 0x18, 0x0d, 0x63, 0x8c, 0x80,
|
||||
0xc0, 0xf7, 0x70, 0x07,
|
||||
}
|
||||
)
|
35
vendor/github.com/codahale/sss/gf256_test.go
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
package sss
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMul(t *testing.T) {
|
||||
if v, want := mul(90, 21), byte(254); v != want {
|
||||
t.Errorf("Was %v, but expected %v", v, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDiv(t *testing.T) {
|
||||
if v, want := div(90, 21), byte(189); v != want {
|
||||
t.Errorf("Was %v, but expected %v", v, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDivZero(t *testing.T) {
|
||||
if v, want := div(0, 2), byte(0); v != want {
|
||||
t.Errorf("Was %v, but expected %v", v, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDivByZero(t *testing.T) {
|
||||
defer func() {
|
||||
m := recover()
|
||||
if m != "div by zero" {
|
||||
t.Error(m)
|
||||
}
|
||||
}()
|
||||
|
||||
div(2, 0)
|
||||
t.Error("Shouldn't have been able to divide those")
|
||||
}
|
67
vendor/github.com/codahale/sss/polynomial.go
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
package sss
|
||||
|
||||
import "io"
|
||||
|
||||
// the degree of the polynomial
|
||||
func degree(p []byte) int {
|
||||
return len(p) - 1
|
||||
}
|
||||
|
||||
// evaluate the polynomial at the given point
|
||||
func eval(p []byte, x byte) (result byte) {
|
||||
// Horner's scheme
|
||||
for i := 1; i <= len(p); i++ {
|
||||
result = mul(result, x) ^ p[len(p)-i]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// generates a random n-degree polynomial w/ a given x-intercept
|
||||
func generate(degree byte, x byte, rand io.Reader) ([]byte, error) {
|
||||
result := make([]byte, degree+1)
|
||||
result[0] = x
|
||||
|
||||
buf := make([]byte, degree-1)
|
||||
if _, err := io.ReadFull(rand, buf); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for i := byte(1); i < degree; i++ {
|
||||
result[i] = buf[i-1]
|
||||
}
|
||||
|
||||
// the Nth term can't be zero, or else it's a (N-1) degree polynomial
|
||||
for {
|
||||
buf = make([]byte, 1)
|
||||
if _, err := io.ReadFull(rand, buf); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if buf[0] != 0 {
|
||||
result[degree] = buf[0]
|
||||
return result, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// an input/output pair
|
||||
type pair struct {
|
||||
x, y byte
|
||||
}
|
||||
|
||||
// Lagrange interpolation
|
||||
func interpolate(points []pair, x byte) (value byte) {
|
||||
for i, a := range points {
|
||||
weight := byte(1)
|
||||
for j, b := range points {
|
||||
if i != j {
|
||||
top := x ^ b.x
|
||||
bottom := a.x ^ b.x
|
||||
factor := div(top, bottom)
|
||||
weight = mul(weight, factor)
|
||||
}
|
||||
}
|
||||
value = value ^ mul(weight, a.y)
|
||||
}
|
||||
return
|
||||
}
|
89
vendor/github.com/codahale/sss/polynomial_test.go
generated
vendored
Normal file
@ -0,0 +1,89 @@
|
||||
package sss
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var (
|
||||
p = []byte{1, 0, 2, 3}
|
||||
p2 = []byte{70, 32, 6}
|
||||
)
|
||||
|
||||
func TestDegree(t *testing.T) {
|
||||
if v, want := degree(p), 3; v != want {
|
||||
t.Errorf("Was %v, but expected %v", v, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEval(t *testing.T) {
|
||||
if v, want := eval(p, 2), byte(17); v != want {
|
||||
t.Errorf("Was %v, but expected %v", v, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerate(t *testing.T) {
|
||||
b := []byte{1, 2, 3}
|
||||
|
||||
expected := []byte{10, 1, 2, 3}
|
||||
actual, err := generate(3, 10, bytes.NewReader(b))
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(actual, expected) {
|
||||
t.Errorf("Was %v, but expected %v", actual, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateEOF(t *testing.T) {
|
||||
b := []byte{1}
|
||||
|
||||
p, err := generate(3, 10, bytes.NewReader(b))
|
||||
if p != nil {
|
||||
t.Errorf("Was %v, but expected an error", p)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
t.Error("No error returned")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGeneratePolyEOFFullSize(t *testing.T) {
|
||||
b := []byte{1, 2, 0, 0, 0, 0}
|
||||
|
||||
p, err := generate(3, 10, bytes.NewReader(b))
|
||||
if p != nil {
|
||||
t.Errorf("Was %v, but xpected an error", p)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
t.Error("No error returned")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateFullSize(t *testing.T) {
|
||||
b := []byte{1, 2, 0, 4}
|
||||
|
||||
expected := []byte{10, 1, 2, 4}
|
||||
actual, err := generate(3, 10, bytes.NewReader(b))
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(actual, expected) {
|
||||
t.Errorf("Was %v but expected %v", actual, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInterpolate(t *testing.T) {
|
||||
in := []pair{
|
||||
pair{x: 1, y: 1},
|
||||
pair{x: 2, y: 2},
|
||||
pair{x: 3, y: 3},
|
||||
}
|
||||
|
||||
if v, want := interpolate(in, 0), byte(0); v != want {
|
||||
t.Errorf("Was %v, but expected %v", v, want)
|
||||
}
|
||||
}
|
102
vendor/github.com/codahale/sss/sss.go
generated
vendored
Normal file
@ -0,0 +1,102 @@
|
||||
// Package sss implements Shamir's Secret Sharing algorithm over GF(2^8).
|
||||
//
|
||||
// Shamir's Secret Sharing algorithm allows you to securely share a secret with
|
||||
// N people, allowing the recovery of that secret if K of those people combine
|
||||
// their shares.
|
||||
//
|
||||
// It begins by encoding a secret as a number (e.g., 42), and generating N
|
||||
// random polynomial equations of degree K-1 which have an X-intercept equal to
|
||||
// the secret. Given K=3, the following equations might be generated:
|
||||
//
|
||||
// f1(x) = 78x^2 + 19x + 42
|
||||
// f2(x) = 128x^2 + 171x + 42
|
||||
// f3(x) = 121x^2 + 3x + 42
|
||||
// f4(x) = 91x^2 + 95x + 42
|
||||
// etc.
|
||||
//
|
||||
// These polynomials are then evaluated for values of X > 0:
|
||||
//
|
||||
// f1(1) = 139
|
||||
// f2(2) = 896
|
||||
// f3(3) = 1140
|
||||
// f4(4) = 1878
|
||||
// etc.
|
||||
//
|
||||
// These (x, y) pairs are the shares given to the parties. In order to combine
|
||||
// shares to recover the secret, these (x, y) pairs are used as the input points
|
||||
// for Lagrange interpolation, which produces a polynomial which matches the
|
||||
// given points. This polynomial can be evaluated for f(0), producing the secret
|
||||
// value--the common x-intercept for all the generated polynomials.
|
||||
//
|
||||
// If fewer than K shares are combined, the interpolated polynomial will be
|
||||
// wrong, and the result of f(0) will not be the secret.
|
||||
//
|
||||
// This package constructs polynomials over the field GF(2^8) for each byte of
|
||||
// the secret, allowing for fast splitting and combining of anything which can
|
||||
// be encoded as bytes.
|
||||
//
|
||||
// This package has not been audited by cryptography or security professionals.
|
||||
package sss
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrInvalidCount is returned when the count parameter is invalid.
|
||||
ErrInvalidCount = errors.New("N must be > 1")
|
||||
// ErrInvalidThreshold is returned when the threshold parameter is invalid.
|
||||
ErrInvalidThreshold = errors.New("K must be > 1")
|
||||
)
|
||||
|
||||
// Split the given secret into N shares of which K are required to recover the
|
||||
// secret. Returns a map of share IDs (1-255) to shares.
|
||||
func Split(n, k byte, secret []byte) (map[byte][]byte, error) {
|
||||
if n <= 1 {
|
||||
return nil, ErrInvalidCount
|
||||
}
|
||||
|
||||
if k <= 1 {
|
||||
return nil, ErrInvalidThreshold
|
||||
}
|
||||
|
||||
shares := make(map[byte][]byte, n)
|
||||
|
||||
for _, b := range secret {
|
||||
p, err := generate(k-1, b, rand.Reader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for x := byte(1); x <= n; x++ {
|
||||
shares[x] = append(shares[x], eval(p, x))
|
||||
}
|
||||
}
|
||||
|
||||
return shares, nil
|
||||
}
|
||||
|
||||
// Combine the given shares into the original secret.
|
||||
//
|
||||
// N.B.: There is no way to know whether the returned value is, in fact, the
|
||||
// original secret.
|
||||
func Combine(shares map[byte][]byte) []byte {
|
||||
var secret []byte
|
||||
for _, v := range shares {
|
||||
secret = make([]byte, len(v))
|
||||
break
|
||||
}
|
||||
|
||||
points := make([]pair, len(shares))
|
||||
for i := range secret {
|
||||
p := 0
|
||||
for k, v := range shares {
|
||||
points[p] = pair{x: k, y: v[i]}
|
||||
p++
|
||||
}
|
||||
secret[i] = interpolate(points, 0)
|
||||
}
|
||||
|
||||
return secret
|
||||
}
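// Illustrative sketch, not part of the upstream package: the byte-level
// recovery described in the package comment, using this package's own
// eval, interpolate and pair helpers. A degree-1 polynomial over GF(2^8)
// with x-intercept 42 is evaluated at x=1 and x=2, and Lagrange
// interpolation at x=0 recovers the secret byte. The function name is
// hypothetical and exists only for illustration.
func exampleByteRecovery() byte {
	p := []byte{42, 7} // f(x) = mul(7, x) ^ 42, so f(0) = 42
	shares := []pair{
		{x: 1, y: eval(p, 1)}, // share held by participant 1
		{x: 2, y: eval(p, 2)}, // share held by participant 2
	}
	return interpolate(shares, 0) // == 42, the x-intercept
}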
|
32
vendor/github.com/codahale/sss/sss_test.go
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
package sss
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func Example() {
|
||||
secret := "well hello there!" // our secret
|
||||
n := byte(30) // create 30 shares
|
||||
k := byte(2) // require 2 of them to combine
|
||||
|
||||
shares, err := Split(n, k, []byte(secret)) // split into 30 shares
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
// select a random subset of the total shares
|
||||
subset := make(map[byte][]byte, k)
|
||||
for x, y := range shares { // just iterate since maps are randomized
|
||||
subset[x] = y
|
||||
if len(subset) == int(k) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// combine two shares and recover the secret
|
||||
recovered := string(Combine(subset))
|
||||
fmt.Println(recovered)
|
||||
|
||||
// Output: well hello there!
|
||||
}
|
23
vendor/github.com/dchest/blake2b/README
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
Go implementation of BLAKE2b collision-resistant cryptographic hash function
|
||||
created by Jean-Philippe Aumasson, Samuel Neves, Zooko Wilcox-O'Hearn, and
|
||||
Christian Winnerlein (https://blake2.net).
|
||||
|
||||
INSTALLATION
|
||||
|
||||
$ go get github.com/dchest/blake2b
|
||||
|
||||
|
||||
DOCUMENTATION
|
||||
|
||||
See http://godoc.org/github.com/dchest/blake2b
|
||||
|
||||
|
||||
PUBLIC DOMAIN DEDICATION
|
||||
|
||||
Written in 2012 by Dmitry Chestnykh.
|
||||
|
||||
To the extent possible under law, the author have dedicated all copyright
|
||||
and related and neighboring rights to this software to the public domain
|
||||
worldwide. This software is distributed without any warranty.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/
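
A minimal usage sketch, assuming only the exported API shown in blake2b.go
below (Sum512 for a one-shot digest, NewMAC for a keyed prefix-MAC):

    package main

    import (
        "fmt"

        "github.com/dchest/blake2b"
    )

    func main() {
        msg := []byte("hello, BLAKE2b")

        // One-shot 64-byte digest.
        digest := blake2b.Sum512(msg)
        fmt.Printf("%x\n", digest)

        // 32-byte keyed MAC over the same message.
        mac := blake2b.NewMAC(32, []byte("secret key"))
        mac.Write(msg)
        fmt.Printf("%x\n", mac.Sum(nil))
    }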
|
||||
|
299
vendor/github.com/dchest/blake2b/blake2b.go
generated
vendored
Normal file
@ -0,0 +1,299 @@
|
||||
// Written in 2012 by Dmitry Chestnykh.
|
||||
//
|
||||
// To the extent possible under law, the author have dedicated all copyright
|
||||
// and related and neighboring rights to this software to the public domain
|
||||
// worldwide. This software is distributed without any warranty.
|
||||
// http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
// Package blake2b implements BLAKE2b cryptographic hash function.
|
||||
package blake2b
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"hash"
|
||||
)
|
||||
|
||||
const (
|
||||
BlockSize = 128 // block size of algorithm
|
||||
Size = 64 // maximum digest size
|
||||
SaltSize = 16 // maximum salt size
|
||||
PersonSize = 16 // maximum personalization string size
|
||||
KeySize = 64 // maximum size of key
|
||||
)
|
||||
|
||||
type digest struct {
|
||||
h [8]uint64 // current chain value
|
||||
t [2]uint64 // message bytes counter
|
||||
f [2]uint64 // finalization flags
|
||||
x [BlockSize]byte // buffer for data not yet compressed
|
||||
nx int // number of bytes in buffer
|
||||
|
||||
ih [8]uint64 // initial chain value (after config)
|
||||
paddedKey [BlockSize]byte // copy of key, padded with zeros
|
||||
isKeyed bool // indicates whether hash was keyed
|
||||
size uint8 // digest size in bytes
|
||||
isLastNode bool // indicates processing of the last node in tree hashing
|
||||
}
|
||||
|
||||
// Initialization values.
|
||||
var iv = [8]uint64{
|
||||
0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
|
||||
0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
|
||||
0x510e527fade682d1, 0x9b05688c2b3e6c1f,
|
||||
0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
|
||||
}
|
||||
|
||||
// Config is used to configure hash function parameters and keying.
|
||||
// All parameters are optional.
|
||||
type Config struct {
|
||||
Size uint8 // digest size (if zero, default size of 64 bytes is used)
|
||||
Key []byte // key for prefix-MAC
|
||||
Salt []byte // salt (if < 16 bytes, padded with zeros)
|
||||
Person []byte // personalization (if < 16 bytes, padded with zeros)
|
||||
Tree *Tree // parameters for tree hashing
|
||||
}
|
||||
|
||||
// Tree represents parameters for tree hashing.
|
||||
type Tree struct {
|
||||
Fanout uint8 // fanout
|
||||
MaxDepth uint8 // maximal depth
|
||||
LeafSize uint32 // leaf maximal byte length (0 for unlimited)
|
||||
NodeOffset uint64 // node offset (0 for first, leftmost or leaf)
|
||||
NodeDepth uint8 // node depth (0 for leaves)
|
||||
InnerHashSize uint8 // inner hash byte length
|
||||
IsLastNode bool // indicates processing of the last node of layer
|
||||
}
|
||||
|
||||
var (
|
||||
defaultConfig = &Config{Size: Size}
|
||||
config256 = &Config{Size: 32}
|
||||
)
|
||||
|
||||
func verifyConfig(c *Config) error {
|
||||
if c.Size > Size {
|
||||
return errors.New("digest size is too large")
|
||||
}
|
||||
if len(c.Key) > KeySize {
|
||||
return errors.New("key is too large")
|
||||
}
|
||||
if len(c.Salt) > SaltSize {
|
||||
// Smaller salt is okay: it will be padded with zeros.
|
||||
return errors.New("salt is too large")
|
||||
}
|
||||
if len(c.Person) > PersonSize {
|
||||
// Smaller personalization is okay: it will be padded with zeros.
|
||||
return errors.New("personalization is too large")
|
||||
}
|
||||
if c.Tree != nil {
|
||||
if c.Tree.Fanout == 1 {
|
||||
return errors.New("fanout of 1 is not allowed in tree mode")
|
||||
}
|
||||
if c.Tree.MaxDepth < 2 {
|
||||
return errors.New("incorrect tree depth")
|
||||
}
|
||||
if c.Tree.InnerHashSize < 1 || c.Tree.InnerHashSize > Size {
|
||||
return errors.New("incorrect tree inner hash size")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// New returns a new hash.Hash configured with the given Config.
|
||||
// Config can be nil, in which case the default one is used, calculating 64-byte digest.
|
||||
// Returns non-nil error if Config contains invalid parameters.
|
||||
func New(c *Config) (hash.Hash, error) {
|
||||
if c == nil {
|
||||
c = defaultConfig
|
||||
} else {
|
||||
if c.Size == 0 {
|
||||
// Set default size if it's zero.
|
||||
c.Size = Size
|
||||
}
|
||||
if err := verifyConfig(c); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
d := new(digest)
|
||||
d.initialize(c)
|
||||
return d, nil
|
||||
}
|
||||
|
||||
// initialize initializes digest with the given
|
||||
// config, which must be non-nil and verified.
|
||||
func (d *digest) initialize(c *Config) {
|
||||
// Create parameter block.
|
||||
var p [BlockSize]byte
|
||||
p[0] = c.Size
|
||||
p[1] = uint8(len(c.Key))
|
||||
if c.Salt != nil {
|
||||
copy(p[32:], c.Salt)
|
||||
}
|
||||
if c.Person != nil {
|
||||
copy(p[48:], c.Person)
|
||||
}
|
||||
if c.Tree != nil {
|
||||
p[2] = c.Tree.Fanout
|
||||
p[3] = c.Tree.MaxDepth
|
||||
binary.LittleEndian.PutUint32(p[4:], c.Tree.LeafSize)
|
||||
binary.LittleEndian.PutUint64(p[8:], c.Tree.NodeOffset)
|
||||
p[16] = c.Tree.NodeDepth
|
||||
p[17] = c.Tree.InnerHashSize
|
||||
} else {
|
||||
p[2] = 1
|
||||
p[3] = 1
|
||||
}
|
||||
// Initialize.
|
||||
d.size = c.Size
|
||||
for i := 0; i < 8; i++ {
|
||||
d.h[i] = iv[i] ^ binary.LittleEndian.Uint64(p[i*8:])
|
||||
}
|
||||
if c.Tree != nil && c.Tree.IsLastNode {
|
||||
d.isLastNode = true
|
||||
}
|
||||
// Process key.
|
||||
if c.Key != nil {
|
||||
copy(d.paddedKey[:], c.Key)
|
||||
d.Write(d.paddedKey[:])
|
||||
d.isKeyed = true
|
||||
}
|
||||
// Save a copy of initialized state.
|
||||
copy(d.ih[:], d.h[:])
|
||||
}
|
||||
|
||||
// New512 returns a new hash.Hash computing the BLAKE2b 64-byte checksum.
|
||||
func New512() hash.Hash {
|
||||
d := new(digest)
|
||||
d.initialize(defaultConfig)
|
||||
return d
|
||||
}
|
||||
|
||||
// New256 returns a new hash.Hash computing the BLAKE2b 32-byte checksum.
|
||||
func New256() hash.Hash {
|
||||
d := new(digest)
|
||||
d.initialize(config256)
|
||||
return d
|
||||
}
|
||||
|
||||
// NewMAC returns a new hash.Hash computing BLAKE2b prefix-
|
||||
// Message Authentication Code of the given size in bytes
|
||||
// (up to 64) with the given key (up to 64 bytes in length).
|
||||
func NewMAC(outBytes uint8, key []byte) hash.Hash {
|
||||
d, err := New(&Config{Size: outBytes, Key: key})
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// Reset resets the state of digest to the initial state
|
||||
// after configuration and keying.
|
||||
func (d *digest) Reset() {
|
||||
copy(d.h[:], d.ih[:])
|
||||
d.t[0] = 0
|
||||
d.t[1] = 0
|
||||
d.f[0] = 0
|
||||
d.f[1] = 0
|
||||
d.nx = 0
|
||||
if d.isKeyed {
|
||||
d.Write(d.paddedKey[:])
|
||||
}
|
||||
}
|
||||
|
||||
// Size returns the digest size in bytes.
|
||||
func (d *digest) Size() int { return int(d.size) }
|
||||
|
||||
// BlockSize returns the algorithm block size in bytes.
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
left := BlockSize - d.nx
|
||||
if len(p) > left {
|
||||
// Process buffer.
|
||||
copy(d.x[d.nx:], p[:left])
|
||||
p = p[left:]
|
||||
blocks(d, d.x[:])
|
||||
d.nx = 0
|
||||
}
|
||||
// Process full blocks except for the last one.
|
||||
if len(p) > BlockSize {
|
||||
n := len(p) &^ (BlockSize - 1)
|
||||
if n == len(p) {
|
||||
n -= BlockSize
|
||||
}
|
||||
blocks(d, p[:n])
|
||||
p = p[n:]
|
||||
}
|
||||
// Fill buffer.
|
||||
d.nx += copy(d.x[d.nx:], p)
|
||||
return
|
||||
}
|
||||
|
||||
// Sum returns the calculated checksum.
|
||||
func (d0 *digest) Sum(in []byte) []byte {
|
||||
// Make a copy of d0 so that caller can keep writing and summing.
|
||||
d := *d0
|
||||
hash := d.checkSum()
|
||||
return append(in, hash[:d.size]...)
|
||||
}
|
||||
|
||||
func (d *digest) checkSum() [Size]byte {
|
||||
// Do not create unnecessary copies of the key.
|
||||
if d.isKeyed {
|
||||
for i := 0; i < len(d.paddedKey); i++ {
|
||||
d.paddedKey[i] = 0
|
||||
}
|
||||
}
|
||||
|
||||
dec := BlockSize - uint64(d.nx)
|
||||
if d.t[0] < dec {
|
||||
d.t[1]--
|
||||
}
|
||||
d.t[0] -= dec
|
||||
|
||||
// Pad buffer with zeros.
|
||||
for i := d.nx; i < len(d.x); i++ {
|
||||
d.x[i] = 0
|
||||
}
|
||||
// Set last block flag.
|
||||
d.f[0] = 0xffffffffffffffff
|
||||
if d.isLastNode {
|
||||
d.f[1] = 0xffffffffffffffff
|
||||
}
|
||||
// Compress last block.
|
||||
blocks(d, d.x[:])
|
||||
|
||||
var out [Size]byte
|
||||
j := 0
|
||||
for _, s := range d.h[:(d.size-1)/8+1] {
|
||||
out[j+0] = byte(s >> 0)
|
||||
out[j+1] = byte(s >> 8)
|
||||
out[j+2] = byte(s >> 16)
|
||||
out[j+3] = byte(s >> 24)
|
||||
out[j+4] = byte(s >> 32)
|
||||
out[j+5] = byte(s >> 40)
|
||||
out[j+6] = byte(s >> 48)
|
||||
out[j+7] = byte(s >> 56)
|
||||
j += 8
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// Sum512 returns a 64-byte BLAKE2b hash of data.
|
||||
func Sum512(data []byte) [64]byte {
|
||||
var d digest
|
||||
d.initialize(defaultConfig)
|
||||
d.Write(data)
|
||||
return d.checkSum()
|
||||
}
|
||||
|
||||
// Sum256 returns a 32-byte BLAKE2b hash of data.
|
||||
func Sum256(data []byte) (out [32]byte) {
|
||||
var d digest
|
||||
d.initialize(config256)
|
||||
d.Write(data)
|
||||
sum := d.checkSum()
|
||||
copy(out[:], sum[:32])
|
||||
return
|
||||
}
|
625
vendor/github.com/dchest/blake2b/blake2b_test.go
generated
vendored
Normal file
@ -0,0 +1,625 @@
|
||||
// Written in 2012 by Dmitry Chestnykh.
|
||||
//
|
||||
// To the extent possible under law, the author have dedicated all copyright
|
||||
// and related and neighboring rights to this software to the public domain
|
||||
// worldwide. This software is distributed without any warranty.
|
||||
// http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
package blake2b
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSum(t *testing.T) {
|
||||
buf := make([]byte, len(golden))
|
||||
for i := range buf {
|
||||
buf[i] = byte(i)
|
||||
}
|
||||
h := New512()
|
||||
for i, v := range golden {
|
||||
if v != fmt.Sprintf("%x", Sum512(buf[:i])) {
|
||||
t.Errorf("%d: Sum512(): \nexpected %s\ngot %x", i, v, Sum512(buf[:i]))
|
||||
}
|
||||
h.Reset()
|
||||
h.Write(buf[:i])
|
||||
sum := h.Sum(nil)
|
||||
if fmt.Sprintf("%x", sum) != v {
|
||||
t.Errorf("%d:\nexpected %s\ngot %x", i, v, sum)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func TestSum256(t *testing.T) {
|
||||
// Simple one-hash test.
|
||||
in := "The cryptographic hash function BLAKE2 is an improved version of the SHA-3 finalist BLAKE"
|
||||
good := "e5866d0c42b4e27e89a316fa5c3ba8cacae754e53d8267da37ba1893c2fcd92c"
|
||||
if good != fmt.Sprintf("%x", Sum256([]byte(in))) {
|
||||
t.Errorf("Sum256(): \nexpected %s\ngot %x", good, Sum256([]byte(in)))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestSumLength(t *testing.T) {
|
||||
h, _ := New(&Config{Size: 19})
|
||||
sum := h.Sum(nil)
|
||||
if len(sum) != 19 {
|
||||
t.Fatalf("Sum() returned a slice larger than the given hash size")
|
||||
}
|
||||
}
|
||||
|
||||
func TestKeyedSum(t *testing.T) {
|
||||
buf := make([]byte, len(goldenKeyed))
|
||||
for i := range buf {
|
||||
buf[i] = byte(i)
|
||||
}
|
||||
h := NewMAC(64, buf[:64])
|
||||
for i, v := range goldenKeyed {
|
||||
h.Reset()
|
||||
h.Write(buf[:i])
|
||||
sum := h.Sum(nil)
|
||||
if fmt.Sprintf("%x", sum) != v {
|
||||
t.Errorf("%d:\nexpected %s\ngot %x", i, v, sum)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
var bench = New512()
|
||||
var buf = make([]byte, 8<<10)
|
||||
|
||||
func BenchmarkWrite1K(b *testing.B) {
|
||||
b.SetBytes(1024)
|
||||
for i := 0; i < b.N; i++ {
|
||||
bench.Write(buf[:1024])
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkWrite8K(b *testing.B) {
|
||||
b.SetBytes(int64(len(buf)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
bench.Write(buf)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHash64(b *testing.B) {
|
||||
b.SetBytes(64)
|
||||
for i := 0; i < b.N; i++ {
|
||||
Sum512(buf[:64])
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHash128(b *testing.B) {
|
||||
b.SetBytes(128)
|
||||
for i := 0; i < b.N; i++ {
|
||||
Sum512(buf[:128])
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHash1K(b *testing.B) {
|
||||
b.SetBytes(1024)
|
||||
for i := 0; i < b.N; i++ {
|
||||
Sum512(buf[:1024])
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors taken from reference implementation in C#.
|
||||
var golden = []string{
|
||||
"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce",
|
||||
"2fa3f686df876995167e7c2e5d74c4c7b6e48f8068fe0e44208344d480f7904c36963e44115fe3eb2a3ac8694c28bcb4f5a0f3276f2e79487d8219057a506e4b",
|
||||
"1c08798dc641aba9dee435e22519a4729a09b2bfe0ff00ef2dcd8ed6f8a07d15eaf4aee52bbf18ab5608a6190f70b90486c8a7d4873710b1115d3debbb4327b5",
|
||||
"40a374727302d9a4769c17b5f409ff32f58aa24ff122d7603e4fda1509e919d4107a52c57570a6d94e50967aea573b11f86f473f537565c66f7039830a85d186",
|
||||
"77ddf4b14425eb3d053c1e84e3469d92c4cd910ed20f92035e0c99d8a7a86cecaf69f9663c20a7aa230bc82f60d22fb4a00b09d3eb8fc65ef547fe63c8d3ddce",
|
||||
"cbaa0ba7d482b1f301109ae41051991a3289bc1198005af226c5e4f103b66579f461361044c8ba3439ff12c515fb29c52161b7eb9c2837b76a5dc33f7cb2e2e8",
|
||||
"f95d45cf69af5c2023bdb505821e62e85d7caedf7beda12c0248775b0c88205eeb35af3a90816f6608ce7dd44ec28db1140614e1ddebf3aa9cd1843e0fad2c36",
|
||||
"8f945ba700f2530e5c2a7df7d5dce0f83f9efc78c073fe71ae1f88204a4fd1cf70a073f5d1f942ed623aa16e90a871246c90c45b621b3401a5ddbd9df6264165",
|
||||
"e998e0dc03ec30eb99bb6bfaaf6618acc620320d7220b3af2b23d112d8e9cb1262f3c0d60d183b1ee7f096d12dae42c958418600214d04f5ed6f5e718be35566",
|
||||
"6a9a090c61b3410aede7ec9138146ceb2c69662f460c3da53c6515c1eb31f41ca3d280e567882f95cf664a94147d78f42cfc714a40d22ef19470e053493508a2",
|
||||
"29102511d749db3cc9b4e335fa1f5e8faca8421d558f6a3f3321d50d044a248ba595cfc3efd3d2adc97334da732413f5cbf4751c362ba1d53862ac1e8dabeee8",
|
||||
"c97a4779d47e6f77729b5917d0138abb35980ab641bd73a8859eb1ac98c05362ed7d608f2e9587d6ba9e271d343125d40d933a8ed04ec1fe75ec407c7a53c34e",
|
||||
"10f0dc91b9f845fb95fad6860e6ce1adfa002c7fc327116d44d047cd7d5870d772bb12b5fac00e02b08ac2a0174d0446c36ab35f14ca31894cd61c78c849b48a",
|
||||
"dea9101cac62b8f6a3c650f90eea5bfae2653a4eafd63a6d1f0f132db9e4f2b1b662432ec85b17bcac41e775637881f6aab38dd66dcbd080f0990a7a6e9854fe",
|
||||
"441ffaa08cd79dff4afc9b9e5b5620eec086730c25f661b1d6fbfbd1cec3148dd72258c65641f2fca5eb155fadbcabb13c6e21dc11faf72c2a281b7d56145f19",
|
||||
"444b240fe3ed86d0e2ef4ce7d851edde22155582aa0914797b726cd058b6f45932e0e129516876527b1dd88fc66d7119f4ab3bed93a61a0e2d2d2aeac336d958",
|
||||
"bfbabbef45554ccfa0dc83752a19cc35d5920956b301d558d772282bc867009168e9e98606bb5ba73a385de5749228c925a85019b71f72fe29b3cd37ca52efe6",
|
||||
"9c4d0c3e1cdbbf485bec86f41cec7c98373f0e09f392849aaa229ebfbf397b22085529cb7ef39f9c7c2222a514182b1effaa178cc3687b1b2b6cbcb6fdeb96f8",
|
||||
"477176b3bfcbadd7657c23c24625e4d0d674d1868f006006398af97aa41877c8e70d3d14c3bbc9bbcdcea801bd0e1599af1f3eec67405170f4e26c964a57a8b7",
|
||||
"a78c490eda3173bb3f10dee52f110fb1c08e0302230b85ddd7c11257d92de148785ef00c039c0bb8eb9808a35b2d8c080f572859714c9d4069c5bcaf090e898e",
|
||||
"58d023397beb5b4145cb2255b07d74290b36d9fd1e594afbd8eea47c205b2efbfe6f46190faf95af504ab072e36f6c85d767a321bfd7f22687a4abbf494a689c",
|
||||
"4001ec74d5a46fd29c2c3cdbe5d1b9f20e51a941be98d2a4e1e2fbf866a672121db6f81a514cfd10e7358d571bdba48e4ce708b9d124894bc0b5ed554935f73a",
|
||||
"ccd1b22dab6511225d2401ea2d8625d206a12473cc732b615e5640cefff0a4adf971b0e827a619e0a80f5db9ccd0962329010d07e34a2064e731c520817b2183",
|
||||
"b4a0a9e3574edb9e1e72aa31e39cc5f30dbf943f8cabc408449654a39131e66d718a18819143e3ea96b4a1895988a1c0056cf2b6e04f9ac19d657383c2910c44",
|
||||
"447becab16630608d39f4f058b16f7af95b85a76aa0fa7cea2b80755fb76e9c804f2ca78f02643c915fbf2fce5e19de86000de03b18861815a83126071f8a37b",
|
||||
"54e6dab9977380a5665822db93374eda528d9beb626f9b94027071cb26675e112b4a7fec941ee60a81e4d2ea3ff7bc52cfc45dfbfe735a1c646b2cf6d6a49b62",
|
||||
"3ea62625949e3646704d7e3c906f82f6c028f540f5f72a794b0c57bf97b7649bfeb90b01d3ca3e829de21b3826e6f87014d3c77350cb5a15ff5d468a81bec160",
|
||||
"213cfe145c54a33691569980e5938c8883a46d84d149c8ff1a67cd287b4d49c6da69d3a035443db085983d0efe63706bd5b6f15a7da459e8d50a19093db55e80",
|
||||
"5716c4a38f38db104e494a0a27cbe89a26a6bb6f499ec01c8c01aa7cb88497e75148cd6eee12a7168b6f78ab74e4be749251a1a74c38c86d6129177e2889e0b6",
|
||||
"030460a98bdf9ff17cd96404f28fc304f2b7c04eaade53677fd28f788ca22186b8bc80dd21d17f8549c711aff0e514e19d4e15f5990252a03e082f28dc2052f6",
|
||||
"19e7f1ccee88a10672333e390cf22013a8c734c6cb9eab41f17c3c8032a2e4aca0569ea36f0860c7a1af28fa476840d66011168859334a9e4ef9cc2e61a0e29e",
|
||||
"29f8b8c78c80f2fcb4bdf7825ed90a70d625ff785d262677e250c04f3720c888d03f8045e4edf3f5285bd39d928a10a7d0a5df00b8484ac2868142a1e8bea351",
|
||||
"5c52920a7263e39d57920ca0cb752ac6d79a04fef8a7a216a1ecb7115ce06d89fd7d735bd6f4272555dba22c2d1c96e6352322c62c5630fde0f4777a76c3de2c",
|
||||
"83b098f262251bf660064a9d3511ce7687a09e6dfbb878299c30e93dfb43a9314db9a600337db26ebeedaf2256a96dabe9b29e7573ad11c3523d874dde5be7ed",
|
||||
"9447d98aa5c9331352f43d3e56d0a9a9f9581865998e2885cc56dd0a0bd5a7b50595bd10f7529bcd31f37dc16a1465d594079667da2a3fcb70401498837cedeb",
|
||||
"867732f2feeb23893097561ac710a4bff453be9cfbedba8ba324f9d312a82d732e1b83b829fdcd177b882ca0c1bf544b223be529924a246a63cf059bfdc50a1b",
|
||||
"f15ab26d4cdfcf56e196bb6ba170a8fccc414de9285afd98a3d3cf2fb88fcbc0f19832ac433a5b2cc2392a4ce34332987d8d2c2bef6c3466138db0c6e42fa47b",
|
||||
"2813516d68ed4a08b39d648aa6aacd81e9d655ecd5f0c13556c60fdf0d333ea38464b36c02baccd746e9575e96c63014f074ae34a0a25b320f0fbedd6acf7665",
|
||||
"d3259afca8a48962fa892e145acf547f26923ae8d4924c8a531581526b04b44c7af83c643ef5a0bc282d36f3fb04c84e28b351f40c74b69dc7840bc717b6f15f",
|
||||
"f14b061ae359fa31b989e30332bfe8de8cc8cdb568e14be214a2223b84caab7419549ecfcc96ce2acec119485d87d157d3a8734fc426597d64f36570ceaf224d",
|
||||
"55e70b01d1fbf8b23b57fb62e26c2ce54f13f8fa2464e6eb98d16a6117026d8b90819012496d4071ebe2e59557ece3519a7aa45802f9615374877332b73490b3",
|
||||
"25261eb296971d6e4a71b2928e64839c67d422872bf9f3c31993615222de9f8f0b2c4be8548559b4b354e736416e3218d4e8a1e219a4a6d43e1a9a521d0e75fc",
|
||||
"08307f347c41294e34bb54cb42b1522d22f824f7b6e5db50fda096798e181a8f026fa27b4ae45d52a62caf9d5198e24a4913c6671775b2d723c1239bfbf016d7",
|
||||
"1e5c62e7e9bfa1b118747a2de08b3ca10112af96a46e4b22c3fc06f9bfee4eb5c49e057a4a4886234324572576bb9b5ecfde0d99b0de4f98ec16e4d1b85fa947",
|
||||
"c74a77395fb8bc126447454838e561e962853dc7eb49a1e3cb67c3d0851f3e39517be8c350ac910903d49cd2bfdf545c99316d0346170b739f0add5d533c2cfc",
|
||||
"0dd57b423cc01eb2861391eb886a0d17079b933fc76eb3fc08a19f8a74952cb68f6bcdc644f77370966e4d13e80560bcf082ef0479d48fbbab4df03b53a4e178",
|
||||
"4d8dc3923edccdfce70072398b8a3da5c31fcb3ee3b645c85f717cbaeb4b673a19394425a585bfb464d92f1597d0b754d163f97ced343b25db5a70ef48ebb34f",
|
||||
"f0a50553e4dfb0c4e3e3d3ba82034857e3b1e50918f5b8a7d698e10d242b0fb544af6c92d0c3aaf9932220416117b4e78ecb8a8f430e13b82a5915290a5819c5",
|
||||
"b15543f3f736086627cc5365e7e8988c2ef155c0fd4f428961b00d1526f04d6d6a658b4b8ed32c5d8621e7f4f8e8a933d9ecc9dd1b8333cbe28cfc37d9719e1c",
|
||||
"7b4fa158e415fef023247264cbbe15d16d91a44424a8db707eb1e2033c30e9e1e7c8c0864595d2cb8c580eb47e9d16abbd7e44e824f7cedb7def57130e52cfe9",
|
||||
"60424ff23234c34dc9687ad502869372cc31a59380186bc2361c835d972f49666eb1ac69629de646f03f9b4db9e2ace093fbfdf8f20ab5f98541978be8ef549f",
|
||||
"7406018ce704d84f5eb9c79fea97da345699468a350ee0b2d0f3a4bf2070304ea862d72a51c57d3064947286f531e0eaf7563702262e6c724abf5ed8c8398d17",
|
||||
"14ef5c6d647b3bd1e6e32006c231199810de5c4dc88e70240273b0ea18e651a3eb4f5ca3114b8a56716969c7cda27e0c8db832ad5e89a2dc6cb0adbe7d93abd1",
|
||||
"38cf6c24e3e08bcf1f6cf3d1b1f65b905239a3118033249e448113ec632ea6dc346feeb2571c38bd9a7398b2221280328002b23e1a45adaffe66d93f6564eaa2",
|
||||
"6cd7208a4bc7e7e56201bbba02a0f489cd384abe40afd4222f158b3d986ee72a54c50fb64fd4ed2530eda2c8af2928a0da6d4f830ae1c9db469dfd970f12a56f",
|
||||
"659858f0b5c9edab5b94fd732f6e6b17c51cc096104f09beb3afc3aa467c2ecf885c4c6541effa9023d3b5738ae5a14d867e15db06fe1f9d1127b77e1aabb516",
|
||||
"26cca0126f5d1a813c62e5c71001c046f9c92095704550be5873a495a999ad010a4f79491f24f286500adce1a137bc2084e4949f5b7294cefe51ecaff8e95cba",
|
||||
"4147c1f55172788c5567c561feef876f621fff1ce87786b8467637e70dfbcd0dbdb6415cb600954ab9c04c0e457e625b407222c0fe1ae21b2143688ada94dc58",
|
||||
"5b1bf154c62a8af6e93d35f18f7f90abb16a6ef0e8d1aecd118bf70167bab2af08935c6fdc0663ce74482d17a8e54b546d1c296631c65f3b522a515839d43d71",
|
||||
"9f600419a4e8f4fb834c24b0f7fc13bf4e279d98e8a3c765ee934917403e3a66097182ea21453cb63ebbe8b73a9c2167596446438c57627f330badd4f569f7d6",
|
||||
"457ef6466a8924fd8011a34471a5a1ac8ccd9bd0d07a97414ac943021ce4b9e4b9c8db0a28f016ed43b1542481990022147b313e194671131e708dd43a3ed7dc",
|
||||
"9997b2194d9af6dfcb9143f41c0ed83d3a3f4388361103d38c2a49b280a581212715fd908d41c651f5c715ca38c0ce2830a37e00e508ced1bcdc320e5e4d1e2e",
|
||||
"5c6bbf16baa180f986bd40a1287ed4c549770e7284858fc47bc21ab95ebbf3374b4ee3fd9f2af60f3395221b2acc76f2d34c132954049f8a3a996f1e32ec84e5",
|
||||
"d10bf9a15b1c9fc8d41f89bb140bf0be08d2f3666176d13baac4d381358ad074c9d4748c300520eb026daeaea7c5b158892fde4e8ec17dc998dcd507df26eb63",
|
||||
"2fc6e69fa26a89a5ed269092cb9b2a449a4409a7a44011eecad13d7c4b0456602d402fa5844f1a7a758136ce3d5d8d0e8b86921ffff4f692dd95bdc8e5ff0052",
|
||||
"fcbe8be7dcb49a32dbdf239459e26308b84dff1ea480df8d104eeff34b46fae98627b450c2267d48c0946a697c5b59531452ac0484f1c84e3a33d0c339bb2e28",
|
||||
"a19093a6e3bcf5952f850f2030f69b9606f147f90b8baee3362da71d9f35b44ef9d8f0a7712ba1877fddcd2d8ea8f1e5a773d0b745d4725605983a2de901f803",
|
||||
"3c2006423f73e268fa59d2920377eb29a4f9a8b462be15983ee3b85ae8a78e992633581a9099893b63db30241c34f643027dc878279af5850d7e2d4a2653073a",
|
||||
"d0f2f2e3787653f77cce2fa24835785bbd0c433fc779465a115149905a9dd1cb827a628506d457fcf124a0c2aef9ce2d2a0a0f63545570d8667ff9e2eba07334",
|
||||
"78a9fc048e25c6dcb5de45667de8ffdd3a93711141d594e9fa62a959475da6075ea8f0916e84e45ad911b75467077ee52d2c9aebf4d58f20ce4a3a00458b05d4",
|
||||
"45813f441769ab6ed37d349ff6e72267d76ae6bb3e3c612ec05c6e02a12af5a37c918b52bf74267c3f6a3f183a8064ff84c07b193d08066789a01accdb6f9340",
|
||||
"956da1c68d83a7b881e01b9a966c3c0bf27f68606a8b71d457bd016d4c41dd8a380c709a296cb4c6544792920fd788835771a07d4a16fb52ed48050331dc4c8b",
|
||||
"df186c2dc09caa48e14e942f75de5ac1b7a21e4f9f072a5b371e09e07345b0740c76177b01278808fec025eded9822c122afd1c63e6f0ce2e32631041063145c",
|
||||
"87475640966a9fdcd6d3a3b5a2cca5c08f0d882b10243c0ec1bf3c6b1c37f2cd3212f19a057864477d5eaf8faed73f2937c768a0af415e84bbce6bd7de23b660",
|
||||
"c3b573bbe10949a0fbd4ff884c446f2229b76902f9dfdbb8a0353da5c83ca14e8151bbaac82fd1576a009adc6f1935cf26edd4f1fb8da483e6c5cd9d8923adc3",
|
||||
"b09d8d0bba8a7286e43568f7907550e42036d674e3c8fc34d8ca46f771d6466b70fb605875f6a863c877d12f07063fdc2e90ccd459b1910dcd52d8f10b2b0a15",
|
||||
"af3a22bf75b21abfb0acd54422ba1b7300a952eff02ebeb65b5c234471a98df32f4f9643ce1904108a168767924280bd76c83f8c82d9a79d9259b195362a2a04",
|
||||
"bf4ff2221b7e6957a724cd964aa3d5d0d9941f540413752f4699d8101b3e537508bf09f8508b317736ffd265f2847aa7d84bd2d97569c49d632aed9945e5fa5e",
|
||||
"9c6b6b78199b1bdacb4300e31479fa622a6b5bc80d4678a6078f88a8268cd7206a2799e8d4621a464ef6b43dd8adffe97caf221b22b6b8778b149a822aefbb09",
|
||||
"890656f09c99d280b5ecb381f56427b813751bc652c7828078b23a4af83b4e3a61fdbac61f89bee84ea6bee760c047f25c6b0a201c69a38fd6fd971af18588bb",
|
||||
"31a046f7882ffe6f83ce472e9a0701832ec7b3f76fbcfd1df60fe3ea48fde1651254247c3fd95e100f9172731e17fd5297c11f4bb328363ca361624a81af797c",
|
||||
"27a60b2d00e7a671d47d0aec2a686a0ac04b52f40ab6629028eb7d13f4baa99ac0fe46ee6c814944f2f4b4d20e9378e4847ea44c13178091e277b87ea7a55711",
|
||||
"8b5ccef194162c1f19d68f91e0b0928f289ec5283720840c2f73d253111238dcfe94af2b59c2c1ca2591901a7bc060e7459b6c47df0f71701a35cc0aa831b5b6",
|
||||
"57ab6c4b2229aeb3b70476d803cd63812f107ce6da17fed9b17875e8f86c724f49e024cbf3a1b8b119c50357652b81879d2ade2d588b9e4f7cedba0e4644c9ee",
|
||||
"0190a8dac320a739f322e15731aa140ddaf5bed294d5c82e54fef29f214e18aafaa84f8be99af62950266b8f901f15dd4c5d35516fc35b4cab2e96e4695bbe1c",
|
||||
"d14d7c4c415eeb0e10b159224bea127ebd84f9591c702a330f5bb7bb7aa44ea39de6ed01f18da7adf40cfb97c5d152c27528824b21e239526af8f36b214e0cfb",
|
||||
"be28c4be706970488fac7d29c3bd5c4e986085c4c3332f1f3fd30973db614164ba2f31a78875ffdc150325c88327a9443ed04fdfe5be93876d1628560c764a80",
|
||||
"031da1069e3a2e9c3382e436ffd79df74b1ca6a8adb2deabe676ab45994cbc054f037d2f0eace858d32c14e2d1c8b46077308e3bdc2c1b53172ecf7a8c14e349",
|
||||
"4665cef8ba4db4d0acb118f2987f0bb09f8f86aa445aa3d5fc9a8b346864787489e8fcecc125d17e9b56e12988eac5ecc7286883db0661b8ff05da2afff30fe4",
|
||||
"63b7032e5f930cc9939517f9e986816cfbec2be59b9568b13f2ead05bae7777cab620c6659404f7409e4199a3be5f7865aa7cbdf8c4253f7e8219b1bd5f46fea",
|
||||
"9f09bf093a2b0ff8c2634b49e37f1b2135b447aa9144c9787dbfd92129316c99e88aab8a21fdef2372d1189aec500f95775f1f92bfb45545e4259fb9b7b02d14",
|
||||
"f9f8493c68088807df7f6a2693d64ea59f03e9e05a223e68524ca32195a4734b654fcea4d2734c866cf95c889fb10c49159be2f5043dc98bb55e02ef7bdcb082",
|
||||
"3c9a7359ab4febce07b20ac447b06a240b7fe1dae5439c49b60b5819f7812e4c172406c1aac316713cf0dded1038077258e2eff5b33913d9d95caeb4e6c6b970",
|
||||
"ad6aab8084510e822cfce8625d62cf4de655f4763884c71e80bab9ac9d5318dba4a6033ed29084e65216c031606ca17615dcfe3ba11d26851ae0999ca6e232cf",
|
||||
"156e9e6261374c9dc884f36e70f0fe1ab9297997b836fa7d170a9c9ebf575b881e7bcea44d6c0248d35597907154828955be19135852f9228815eca024a8adfb",
|
||||
"4215407633f4cca9b6788be93e6aa3d963c7d6ce4b147247099f46a3acb500a30038cb3e788c3d29f132ad844e80e9e99251f6db96acd8a091cfc770af53847b",
|
||||
"1c077e279de6548523502b6df800ffdab5e2c3e9442eb838f58c295f3b147cef9d701c41c321283f00c71affa0619310399126295b78dd4d1a74572ef9ed5135",
|
||||
"f07a555f49fe481cf4cd0a87b71b82e4a95064d06677fdd90a0eb598877ba1c83d4677b393c3a3b6661c421f5b12cb99d20376ba7275c2f3a8f5a9b7821720da",
|
||||
"b5911b380d20c7b04323e4026b38e200f534259233b581e02c1e3e2d8438d6c66d5a4eb201d5a8b75072c4ec29106334da70bc79521b0ced2cfd533f5ff84f95",
|
||||
"01f070a09bae911296361f91aa0e8e0d09a7725478536d9d48c5fe1e5e7c3c5b9b9d6eb07796f6da57ae562a7d70e882e37adfde83f0c433c2cd363536bb22c8",
|
||||
"6f793eb4374a48b0775acaf9adcf8e45e54270c9475f004ad8d5973e2aca52747ff4ed04ae967275b9f9eb0e1ff75fb4f794fa8be9add7a41304868d103fab10",
|
||||
"965f20f139765fcc4ce4ba3794675863cac24db472cd2b799d035bce3dbea502da7b524865f6b811d8c5828d3a889646fe64a380da1aa7c7044e9f245dced128",
|
||||
"ec295b5783601244c30e4641e3b45be222c4dce77a58700f53bc8ec52a941690b4d0b087fb6fcb3f39832b9de8f75ec20bd43079811749cdc907edb94157d180",
|
||||
"61c72f8ccc91dbb54ca6750bc489672de09faedb8fdd4f94ff2320909a303f5d5a98481c0bc1a625419fb4debfbf7f8a53bb07ec3d985e8ea11e72d559940780",
|
||||
"afd8145b259eefc8d12620c3c5b03e1ed8fd2ccefe0365078c80fd42c1770e28b44948f27e65a1886690110db814397b68e43d80d1ba16dfa358e739c898cfa3",
|
||||
"552fc7893cf1ce933ada35c0da98844e41545e244c3157a1428d7b4c21f9cd7e4071aed77b7ca9f1c38fba32237412ef21a342742ec8324378f21e507fafdd88",
|
||||
"467a33fbadf5ebc52596ef86aaaefc6faba8ee651b1ce04de368a03a5a9040ef2835e00adb09abb3fbd2bce818a2413d0b0253b5bda4fc5b2f6f85f3fd5b55f2",
|
||||
"22eff8e6dd5236f5f57d94ede874d6c9428e8f5d566f17cd6d1848cd752fe13c655cb10fbaaff76872f2bf2da99e15dc624075e1ec2f58a3f64072121838569e",
|
||||
"9cec6bbf62c4bce4138abae1cbec8dad31950444e90321b1347196834c114b864af3f3cc3508f83751ffb4eda7c84d140734bb4263c3625c00f04f4c8068981b",
|
||||
"a8b60fa4fc2442f6f1514ad7402626920cc7c2c9f72124b8cba8ee2cb7c4586f658a4410cffcc0ab88343955e094c6af0d20d0c714fb0a988f543f300f58d389",
|
||||
"8271cc45dfa5e4170e847e8630b952cf9c2aa777d06f26a7585b8381f188dacc7337391cfcc94b053dc4ec29cc17f077870428f1ac23fddda165ef5a3f155f39",
|
||||
"bf23c0c25c8060e4f6995f1623a3bebecaa96e308680000a8aa3cd56bb1a6da099e10d9231b37f4519b2efd2c24de72f31a5f19535241b4a59fa3c03ceb790e7",
|
||||
"877fd652c05281009c0a5250e7a3a671f8b18c108817fe4a874de22da8e45db11958a600c5f62e67d36cbf84474cf244a9c2b03a9fb9dc711cd1a2cab6f3fae0",
|
||||
"29df4d87ea444baf5bcdf5f4e41579e28a67de84149f06c03f110ea84f572a9f676addd04c4878f49c5c00accda441b1a387caceb2e993bb7a10cd8c2d6717e1",
|
||||
"710dacb166844639cd7b637c274209424e2449dc35d790bbfa4f76177054a36b3b76fac0ca6e61df1e687000678ac0746df75d0a3954897681fd393a155a1bb4",
|
||||
"c1d5f93b8dea1f2571babccbc01764541a0cda87e444d673c50966ca559c33354b3acb26e5d5781ffb28847a4b4754d77008c62a835835f500dea7c3b58bdae2",
|
||||
"a41e41271cdab8af4d72b104bfb2ad041ac4df14677da671d85640c4b187f50c2b66513c4619fbd5d5dc4fe65dd37b9042e9848dda556a504caa2b1c6afe4730",
|
||||
"e7bcbacdc379c43d81ebadcb37781552fc1d753e8cf310d968392d06c91f1d64cc9e90ce1d22c32d277fc6cda433a4d442c762e9eacf2c259f32d64cf9da3a22",
|
||||
"51755b4ac5456b13218a19c5b9242f57c4a981e4d4ecdce09a3193362b808a579345d4881c2607a56534dd7f21956aff72c2f4173a6e7b6cc2212ba0e3daee1f",
|
||||
"dcc2c4beb9c1f2607b786c20c631972347034c1cc02fcc7d02ff01099cfe1c6989840ac213923629113aa8bad713ccf0fe4ce13264fb32b8b0fe372da382544a",
|
||||
"3d55176acea4a7e3a65ffa9fb10a7a1767199cf077cee9f71532d67cd7c73c9f93cfc37ccdcc1fdef50aad46a504a650d298d597a3a9fa95c6c40cb71fa5e725",
|
||||
"d07713c005de96dd21d2eb8bbeca66746ea51a31ae922a3e74864889540a48db27d7e4c90311638b224bf0201b501891754848113c266108d0adb13db71909c7",
|
||||
"58983c21433d950caa23e4bc18543b8e601c204318532152daf5e159a0cd1480183d29285c05f129cb0cc3164687928086ffe380158df1d394c6ac0d4288bca8",
|
||||
"8100a8dc528d2b682ab4250801ba33f02a3e94c54dac0ae1482aa21f51ef3a82f3807e6facb0aeb05947bf7aa2adcb034356f90fa4560ede02201a37e411ec1a",
|
||||
"07025f1bb6c784f3fe49de5c14b936a5acacacaab33f6ac4d0e00ab6a12483d6bec00b4fe67c7ca5cc508c2a53efb5bfa5398769d843ff0d9e8b14d36a01a77f",
|
||||
"ba6aefd972b6186e027a76273a4a723321a3f580cfa894da5a9ce8e721c828552c64dacee3a7fd2d743b5c35ad0c8efa71f8ce99bf96334710e2c2346e8f3c52",
|
||||
"e0721e02517aedfa4e7e9ba503e025fd46e714566dc889a84cbfe56a55dfbe2fc4938ac4120588335deac8ef3fa229adc9647f54ad2e3472234f9b34efc46543",
|
||||
"b6292669ccd38d5f01caae96ba272c76a879a45743afa0725d83b9ebb26665b731f1848c52f11972b6644f554c064fa90780dbbbf3a89d4fc31f67df3e5857ef",
|
||||
"2319e3789c47e2daa5fe807f61bec2a1a6537fa03f19ff32e87eecbfd64b7e0e8ccff439ac333b040f19b0c4ddd11a61e24ac1fe0f10a039806c5dcc0da3d115",
|
||||
"f59711d44a031d5f97a9413c065d1e614c417ede998590325f49bad2fd444d3e4418be19aec4e11449ac1a57207898bc57d76a1bcf3566292c20c683a5c4648f",
|
||||
"df0a9d0c212843a6a934e3902b2dd30d17fba5f969d2030b12a546d8a6a45e80cf5635f071f0452e9c919275da99bed51eb1173c1af0518726b75b0ec3bae2b5",
|
||||
"a3eb6e6c7bf2fb8b28bfe8b15e15bb500f781ecc86f778c3a4e655fc5869bf2846a245d4e33b7b14436a17e63be79b36655c226a50ffbc7124207b0202342db5",
|
||||
"56d4cbcd070563426a017069425c2cd2ae540668287a5fb9dac432eb8ab1a353a30f2fe1f40d83333afe696a267795408a92fe7da07a0c1814cf77f36e105ee8",
|
||||
"e59b9987d428b3eda37d80abdb16cd2b0aef674c2b1dda4432ea91ee6c935c684b48b4428a8cc740e579a30deff35a803013820dd23f14ae1d8413b5c8672aec",
|
||||
"cd9fcc99f99d4cc16d031900b2a736e1508db4b586814e6345857f354a70ccecb1df3b50a19adaf43c278efa423ff4bb6c523ec7fd7859b97b168a7ebff8467c",
|
||||
"0602185d8c3a78738b99164b8bc6ffb21c7debebbf806372e0da44d121545597b9c662a255dc31542cf995ecbe6a50fb5e6e0ee4ef240fe557eded1188087e86",
|
||||
"c08afa5b927bf08097afc5fff9ca4e7800125c1f52f2af3553fa2b89e1e3015c4f87d5e0a48956ad31450b083dad147ffb5ec03434a26830cf37d103ab50c5da",
|
||||
"36f1e1c11d6ef6bc3b536d505d544a871522c5c2a253067ec9933b6ec25464daf985525f5b9560a16d890259ac1bb5cc67c0c469cde133def000ea1d686f4f5d",
|
||||
"bf2ab2e2470f5438c3b689e66e7686fffa0cb1e1798ad3a86ff99075bf6138e33d9c0ce59afb24ac67a02af34428191a9a0a6041c07471b7c3b1a752d6fc0b8b",
|
||||
"d400601f9728ccc4c92342d9787d8d28ab323af375ca5624b4bb91d17271fbae862e413be73f1f68e615b8c5c391be0dbd9144746eb339ad541547ba9c468a17",
|
||||
"79fe2fe157eb85a038abb8ebbc647731d2c83f51b0ac6ee14aa284cb6a3549a4dcceb300740a825f52f5fb30b03b8c4d8b0f4aa67a63f4a94e3303c4eda4c02b",
|
||||
"75351313b52a8529298d8c186b1768666dcca8595317d7a4816eb88c062020c0c8efc554bb341b64688db5ccafc35f3c3cd09d6564b36d7b04a248e146980d4b",
|
||||
"e3128b1d311d02179d7f25f97a5a8bee2cc8c86303644fcd664e157d1fef00f23e46f9a5e8e5c890ce565bb6abd4302ce06469d52a5bd53e1c5a54d04649dc03",
|
||||
"c2382a72d2d3ace9d5933d00b60827ed380cda08d0ba5f6dd41e29ee6dbe8ecb9235f06be95d83b6816a2fb7a5ad47035e8a4b69a4884b99e4bece58cab25d44",
|
||||
"6b1c69460bbd50ac2ed6f32e6e887cfed407d47dcf0aaa60387fe320d780bd03eab6d7baeb2a07d10cd552a300341354ea9a5f03183a623f92a2d4d9f00926af",
|
||||
"6cda206c80cdc9c44ba990e0328c314f819b142d00630404c48c05dc76d1b00ce4d72fc6a48e1469ddef609412c364820854214b4869af090f00d3c1ba443e1b",
|
||||
"7ffc8c26fbd6a0f7a609e6e1939f6a9edf1b0b066641fb76c4f9602ed748d11602496b35355b1aa255850a509d2f8ee18c8f3e1d7dcbc37a136598f56a59ed17",
|
||||
"70de1f08dd4e09d5fc151f17fc991a23abfc05104290d50468882efaf582b6ec2f14f577c0d68c3ad06626916e3c86e6daab6c53e5163e82b6bd0ce49fc0d8df",
|
||||
"4f81935756ed35ee2058ee0c6a6110d6fac5cb6a4f46aa9411603f99965823b6da4838276c5c06bc7880e376d92758369ee7305bcec8d3cfd28ccabb7b4f0579",
|
||||
"abcb61cb3683d18f27ad527908ed2d32a0426cb7bb4bf18061903a7dc42e7e76f982382304d18af8c80d91dd58dd47af76f8e2c36e28af2476b4bccf82e89fdf",
|
||||
"02d261ad56a526331b643dd2186de9a82e72a58223cd1e723686c53d869b83b94632b7b647ab2afc0d522e29da3a5615b741d82852e0df41b66007dbcba90543",
|
||||
"c5832741fa30c5436823015383d297ff4c4a5d7276c3f902122066e04be5431b1a85faf73b918434f9300963d1dea9e8ac3924ef490226edeea5f743e410669f",
|
||||
"cfaeab268cd075a5a6aed515023a032d54f2f2ff733ce0cbc78db51db4504d675923f82746d6594606ad5d67734b11a67cc6a468c2032e43ca1a94c6273a985e",
|
||||
"860850f92eb268272b67d133609bd64e34f61bf03f4c1738645c17fec818465d7ecd2be2907641130025fda79470ab731646e7f69440e8367ea76ac4cee8a1df",
|
||||
"84b154ed29bbedefa648286839046f4b5aa34430e2d67f7496e4c39f2c7ea78995f69e1292200016f16ac3b37700e6c7e7861afc396b64a59a1dbf47a55c4bbc",
|
||||
"aeeec260a5d8eff5ccab8b95da435a63ed7a21ea7fc7559413fd617e33609f8c290e64bbacc528f6c080262288b0f0a3219be223c991bee92e72349593e67638",
|
||||
"8ad78a9f26601d127e8d2f2f976e63d19a054a17dcf59e0f013ab54a6887bbdffde7aaae117e0fbf3271016595b9d9c712c01b2c53e9655a382bc4522e616645",
|
||||
"8934159dade1ac74147dfa282c75954fcef443ef25f80dfe9fb6ea633b8545111d08b34ef43fff17026c7964f5deac6d2b3c29dacf2747f022df5967dfdc1a0a",
|
||||
"cd36dd0b240614cf2fa2b9e959679dcdd72ec0cd58a43da3790a92f6cdeb9e1e795e478a0a47d371100d340c5cedcdbbc9e68b3f460818e5bdff7b4cda4c2744",
|
||||
"00df4e099b807137a85990f49d3a94315e5a5f7f7a6076b303e96b056fb93800111f479628e2f8db59aeb6ac70c3b61f51f9b46e80ffdeae25ebddb4af6cb4ee",
|
||||
"2b9c955e6caed4b7c9e246b86f9a1726e810c59d126cee66ed71bf015b83558a4b6d84d18dc3ff4620c2ffb722359fdef85ba0d4e2d22ecbe0ed784f99afe587",
|
||||
"181df0a261a2f7d29ea5a15772715105d450a4b6c236f699f462d60ca76487feedfc9f5eb92df838e8fb5dc3694e84c5e0f4a10b761f506762be052c745a6ee8",
|
||||
"21fb203458bf3a7e9a80439f9a902899cd5de0139dfd56f7110c9dec8437b26bda63de2f565926d85edb1d6c6825669743dd9992653d13979544d5dc8228bfaa",
|
||||
"ef021f29c5ffb830e64b9aa9058dd660fd2fcb81c497a7e698bcfbf59de5ad4a86ff93c10a4b9d1ae5774725f9072dcde9e1f199bab91f8bff921864aa502eee",
|
||||
"b3cfda40526b7f1d37569bdfcdf911e5a6efe6b2ec90a0454c47b2c046bf130fc3b352b34df4813d48d33ab8e269b69b075676cb6d00a8dcf9e1f967ec191b2c",
|
||||
"b4c6c3b267071eefb9c8c72e0e2b941293641f8673cb70c1cc26ad1e73cf141755860ad19b34c2f34ed35bb52ec4507cc1fe59047743a5f0c6febde625e26091",
|
||||
"57a34f2bcca60d4b85103b830c9d7952a416be5263ae429c9e5e53fe8590a8f78ec65a51109ea85dcdf7b6223f9f2b340539fad81923dbf8edabf95129e4dff6",
|
||||
"9cf46662fcd61a232277b685663b8b5da832dfd9a3b8ccfeec993ec6ac415ad07e048adfe414df272770dba867da5c1224c6fd0aa0c2187d426ac647e9887361",
|
||||
"5ce1042ab4d542c2f9ee9d17262af8164098935bef173d0e18489b04841746cd2f2df866bd7da6e5ef9024c648023ec723ab9c62fd80285739d84f15d2ab515a",
|
||||
"8488396bd4a8729b7a473178f232dadf3f0f8e22678ba5a43e041e72da1e2cf82194c307207a54cb8156293339eaec693ff66bfcd5efc65e95e4ecaf54530abd",
|
||||
"f598da901c3835bca560779037dfde9f0c51dc61c0b760fc1522d7b470ee63f5bdc6498476e86049ad86e4e21af2854a984cc905427d2f17f66b1f41c3da6f61",
|
||||
"5f93269798cf02132107337660a8d7a177354c0212eb93e555e7c37a08aef3d8dce01217011cd965c04dd2c105f2e2b6cae5e4e6bcaf09dfbee3e0a6a6357c37",
|
||||
"0ecf581d47bac9230986faabd70c2f5b80e91066f0ec55a842937882286d2ca007bb4e973b0b091d52167ff7c4009c7ab4ad38fff1dceacdb7be81ef4a452952",
|
||||
"5aeca8abe1528582b2a307b4009585498a3d467ca6101cb0c5126f9976056e9ffc123cc20c302b2a737f492c75d21f01512c90ca0541dfa56e950a321dcb28d8",
|
||||
"732fbf8f1cb2b8329263ede27858fe46f8d3354d376bcda0548e7ce1fa9dd11f85eb661fe950b543aa635ca4d3f04ede5b32d6b656e5ce1c44d35c4a6c56cff8",
|
||||
"d5e938735d63788c80100aefd18648d18cf272f69f20ff24cfe2895c088ad08b0104da1672a4eb26fc52545cc7d7a01b266cf546c403c45bd129eb41bdd9200b",
|
||||
"65a245b49352ee297d91af8c8be00528ac6e046dd83ac7bd465a98816dd68f3e00e1ae8f895327a7e9a8c9326598379a29c9fc91ec0c6eef08f3e2b216c11008",
|
||||
"c95654b63019130ab45dd0fb4941b98aeb3af2a123913eca2ce99b3e97410a7bf8661cc7fbaa2bc1cf2b13113b1ed40a0118b88e5fffc3542759ea007ed4c58d",
|
||||
"1eb262f38fa494431f017dad44c0dfb69324ac032f04b657fc91a88647bb74760f24e7c956514f0cf002990b182c1642b9b2426e96a61187e4e012f00e217d84",
|
||||
"3b955aeebfa5151ac1ab8e3f5cc1e3767084c842a575d36269836e97353d41622b731dddcd5f269550a3a5b87be1e90326340b6e0e62555815d9600597ac6ef9",
|
||||
"68289f6605473ba0e4f241baf7477a9885426a858f19ef2a18b0d40ef8e41282ed5526b519799e270f13881327918278755711071d8511fe963e3b5606aa3716",
|
||||
"80a33787542612c38f6bcd7cd86cab460227509b1cbad5ec408a91413d51155a0476dadbf3a2518e4a6e77cc346622e347a469bf8baa5f04eb2d98705355d063",
|
||||
"34629bc6d831391c4cdf8af1b4b7b6b8e8ee17cf98c70e5dd586cd99f14b11df945166236a9571e6d591bb83ee4d164d46f6b9d8ef86ff865a81bfb91b00424b",
|
||||
"8b7cc339163863bb4383e542b0ef0e7cf36b84ad932cdf5a80419ec9ad692e7a7e784d2c7cb3796a18b8f800035f3aa06c824100611120a7bdeb35618ccb81b7",
|
||||
"4f084e4939dd5a7f5a658fad58a18a15c25c32ec1c7fd5c5c6c3e892b3971aeaac308304ef17b1c47239ea4bb398b3fd6d4528d8de8e768ae0f1a5a5c6b5c297",
|
||||
"48f407a1af5b8009b2051742e8cf5cd5656669e7d722ee8e7bd202060849442168d8facc117c012bfb7bf449d99befff6a34aea203f1d8d352722be5014ec818",
|
||||
"a6aa82cd1e426f9a73bfa39a29037876114655b8c22d6d3ff8b638ae7dea6b17843e09e52eb66fa1e475e4a8a3de429b7d0f4a776fcb8bdc9b9fede7d52e815f",
|
||||
"5817027d6bdd00c5dd10ac593cd560372270775a18526d7e6f13872a2e20eab664625be7168ac4bd7c9e0ce7fc4099e0f48442e2c767191c6e1284e9b2ccea8c",
|
||||
"08e41028340a45c74e4052b3a8d6389e22e043a1adab5e28d97619450d723469b620caa519b81c14523854f619fd3027e3847bd03276e60604a80ddb4de876d6",
|
||||
"130b8420537eb07d72abda07c85acbd8b9a44f16321dd0422145f809673d30f2b5321326e2bff317ef3fef983c51c4f8ab24a325d298e34afce569a82555774c",
|
||||
"ac49b844afaa012e31c474ca263648844fd2f6307992c2f752aca02c3828965175794deee2d2ee95c61cd284f6b5a2d75e2ef2b29ee8149e77fb81447b2fd04b",
|
||||
"b9d7ca81cc60bb9578e44024e5a0a0be80f27336a6a9f4e53df3999cb191280b090e2ac2d29c5baad9d71415bdc129e69aa2667af6a7fd5e189fccdcee817340",
|
||||
"a755e113386572c75ced61d719706070b9146048e42a9f8cd35667a088b42f08808abdf77e618abd959afc757379ca2c00bcc1a48390fa2bff618b1e0078a613",
|
||||
"a73c7debed326f1c0db0795ee7d6e3946894b826b1f8101c56c823ba17168312e7f53fc7dbe52c3e11e69852c40485e2ef182477862ea6a34ec136e2dfeea6f4",
|
||||
"6cb8f9d52c56d82cac28f39ea1593e8bb2506293ac0d68376a1709b62a46df14a4ae64b2d8fab76733a1ced2d548e3f3c6fcb49d40c3d5808e449cd83d1c2aa2",
|
||||
"683fa2b2369a10162c1c1c7b24bc970ee67da220564f32203f625696c0352a0b9ad96624362d952d84463c1106a2dba7a092599884b35a0b89c8f1b6a9b5a61e",
|
||||
"aad9ad44610118b77d508aeb1bbcd1c1b7d0171397fb510a401bbc0ec34623670d86a2dc3c8f3ab5a2044df730256727545f0860ce21a1eac717dfc48f5d228e",
|
||||
"c42578de23b4c987d5e1ac4d689ed5de4b0417f9704bc6bce969fa13471585d62c2cb1212a944f397fc9ca2c3747c3beb694ec4c5be68828dda53ef43faec6c0",
|
||||
"470f00841ee8244e63ed2c7ea30e2e419897c197462ecccecf713b42a5065fff5914bc9b79affe8f6b657875e789ae213bd914cd35bd174d46e9d18bd843773d",
|
||||
"34fc4213730f47a5e9a3580f643e12945cfcb31bf206f6ad450ce528da3fa432e005d6b0ecce10dca7c5995f6aacc5150e1b009e19751e8309f8859531844374",
|
||||
"fb3c1f0f56a56f8e316fdf5d853c8c872c39635d083634c3904fc3ac07d1b578e85ff0e480e92d44ade33b62e893ee32343e79ddf6ef292e89b582d312502314",
|
||||
"c7c97fc65dd2b9e3d3d607d31598d3f84261e9919251e9c8e57bb5f829377d5f73eabbed55c6c381180f29ad02e5be797ffec7e57bdecbc50ad3d062f0993ab0",
|
||||
"a57a49cdbe67ae7d9f797bb5cc7efc2df07f4e1b15955f85dae74b76e2ecb85afb6cd9eeed8888d5ca3ec5ab65d27a7b19e578475760a045ac3c92e13a938e77",
|
||||
"c7143fce9614a17fd653aeb140726dc9c3dbb1de6cc581b2726897ec24b7a50359ad492243be66d9edd8c933b5b80e0b91bb61ea98056006516976fae8d99a35",
|
||||
"65bb58d07f937e2d3c7e65385f9c54730b704105ccdb691f6e146d4ee8f6c086f49511035110a9ad6031fdceb943e0f9613bcb276dd40f0624ef0f924f809783",
|
||||
"e540277f683b1186dd3b5b3f61433396581a35feb12002be8c6a6231fc40ffa70f08081bc58b2d94f7649543614a435faa2d62110e13dabc7b86629b63af9c24",
|
||||
"418500878c5fbcb584c432f4285e05e49f2e3e075399a0dbfcf874ebf8c03d02bf16bc6989d161c77ca0786b05053c6c709433712319192128835cf0b660595b",
|
||||
"889090dbb1944bdc9433ee5ef1010c7a4a24a8e71ecea8e12a31318ce49dcab0aca5c3802334aab2cc84b14c6b9321fe586bf3f876f19cd406eb1127fb944801",
|
||||
"53b6a28910aa92e27e536fb549cf9b9918791060898e0b9fe183577ff43b5e9c7689c745b32e412269837c31b89e6cc12bf76e13cad366b74ece48bb85fd09e9",
|
||||
"7c092080c6a80d672409d081d3d177106bcd63567785140719490950ae07ae8fcaabbaaab330cfbcf7374482c220af2eadeeb73dcbb35ed823344e144e7d4899",
|
||||
"9ccde566d2400509181111f32dde4cd63209fe59a30c114546ad2776d889a41bad8fa1bb468cb2f9d42ca9928a7770fef8e8ba4d0c812d9a1e75c3d8d2ccd75a",
|
||||
"6e293bf5d03fe43977cfe3f57ccdb3ae282a85455dca33f37f4b74f8398cc612433d755cbec412f8f82a3bd3bc4a278f7ecd0dfa9bbdc40be7a787c8f159b2df",
|
||||
"c56546fb2178456f336164c18b90deffc83ae2b5a3aca77b6884d36d2c1db39501b3e65e36c758c66e3188451fdb3515ee162c001f06c3e8cb573adf30f7a101",
|
||||
"6f82f89f299ebca2fe014b59bffe1aa84e88b1915fe256afb646fd8448af2b8891a7fab37a4ea6f9a50e6c317039d8cf878f4c8e1a0dd464f0b4d6ff1c7ea853",
|
||||
"2b8599ff9c3d6198637ad51e57d1998b0d75313fe2dd61a533c964a6dd9607c6f723e9452ce46e014b1c1d6de77ba5b88c914d1c597bf1eae13474b4290e89b2",
|
||||
"08bf346d38e1df06c8260edb1da75579275948d5c0a0aa9ed2886f8856de5417a156998758f5b17e52f101ca957a71137473dfd18d7d209c4c10d9233c93691d",
|
||||
"6df2156d773114d310b63db9ee5350d77e6bcf25b05fcd910f9b31bc42bb13fe8225ebcb2a23a62280777b6bf74e2cd0917c7640b43defe468cd1e18c943c66a",
|
||||
"7c7038bc13a91151828a5ba82b4a96040f258a4dfb1b1373f0d359168afb0517a20b28a12d3644046be66b8d08d8ae7f6a923ea1c00187c6d11dc502bac71305",
|
||||
"bcd1b30d808fb739b987cbf154bea00da9d40380b861d4c1d6377122dadd61c0e59018b71941cfb62e00dcd70aeb9abf0473e80f0a7eca6b6dea246ab229dd2b",
|
||||
"7ed4468d968530fe7ab2c33540b26d8c3bd3ed44b34fbe8c2a9d7f805b5ada0ea252eeade4fce97f89728ad85bc8bb2430b1bef2cddd32c8446e59b8e8ba3c67",
|
||||
"6d30b7c6ce8a3236c0ca2f8d728b1088ca06983a8043e621d5dcf0c537d13b08791edeb01a3cf0943ec1c890ab6e29b146a236cd46bcb9d93bf516fb67c63fe5",
|
||||
"97fe03cef31438508911bded975980a66029305dc5e3fa8ad1b4fb22fcdf5a19a733320327d8f71ccf496cb3a44a77af56e3dde73d3a5f176896cc57c9a5ad99",
|
||||
"785a9d0fbd21136dbce8fa7eafd63c9dad220052978416b31d9753eaa149097847ed9b30a65c70507eff01879149ed5cf0471d37798edc05abd56ad4a2cccb1d",
|
||||
"ad408d2abddfd37b3bf34794c1a3371d928ed7fc8d966225333584c5665817832a37c07f0dc7cb5aa874cd7d20fe8fab8eabcb9b33d2e0841f6e200960899d95",
|
||||
"97668f745b6032fc815d9579322769dccd9501a5080029b8ae826befb6742331bd9f76efeb3e2b8e81a9786b282f5068a3a2424697a77c41876b7e753f4c7767",
|
||||
"26bb985f47e7fee0cfd252d4ef96bed42b9c370c1c6a3e8c9eb04ef7f7818b833a0d1f043ebafb911dc779e02740a02a44d3a1ea45ed4ad55e686c927cafe97e",
|
||||
"5bfe2b1dcf7fe9b95088acedb575c19016c743b2e763bf5851ac407c9eda43715edfa48b4825492c5179593fff21351b76e8b7e034e4c53c79f61f29c479bd08",
|
||||
"c76509ef72f4a6f9c9c40618ed52b2084f83502232e0ac8bdaf3264368e4d0180f6854c4abf4f6509c79caafc44cf3194afc57bd077bd7b3c9bda3d4b8775816",
|
||||
"d66f2beab990e354ccb910e4e9c7ac618c7b63ef292a96b552341de78dc46d3ec8cfabc699b50af41fda39cf1b0173660923510ad67faedef5207cffe8641d20",
|
||||
"7d8f0672992b79be3a364d8e5904f4ab713bbc8ab01b4f309ad8ccf223ce1034a860dcb0b00550612cc2fa17f2969e18f22e1427d254b4a82b3a03a3eb394adf",
|
||||
"a56d6725bfb3de47c1414adf25fc8f0fc9846f6987722bc06366d5ca4e89722925ebbc881418844075397a0ca89842c7b9e9e07e1d9d183ebeb39e120b483bf7",
|
||||
"af5e03d7fe60c67e10313344434e79485a03a758d6dce985574745763c1c5c77d4fb3e6fb12230368370993bf90feed0c5d1607524562d7c09c0c210ed393d7c",
|
||||
"7a20540cc07bf72b582421fc342e82f52134b69841ec28ed189e2ea6a29dd2f82a640352d222b52f2911dc72a7dab31caadd80c6118f13c56b2a1e4373be0ea3",
|
||||
"486f02c63e5467ea1fdde7e82bfacc2c1ba5d636d9f3d08b210da3f372f706ec218cc17ff60aef703bbe0c15c38ae55d286a684f864c78211ccab4178c92adba",
|
||||
"1c7a5c1dedcd04a921788f7eb23361ca1953b04b9c7aec35d65ea3e4996db26f281278ea4ae666ad81027d98af57262cdbfa4c085f4210568c7e15eec7805114",
|
||||
"9ce3fa9a860bdbd5378fd6d7b8b671c6cb7692910ce8f9b6cb4122cbcbe6ac06ca0422cef1225935053b7d193a81b9e972eb85a1d3074f14cbb5ec9f0573892d",
|
||||
"a91187be5c371c4265c174fd4653b8ab708551f83d1fee1cc1479581bc006d6fb78fcc9a5dee1db3666f508f9780a37593ebcccf5fbed39667dc6361e921f779",
|
||||
"4625767d7b1d3d3ed2fbc674af14e0244152f2a4021fcf3311505d89bd81e2f9f9a500c3b199914db49500b3c98d03ea93286751a686a3b875daab0ccd63b44f",
|
||||
"43dfdfe1b014fed3a2acabb7f3e9a182f2aa18019d27e3e6cdcf31a15b428e91e7b08cf5e5c376fce2d8a28ff85ab0a0a1656edb4a0a91532620096d9a5a652d",
|
||||
"279e3202be3989ba3112772585177487e4fe3ee3eab49c2f7fa7fe87cfe7b80d3e0355edff6d031e6c96c795db1c6f041880ec3824defacf9263820a8e7327de",
|
||||
"ea2d066ac229d4d4b616a8bedec734325224e4b4e58f1ae6dad7e40c2da29196c3b1ea9571dacc81e87328caa0211e09027b0524aa3f4a849917b3586747ebbb",
|
||||
"49f014f5c61822c899ab5cae51be4044a4495e777deb7da9b6d8490efbb87530adf293daf079f94c33b7044ef62e2e5bb3eb11e17304f8453ee6ce24f033ddb0",
|
||||
"9233490344e5b0dc5912671b7ae54cee7730dbe1f4c7d92a4d3e3aab50571708db51dcf9c2944591db651db32d22935b86944969be77d5b5feae6c3840a8db26",
|
||||
"b6e75e6f4c7f453b7465d25b5ac8c7196902eaa953875228c8634e16e2ae1f38bc3275304335f5989eccc1e34167d4e68d7719968fba8e2fe67947c35c48e806",
|
||||
"cc14ca665af1483efbc3af80080e650d5046a3932f4f51f3fe90a0705ec25104adf07839265dc51d43401411246e474f0d5e5637af94767283d53e0617e981f4",
|
||||
"230a1c857cb2e7852e41b647e90e4585d2d881e1734dc38955356e8dd7bff39053092c6b38e236e1899525647073dddf6895d64206325e7647f275567b255909",
|
||||
"cbb65321ac436e2ffdab2936359ce49023f7dee7614ef28d173c3d27c5d1bffa51553d433f8ee3c9e49c05a2b883cce954c9a8093b80612a0cdd4732e041f995",
|
||||
"3e7e570074337275efb51315588034c3cf0dddca20b4612e0bd5b881e7e5476d319ce4fe9f19186e4c0826f44f131eb048e65be242b1172c63badb123ab0cbe8",
|
||||
"d32e9ec02d38d4e1b8249df8dcb00c5b9c68eb8922672e3505393b6a210ba56f9496e5ee0490ef387c3cdec061f06bc0382d9304cafbb8e0cd33d57029e62df2",
|
||||
"8c1512466089f05b3775c262b62d22b83854a83218130b4ec91b3ccbd293d2a54302cecaab9b100c68d1e6ddc8f07cddbdfe6fdaaaf099cc09d6b725879c6369",
|
||||
"91a7f61c97c2911e4c812ef71d780ad8fa788794561d08303fd1c1cb608a46a12563086ec5b39d471aed94fb0f6c678a43b8792932f9028d772a22768ea23a9b",
|
||||
"4f6bb222a395e8b18f6ba155477aed3f0729ac9e83e16d31a2a8bc655422b837c891c6199e6f0d75799e3b691525c581953517f252c4b9e3a27a28fbaf49644c",
|
||||
"5d06c07e7a646c413a501c3f4bb2fc38127de7509b7077c4d9b5613201c1aa02fd5f79d2745915dd57fbcb4ce08695f6efc0cb3d2d330e19b4b0e6004ea6471e",
|
||||
"b96756e57909968f14b796a5d30f4c9d671472cf82c8cfb2caca7ac7a44ca0a14c9842d00c82e337502c94d5960aca4c492ea7b0df919ddf1aada2a275bb10d4",
|
||||
"ff0a015e98db9c99f03977710aac3e658c0d896f6d71d618ba79dc6cf72ac75b7c038eb6862dede4543e145413a6368d69f5722c827ba3ef25b6ae6440d39276",
|
||||
"5b21c5fd8868367612474fa2e70e9cfa2201ffeee8fafab5797ad58fefa17c9b5b107da4a3db6320baaf2c8617d5a51df914ae88da3867c2d41f0cc14fa67928",
|
||||
}
|
||||
|
||||
var goldenKeyed = []string{
|
||||
"10ebb67700b1868efb4417987acf4690ae9d972fb7a590c2f02871799aaa4786b5e996e8f0f4eb981fc214b005f42d2ff4233499391653df7aefcbc13fc51568",
|
||||
"961f6dd1e4dd30f63901690c512e78e4b45e4742ed197c3c5e45c549fd25f2e4187b0bc9fe30492b16b0d0bc4ef9b0f34c7003fac09a5ef1532e69430234cebd",
|
||||
"da2cfbe2d8409a0f38026113884f84b50156371ae304c4430173d08a99d9fb1b983164a3770706d537f49e0c916d9f32b95cc37a95b99d857436f0232c88a965",
|
||||
"33d0825dddf7ada99b0e7e307104ad07ca9cfd9692214f1561356315e784f3e5a17e364ae9dbb14cb2036df932b77f4b292761365fb328de7afdc6d8998f5fc1",
|
||||
"beaa5a3d08f3807143cf621d95cd690514d0b49efff9c91d24b59241ec0eefa5f60196d407048bba8d2146828ebcb0488d8842fd56bb4f6df8e19c4b4daab8ac",
|
||||
"098084b51fd13deae5f4320de94a688ee07baea2800486689a8636117b46c1f4c1f6af7f74ae7c857600456a58a3af251dc4723a64cc7c0a5ab6d9cac91c20bb",
|
||||
"6044540d560853eb1c57df0077dd381094781cdb9073e5b1b3d3f6c7829e12066bbaca96d989a690de72ca3133a83652ba284a6d62942b271ffa2620c9e75b1f",
|
||||
"7a8cfe9b90f75f7ecb3acc053aaed6193112b6f6a4aeeb3f65d3de541942deb9e2228152a3c4bbbe72fc3b12629528cfbb09fe630f0474339f54abf453e2ed52",
|
||||
"380beaf6ea7cc9365e270ef0e6f3a64fb902acae51dd5512f84259ad2c91f4bc4108db73192a5bbfb0cbcf71e46c3e21aee1c5e860dc96e8eb0b7b8426e6abe9",
|
||||
"60fe3c4535e1b59d9a61ea8500bfac41a69dffb1ceadd9aca323e9a625b64da5763bad7226da02b9c8c4f1a5de140ac5a6c1124e4f718ce0b28ea47393aa6637",
|
||||
"4fe181f54ad63a2983feaaf77d1e7235c2beb17fa328b6d9505bda327df19fc37f02c4b6f0368ce23147313a8e5738b5fa2a95b29de1c7f8264eb77b69f585cd",
|
||||
"f228773ce3f3a42b5f144d63237a72d99693adb8837d0e112a8a0f8ffff2c362857ac49c11ec740d1500749dac9b1f4548108bf3155794dcc9e4082849e2b85b",
|
||||
"962452a8455cc56c8511317e3b1f3b2c37df75f588e94325fdd77070359cf63a9ae6e930936fdf8e1e08ffca440cfb72c28f06d89a2151d1c46cd5b268ef8563",
|
||||
"43d44bfa18768c59896bf7ed1765cb2d14af8c260266039099b25a603e4ddc5039d6ef3a91847d1088d401c0c7e847781a8a590d33a3c6cb4df0fab1c2f22355",
|
||||
"dcffa9d58c2a4ca2cdbb0c7aa4c4c1d45165190089f4e983bb1c2cab4aaeff1fa2b5ee516fecd780540240bf37e56c8bcca7fab980e1e61c9400d8a9a5b14ac6",
|
||||
"6fbf31b45ab0c0b8dad1c0f5f4061379912dde5aa922099a030b725c73346c524291adef89d2f6fd8dfcda6d07dad811a9314536c2915ed45da34947e83de34e",
|
||||
"a0c65bddde8adef57282b04b11e7bc8aab105b99231b750c021f4a735cb1bcfab87553bba3abb0c3e64a0b6955285185a0bd35fb8cfde557329bebb1f629ee93",
|
||||
"f99d815550558e81eca2f96718aed10d86f3f1cfb675cce06b0eff02f617c5a42c5aa760270f2679da2677c5aeb94f1142277f21c7f79f3c4f0cce4ed8ee62b1",
|
||||
"95391da8fc7b917a2044b3d6f5374e1ca072b41454d572c7356c05fd4bc1e0f40b8bb8b4a9f6bce9be2c4623c399b0dca0dab05cb7281b71a21b0ebcd9e55670",
|
||||
"04b9cd3d20d221c09ac86913d3dc63041989a9a1e694f1e639a3ba7e451840f750c2fc191d56ad61f2e7936bc0ac8e094b60caeed878c18799045402d61ceaf9",
|
||||
"ec0e0ef707e4ed6c0c66f9e089e4954b058030d2dd86398fe84059631f9ee591d9d77375355149178c0cf8f8e7c49ed2a5e4f95488a2247067c208510fadc44c",
|
||||
"9a37cce273b79c09913677510eaf7688e89b3314d3532fd2764c39de022a2945b5710d13517af8ddc0316624e73bec1ce67df15228302036f330ab0cb4d218dd",
|
||||
"4cf9bb8fb3d4de8b38b2f262d3c40f46dfe747e8fc0a414c193d9fcf753106ce47a18f172f12e8a2f1c26726545358e5ee28c9e2213a8787aafbc516d2343152",
|
||||
"64e0c63af9c808fd893137129867fd91939d53f2af04be4fa268006100069b2d69daa5c5d8ed7fddcb2a70eeecdf2b105dd46a1e3b7311728f639ab489326bc9",
|
||||
"5e9c93158d659b2def06b0c3c7565045542662d6eee8a96a89b78ade09fe8b3dcc096d4fe48815d88d8f82620156602af541955e1f6ca30dce14e254c326b88f",
|
||||
"7775dff889458dd11aef417276853e21335eb88e4dec9cfb4e9edb49820088551a2ca60339f12066101169f0dfe84b098fddb148d9da6b3d613df263889ad64b",
|
||||
"f0d2805afbb91f743951351a6d024f9353a23c7ce1fc2b051b3a8b968c233f46f50f806ecb1568ffaa0b60661e334b21dde04f8fa155ac740eeb42e20b60d764",
|
||||
"86a2af316e7d7754201b942e275364ac12ea8962ab5bd8d7fb276dc5fbffc8f9a28cae4e4867df6780d9b72524160927c855da5b6078e0b554aa91e31cb9ca1d",
|
||||
"10bdf0caa0802705e706369baf8a3f79d72c0a03a80675a7bbb00be3a45e516424d1ee88efb56f6d5777545ae6e27765c3a8f5e493fc308915638933a1dfee55",
|
||||
"b01781092b1748459e2e4ec178696627bf4ebafebba774ecf018b79a68aeb84917bf0b84bb79d17b743151144cd66b7b33a4b9e52c76c4e112050ff5385b7f0b",
|
||||
"c6dbc61dec6eaeac81e3d5f755203c8e220551534a0b2fd105a91889945a638550204f44093dd998c076205dffad703a0e5cd3c7f438a7e634cd59fededb539e",
|
||||
"eba51acffb4cea31db4b8d87e9bf7dd48fe97b0253ae67aa580f9ac4a9d941f2bea518ee286818cc9f633f2a3b9fb68e594b48cdd6d515bf1d52ba6c85a203a7",
|
||||
"86221f3ada52037b72224f105d7999231c5e5534d03da9d9c0a12acb68460cd375daf8e24386286f9668f72326dbf99ba094392437d398e95bb8161d717f8991",
|
||||
"5595e05c13a7ec4dc8f41fb70cb50a71bce17c024ff6de7af618d0cc4e9c32d9570d6d3ea45b86525491030c0d8f2b1836d5778c1ce735c17707df364d054347",
|
||||
"ce0f4f6aca89590a37fe034dd74dd5fa65eb1cbd0a41508aaddc09351a3cea6d18cb2189c54b700c009f4cbf0521c7ea01be61c5ae09cb54f27bc1b44d658c82",
|
||||
"7ee80b06a215a3bca970c77cda8761822bc103d44fa4b33f4d07dcb997e36d55298bceae12241b3fa07fa63be5576068da387b8d5859aeab701369848b176d42",
|
||||
"940a84b6a84d109aab208c024c6ce9647676ba0aaa11f86dbb7018f9fd2220a6d901a9027f9abcf935372727cbf09ebd61a2a2eeb87653e8ecad1bab85dc8327",
|
||||
"2020b78264a82d9f4151141adba8d44bf20c5ec062eee9b595a11f9e84901bf148f298e0c9f8777dcdbc7cc4670aac356cc2ad8ccb1629f16f6a76bcefbee760",
|
||||
"d1b897b0e075ba68ab572adf9d9c436663e43eb3d8e62d92fc49c9be214e6f27873fe215a65170e6bea902408a25b49506f47babd07cecf7113ec10c5dd31252",
|
||||
"b14d0c62abfa469a357177e594c10c194243ed2025ab8aa5ad2fa41ad318e0ff48cd5e60bec07b13634a711d2326e488a985f31e31153399e73088efc86a5c55",
|
||||
"4169c5cc808d2697dc2a82430dc23e3cd356dc70a94566810502b8d655b39abf9e7f902fe717e0389219859e1945df1af6ada42e4ccda55a197b7100a30c30a1",
|
||||
"258a4edb113d66c839c8b1c91f15f35ade609f11cd7f8681a4045b9fef7b0b24c82cda06a5f2067b368825e3914e53d6948ede92efd6e8387fa2e537239b5bee",
|
||||
"79d2d8696d30f30fb34657761171a11e6c3f1e64cbe7bebee159cb95bfaf812b4f411e2f26d9c421dc2c284a3342d823ec293849e42d1e46b0a4ac1e3c86abaa",
|
||||
"8b9436010dc5dee992ae38aea97f2cd63b946d94fedd2ec9671dcde3bd4ce9564d555c66c15bb2b900df72edb6b891ebcadfeff63c9ea4036a998be7973981e7",
|
||||
"c8f68e696ed28242bf997f5b3b34959508e42d613810f1e2a435c96ed2ff560c7022f361a9234b9837feee90bf47922ee0fd5f8ddf823718d86d1e16c6090071",
|
||||
"b02d3eee4860d5868b2c39ce39bfe81011290564dd678c85e8783f29302dfc1399ba95b6b53cd9ebbf400cca1db0ab67e19a325f2d115812d25d00978ad1bca4",
|
||||
"7693ea73af3ac4dad21ca0d8da85b3118a7d1c6024cfaf557699868217bc0c2f44a199bc6c0edd519798ba05bd5b1b4484346a47c2cadf6bf30b785cc88b2baf",
|
||||
"a0e5c1c0031c02e48b7f09a5e896ee9aef2f17fc9e18e997d7f6cac7ae316422c2b1e77984e5f3a73cb45deed5d3f84600105e6ee38f2d090c7d0442ea34c46d",
|
||||
"41daa6adcfdb69f1440c37b596440165c15ada596813e2e22f060fcd551f24dee8e04ba6890387886ceec4a7a0d7fc6b44506392ec3822c0d8c1acfc7d5aebe8",
|
||||
"14d4d40d5984d84c5cf7523b7798b254e275a3a8cc0a1bd06ebc0bee726856acc3cbf516ff667cda2058ad5c3412254460a82c92187041363cc77a4dc215e487",
|
||||
"d0e7a1e2b9a447fee83e2277e9ff8010c2f375ae12fa7aaa8ca5a6317868a26a367a0b69fbc1cf32a55d34eb370663016f3d2110230eba754028a56f54acf57c",
|
||||
"e771aa8db5a3e043e8178f39a0857ba04a3f18e4aa05743cf8d222b0b095825350ba422f63382a23d92e4149074e816a36c1cd28284d146267940b31f8818ea2",
|
||||
"feb4fd6f9e87a56bef398b3284d2bda5b5b0e166583a66b61e538457ff0584872c21a32962b9928ffab58de4af2edd4e15d8b35570523207ff4e2a5aa7754caa",
|
||||
"462f17bf005fb1c1b9e671779f665209ec2873e3e411f98dabf240a1d5ec3f95ce6796b6fc23fe171903b502023467dec7273ff74879b92967a2a43a5a183d33",
|
||||
"d3338193b64553dbd38d144bea71c5915bb110e2d88180dbc5db364fd6171df317fc7268831b5aef75e4342b2fad8797ba39eddcef80e6ec08159350b1ad696d",
|
||||
"e1590d585a3d39f7cb599abd479070966409a6846d4377acf4471d065d5db94129cc9be92573b05ed226be1e9b7cb0cabe87918589f80dadd4ef5ef25a93d28e",
|
||||
"f8f3726ac5a26cc80132493a6fedcb0e60760c09cfc84cad178175986819665e76842d7b9fedf76dddebf5d3f56faaad4477587af21606d396ae570d8e719af2",
|
||||
"30186055c07949948183c850e9a756cc09937e247d9d928e869e20bafc3cd9721719d34e04a0899b92c736084550186886efba2e790d8be6ebf040b209c439a4",
|
||||
"f3c4276cb863637712c241c444c5cc1e3554e0fddb174d035819dd83eb700b4ce88df3ab3841ba02085e1a99b4e17310c5341075c0458ba376c95a6818fbb3e2",
|
||||
"0aa007c4dd9d5832393040a1583c930bca7dc5e77ea53add7e2b3f7c8e231368043520d4a3ef53c969b6bbfd025946f632bd7f765d53c21003b8f983f75e2a6a",
|
||||
"08e9464720533b23a04ec24f7ae8c103145f765387d738777d3d343477fd1c58db052142cab754ea674378e18766c53542f71970171cc4f81694246b717d7564",
|
||||
"d37ff7ad297993e7ec21e0f1b4b5ae719cdc83c5db687527f27516cbffa822888a6810ee5c1ca7bfe3321119be1ab7bfa0a502671c8329494df7ad6f522d440f",
|
||||
"dd9042f6e464dcf86b1262f6accfafbd8cfd902ed3ed89abf78ffa482dbdeeb6969842394c9a1168ae3d481a017842f660002d42447c6b22f7b72f21aae021c9",
|
||||
"bd965bf31e87d70327536f2a341cebc4768eca275fa05ef98f7f1b71a0351298de006fba73fe6733ed01d75801b4a928e54231b38e38c562b2e33ea1284992fa",
|
||||
"65676d800617972fbd87e4b9514e1c67402b7a331096d3bfac22f1abb95374abc942f16e9ab0ead33b87c91968a6e509e119ff07787b3ef483e1dcdccf6e3022",
|
||||
"939fa189699c5d2c81ddd1ffc1fa207c970b6a3685bb29ce1d3e99d42f2f7442da53e95a72907314f4588399a3ff5b0a92beb3f6be2694f9f86ecf2952d5b41c",
|
||||
"c516541701863f91005f314108ceece3c643e04fc8c42fd2ff556220e616aaa6a48aeb97a84bad74782e8dff96a1a2fa949339d722edcaa32b57067041df88cc",
|
||||
"987fd6e0d6857c553eaebb3d34970a2c2f6e89a3548f492521722b80a1c21a153892346d2cba6444212d56da9a26e324dccbc0dcde85d4d2ee4399eec5a64e8f",
|
||||
"ae56deb1c2328d9c4017706bce6e99d41349053ba9d336d677c4c27d9fd50ae6aee17e853154e1f4fe7672346da2eaa31eea53fcf24a22804f11d03da6abfc2b",
|
||||
"49d6a608c9bde4491870498572ac31aac3fa40938b38a7818f72383eb040ad39532bc06571e13d767e6945ab77c0bdc3b0284253343f9f6c1244ebf2ff0df866",
|
||||
"da582ad8c5370b4469af862aa6467a2293b2b28bd80ae0e91f425ad3d47249fdf98825cc86f14028c3308c9804c78bfeeeee461444ce243687e1a50522456a1d",
|
||||
"d5266aa3331194aef852eed86d7b5b2633a0af1c735906f2e13279f14931a9fc3b0eac5ce9245273bd1aa92905abe16278ef7efd47694789a7283b77da3c70f8",
|
||||
"2962734c28252186a9a1111c732ad4de4506d4b4480916303eb7991d659ccda07a9911914bc75c418ab7a4541757ad054796e26797feaf36e9f6ad43f14b35a4",
|
||||
"e8b79ec5d06e111bdfafd71e9f5760f00ac8ac5d8bf768f9ff6f08b8f026096b1cc3a4c973333019f1e3553e77da3f98cb9f542e0a90e5f8a940cc58e59844b3",
|
||||
"dfb320c44f9d41d1efdcc015f08dd5539e526e39c87d509ae6812a969e5431bf4fa7d91ffd03b981e0d544cf72d7b1c0374f8801482e6dea2ef903877eba675e",
|
||||
"d88675118fdb55a5fb365ac2af1d217bf526ce1ee9c94b2f0090b2c58a06ca58187d7fe57c7bed9d26fca067b4110eefcd9a0a345de872abe20de368001b0745",
|
||||
"b893f2fc41f7b0dd6e2f6aa2e0370c0cff7df09e3acfcc0e920b6e6fad0ef747c40668417d342b80d2351e8c175f20897a062e9765e6c67b539b6ba8b9170545",
|
||||
"6c67ec5697accd235c59b486d7b70baeedcbd4aa64ebd4eef3c7eac189561a726250aec4d48cadcafbbe2ce3c16ce2d691a8cce06e8879556d4483ed7165c063",
|
||||
"f1aa2b044f8f0c638a3f362e677b5d891d6fd2ab0765f6ee1e4987de057ead357883d9b405b9d609eea1b869d97fb16d9b51017c553f3b93c0a1e0f1296fedcd",
|
||||
"cbaa259572d4aebfc1917acddc582b9f8dfaa928a198ca7acd0f2aa76a134a90252e6298a65b08186a350d5b7626699f8cb721a3ea5921b753ae3a2dce24ba3a",
|
||||
"fa1549c9796cd4d303dcf452c1fbd5744fd9b9b47003d920b92de34839d07ef2a29ded68f6fc9e6c45e071a2e48bd50c5084e96b657dd0404045a1ddefe282ed",
|
||||
"5cf2ac897ab444dcb5c8d87c495dbdb34e1838b6b629427caa51702ad0f9688525f13bec503a3c3a2c80a65e0b5715e8afab00ffa56ec455a49a1ad30aa24fcd",
|
||||
"9aaf80207bace17bb7ab145757d5696bde32406ef22b44292ef65d4519c3bb2ad41a59b62cc3e94b6fa96d32a7faadae28af7d35097219aa3fd8cda31e40c275",
|
||||
"af88b163402c86745cb650c2988fb95211b94b03ef290eed9662034241fd51cf398f8073e369354c43eae1052f9b63b08191caa138aa54fea889cc7024236897",
|
||||
"48fa7d64e1ceee27b9864db5ada4b53d00c9bc7626555813d3cd6730ab3cc06ff342d727905e33171bde6e8476e77fb1720861e94b73a2c538d254746285f430",
|
||||
"0e6fd97a85e904f87bfe85bbeb34f69e1f18105cf4ed4f87aec36c6e8b5f68bd2a6f3dc8a9ecb2b61db4eedb6b2ea10bf9cb0251fb0f8b344abf7f366b6de5ab",
|
||||
"06622da5787176287fdc8fed440bad187d830099c94e6d04c8e9c954cda70c8bb9e1fc4a6d0baa831b9b78ef6648681a4867a11da93ee36e5e6a37d87fc63f6f",
|
||||
"1da6772b58fabf9c61f68d412c82f182c0236d7d575ef0b58dd22458d643cd1dfc93b03871c316d8430d312995d4197f0874c99172ba004a01ee295abac24e46",
|
||||
"3cd2d9320b7b1d5fb9aab951a76023fa667be14a9124e394513918a3f44096ae4904ba0ffc150b63bc7ab1eeb9a6e257e5c8f000a70394a5afd842715de15f29",
|
||||
"04cdc14f7434e0b4be70cb41db4c779a88eaef6accebcb41f2d42fffe7f32a8e281b5c103a27021d0d08362250753cdf70292195a53a48728ceb5844c2d98bab",
|
||||
"9071b7a8a075d0095b8fb3ae5113785735ab98e2b52faf91d5b89e44aac5b5d4ebbf91223b0ff4c71905da55342e64655d6ef8c89a4768c3f93a6dc0366b5bc8",
|
||||
"ebb30240dd96c7bc8d0abe49aa4edcbb4afdc51ff9aaf720d3f9e7fbb0f9c6d6571350501769fc4ebd0b2141247ff400d4fd4be414edf37757bb90a32ac5c65a",
|
||||
"8532c58bf3c8015d9d1cbe00eef1f5082f8f3632fbe9f1ed4f9dfb1fa79e8283066d77c44c4af943d76b300364aecbd0648c8a8939bd204123f4b56260422dec",
|
||||
"fe9846d64f7c7708696f840e2d76cb4408b6595c2f81ec6a28a7f2f20cb88cfe6ac0b9e9b8244f08bd7095c350c1d0842f64fb01bb7f532dfcd47371b0aeeb79",
|
||||
"28f17ea6fb6c42092dc264257e29746321fb5bdaea9873c2a7fa9d8f53818e899e161bc77dfe8090afd82bf2266c5c1bc930a8d1547624439e662ef695f26f24",
|
||||
"ec6b7d7f030d4850acae3cb615c21dd25206d63e84d1db8d957370737ba0e98467ea0ce274c66199901eaec18a08525715f53bfdb0aacb613d342ebdceeddc3b",
|
||||
"b403d3691c03b0d3418df327d5860d34bbfcc4519bfbce36bf33b208385fadb9186bc78a76c489d89fd57e7dc75412d23bcd1dae8470ce9274754bb8585b13c5",
|
||||
"31fc79738b8772b3f55cd8178813b3b52d0db5a419d30ba9495c4b9da0219fac6df8e7c23a811551a62b827f256ecdb8124ac8a6792ccfecc3b3012722e94463",
|
||||
"bb2039ec287091bcc9642fc90049e73732e02e577e2862b32216ae9bedcd730c4c284ef3968c368b7d37584f97bd4b4dc6ef6127acfe2e6ae2509124e66c8af4",
|
||||
"f53d68d13f45edfcb9bd415e2831e938350d5380d3432278fc1c0c381fcb7c65c82dafe051d8c8b0d44e0974a0e59ec7bf7ed0459f86e96f329fc79752510fd3",
|
||||
"8d568c7984f0ecdf7640fbc483b5d8c9f86634f6f43291841b309a350ab9c1137d24066b09da9944bac54d5bb6580d836047aac74ab724b887ebf93d4b32eca9",
|
||||
"c0b65ce5a96ff774c456cac3b5f2c4cd359b4ff53ef93a3da0778be4900d1e8da1601e769e8f1b02d2a2f8c5b9fa10b44f1c186985468feeb008730283a6657d",
|
||||
"4900bba6f5fb103ece8ec96ada13a5c3c85488e05551da6b6b33d988e611ec0fe2e3c2aa48ea6ae8986a3a231b223c5d27cec2eadde91ce07981ee652862d1e4",
|
||||
"c7f5c37c7285f927f76443414d4357ff789647d7a005a5a787e03c346b57f49f21b64fa9cf4b7e45573e23049017567121a9c3d4b2b73ec5e9413577525db45a",
|
||||
"ec7096330736fdb2d64b5653e7475da746c23a4613a82687a28062d3236364284ac01720ffb406cfe265c0df626a188c9e5963ace5d3d5bb363e32c38c2190a6",
|
||||
"82e744c75f4649ec52b80771a77d475a3bc091989556960e276a5f9ead92a03f718742cdcfeaee5cb85c44af198adc43a4a428f5f0c2ddb0be36059f06d7df73",
|
||||
"2834b7a7170f1f5b68559ab78c1050ec21c919740b784a9072f6e5d69f828d70c919c5039fb148e39e2c8a52118378b064ca8d5001cd10a5478387b966715ed6",
|
||||
"16b4ada883f72f853bb7ef253efcab0c3e2161687ad61543a0d2824f91c1f81347d86be709b16996e17f2dd486927b0288ad38d13063c4a9672c39397d3789b6",
|
||||
"78d048f3a69d8b54ae0ed63a573ae350d89f7c6cf1f3688930de899afa037697629b314e5cd303aa62feea72a25bf42b304b6c6bcb27fae21c16d925e1fbdac3",
|
||||
"0f746a48749287ada77a82961f05a4da4abdb7d77b1220f836d09ec814359c0ec0239b8c7b9ff9e02f569d1b301ef67c4612d1de4f730f81c12c40cc063c5caa",
|
||||
"f0fc859d3bd195fbdc2d591e4cdac15179ec0f1dc821c11df1f0c1d26e6260aaa65b79fafacafd7d3ad61e600f250905f5878c87452897647a35b995bcadc3a3",
|
||||
"2620f687e8625f6a412460b42e2cef67634208ce10a0cbd4dff7044a41b7880077e9f8dc3b8d1216d3376a21e015b58fb279b521d83f9388c7382c8505590b9b",
|
||||
"227e3aed8d2cb10b918fcb04f9de3e6d0a57e08476d93759cd7b2ed54a1cbf0239c528fb04bbf288253e601d3bc38b21794afef90b17094a182cac557745e75f",
|
||||
"1a929901b09c25f27d6b35be7b2f1c4745131fdebca7f3e2451926720434e0db6e74fd693ad29b777dc3355c592a361c4873b01133a57c2e3b7075cbdb86f4fc",
|
||||
"5fd7968bc2fe34f220b5e3dc5af9571742d73b7d60819f2888b629072b96a9d8ab2d91b82d0a9aaba61bbd39958132fcc4257023d1eca591b3054e2dc81c8200",
|
||||
"dfcce8cf32870cc6a503eadafc87fd6f78918b9b4d0737db6810be996b5497e7e5cc80e312f61e71ff3e9624436073156403f735f56b0b01845c18f6caf772e6",
|
||||
"02f7ef3a9ce0fff960f67032b296efca3061f4934d690749f2d01c35c81c14f39a67fa350bc8a0359bf1724bffc3bca6d7c7bba4791fd522a3ad353c02ec5aa8",
|
||||
"64be5c6aba65d594844ae78bb022e5bebe127fd6b6ffa5a13703855ab63b624dcd1a363f99203f632ec386f3ea767fc992e8ed9686586aa27555a8599d5b808f",
|
||||
"f78585505c4eaa54a8b5be70a61e735e0ff97af944ddb3001e35d86c4e2199d976104b6ae31750a36a726ed285064f5981b503889fef822fcdc2898dddb7889a",
|
||||
"e4b5566033869572edfd87479a5bb73c80e8759b91232879d96b1dda36c012076ee5a2ed7ae2de63ef8406a06aea82c188031b560beafb583fb3de9e57952a7e",
|
||||
"e1b3e7ed867f6c9484a2a97f7715f25e25294e992e41f6a7c161ffc2adc6daaeb7113102d5e6090287fe6ad94ce5d6b739c6ca240b05c76fb73f25dd024bf935",
|
||||
"85fd085fdc12a080983df07bd7012b0d402a0f4043fcb2775adf0bad174f9b08d1676e476985785c0a5dcc41dbff6d95ef4d66a3fbdc4a74b82ba52da0512b74",
|
||||
"aed8fa764b0fbff821e05233d2f7b0900ec44d826f95e93c343c1bc3ba5a24374b1d616e7e7aba453a0ada5e4fab5382409e0d42ce9c2bc7fb39a99c340c20f0",
|
||||
"7ba3b2e297233522eeb343bd3ebcfd835a04007735e87f0ca300cbee6d416565162171581e4020ff4cf176450f1291ea2285cb9ebffe4c56660627685145051c",
|
||||
"de748bcf89ec88084721e16b85f30adb1a6134d664b5843569babc5bbd1a15ca9b61803c901a4fef32965a1749c9f3a4e243e173939dc5a8dc495c671ab52145",
|
||||
"aaf4d2bdf200a919706d9842dce16c98140d34bc433df320aba9bd429e549aa7a3397652a4d768277786cf993cde2338673ed2e6b66c961fefb82cd20c93338f",
|
||||
"c408218968b788bf864f0997e6bc4c3dba68b276e2125a4843296052ff93bf5767b8cdce7131f0876430c1165fec6c4f47adaa4fd8bcfacef463b5d3d0fa61a0",
|
||||
"76d2d819c92bce55fa8e092ab1bf9b9eab237a25267986cacf2b8ee14d214d730dc9a5aa2d7b596e86a1fd8fa0804c77402d2fcd45083688b218b1cdfa0dcbcb",
|
||||
"72065ee4dd91c2d8509fa1fc28a37c7fc9fa7d5b3f8ad3d0d7a25626b57b1b44788d4caf806290425f9890a3a2a35a905ab4b37acfd0da6e4517b2525c9651e4",
|
||||
"64475dfe7600d7171bea0b394e27c9b00d8e74dd1e416a79473682ad3dfdbb706631558055cfc8a40e07bd015a4540dcdea15883cbbf31412df1de1cd4152b91",
|
||||
"12cd1674a4488a5d7c2b3160d2e2c4b58371bedad793418d6f19c6ee385d70b3e06739369d4df910edb0b0a54cbff43d54544cd37ab3a06cfa0a3ddac8b66c89",
|
||||
"60756966479dedc6dd4bcff8ea7d1d4ce4d4af2e7b097e32e3763518441147cc12b3c0ee6d2ecabf1198cec92e86a3616fba4f4e872f5825330adbb4c1dee444",
|
||||
"a7803bcb71bc1d0f4383dde1e0612e04f872b715ad30815c2249cf34abb8b024915cb2fc9f4e7cc4c8cfd45be2d5a91eab0941c7d270e2da4ca4a9f7ac68663a",
|
||||
"b84ef6a7229a34a750d9a98ee2529871816b87fbe3bc45b45fa5ae82d5141540211165c3c5d7a7476ba5a4aa06d66476f0d9dc49a3f1ee72c3acabd498967414",
|
||||
"fae4b6d8efc3f8c8e64d001dabec3a21f544e82714745251b2b4b393f2f43e0da3d403c64db95a2cb6e23ebb7b9e94cdd5ddac54f07c4a61bd3cb10aa6f93b49",
|
||||
"34f7286605a122369540141ded79b8957255da2d4155abbf5a8dbb89c8eb7ede8eeef1daa46dc29d751d045dc3b1d658bb64b80ff8589eddb3824b13da235a6b",
|
||||
"3b3b48434be27b9eababba43bf6b35f14b30f6a88dc2e750c358470d6b3aa3c18e47db4017fa55106d8252f016371a00f5f8b070b74ba5f23cffc5511c9f09f0",
|
||||
"ba289ebd6562c48c3e10a8ad6ce02e73433d1e93d7c9279d4d60a7e879ee11f441a000f48ed9f7c4ed87a45136d7dccdca482109c78a51062b3ba4044ada2469",
|
||||
"022939e2386c5a37049856c850a2bb10a13dfea4212b4c732a8840a9ffa5faf54875c5448816b2785a007da8a8d2bc7d71a54e4e6571f10b600cbdb25d13ede3",
|
||||
"e6fec19d89ce8717b1a087024670fe026f6c7cbda11caef959bb2d351bf856f8055d1c0ebdaaa9d1b17886fc2c562b5e99642fc064710c0d3488a02b5ed7f6fd",
|
||||
"94c96f02a8f576aca32ba61c2b206f907285d9299b83ac175c209a8d43d53bfe683dd1d83e7549cb906c28f59ab7c46f8751366a28c39dd5fe2693c9019666c8",
|
||||
"31a0cd215ebd2cb61de5b9edc91e6195e31c59a5648d5c9f737e125b2605708f2e325ab3381c8dce1a3e958886f1ecdc60318f882cfe20a24191352e617b0f21",
|
||||
"91ab504a522dce78779f4c6c6ba2e6b6db5565c76d3e7e7c920caf7f757ef9db7c8fcf10e57f03379ea9bf75eb59895d96e149800b6aae01db778bb90afbc989",
|
||||
"d85cabc6bd5b1a01a5afd8c6734740da9fd1c1acc6db29bfc8a2e5b668b028b6b3154bfb8703fa3180251d589ad38040ceb707c4bad1b5343cb426b61eaa49c1",
|
||||
"d62efbec2ca9c1f8bd66ce8b3f6a898cb3f7566ba6568c618ad1feb2b65b76c3ce1dd20f7395372faf28427f61c9278049cf0140df434f5633048c86b81e0399",
|
||||
"7c8fdc6175439e2c3db15bafa7fb06143a6a23bc90f449e79deef73c3d492a671715c193b6fea9f036050b946069856b897e08c00768f5ee5ddcf70b7cd6d0e0",
|
||||
"58602ee7468e6bc9df21bd51b23c005f72d6cb013f0a1b48cbec5eca299299f97f09f54a9a01483eaeb315a6478bad37ba47ca1347c7c8fc9e6695592c91d723",
|
||||
"27f5b79ed256b050993d793496edf4807c1d85a7b0a67c9c4fa99860750b0ae66989670a8ffd7856d7ce411599e58c4d77b232a62bef64d15275be46a68235ff",
|
||||
"3957a976b9f1887bf004a8dca942c92d2b37ea52600f25e0c9bc5707d0279c00c6e85a839b0d2d8eb59c51d94788ebe62474a791cadf52cccf20f5070b6573fc",
|
||||
"eaa2376d55380bf772ecca9cb0aa4668c95c707162fa86d518c8ce0ca9bf7362b9f2a0adc3ff59922df921b94567e81e452f6c1a07fc817cebe99604b3505d38",
|
||||
"c1e2c78b6b2734e2480ec550434cb5d613111adcc21d475545c3b1b7e6ff12444476e5c055132e2229dc0f807044bb919b1a5662dd38a9ee65e243a3911aed1a",
|
||||
"8ab48713389dd0fcf9f965d3ce66b1e559a1f8c58741d67683cd971354f452e62d0207a65e436c5d5d8f8ee71c6abfe50e669004c302b31a7ea8311d4a916051",
|
||||
"24ce0addaa4c65038bd1b1c0f1452a0b128777aabc94a29df2fd6c7e2f85f8ab9ac7eff516b0e0a825c84a24cfe492eaad0a6308e46dd42fe8333ab971bb30ca",
|
||||
"5154f929ee03045b6b0c0004fa778edee1d139893267cc84825ad7b36c63de32798e4a166d24686561354f63b00709a1364b3c241de3febf0754045897467cd4",
|
||||
"e74e907920fd87bd5ad636dd11085e50ee70459c443e1ce5809af2bc2eba39f9e6d7128e0e3712c316da06f4705d78a4838e28121d4344a2c79c5e0db307a677",
|
||||
"bf91a22334bac20f3fd80663b3cd06c4e8802f30e6b59f90d3035cc9798a217ed5a31abbda7fa6842827bdf2a7a1c21f6fcfccbb54c6c52926f32da816269be1",
|
||||
"d9d5c74be5121b0bd742f26bffb8c89f89171f3f934913492b0903c271bbe2b3395ef259669bef43b57f7fcc3027db01823f6baee66e4f9fead4d6726c741fce",
|
||||
"50c8b8cf34cd879f80e2faab3230b0c0e1cc3e9dcadeb1b9d97ab923415dd9a1fe38addd5c11756c67990b256e95ad6d8f9fedce10bf1c90679cde0ecf1be347",
|
||||
"0a386e7cd5dd9b77a035e09fe6fee2c8ce61b5383c87ea43205059c5e4cd4f4408319bb0a82360f6a58e6c9ce3f487c446063bf813bc6ba535e17fc1826cfc91",
|
||||
"1f1459cb6b61cbac5f0efe8fc487538f42548987fcd56221cfa7beb22504769e792c45adfb1d6b3d60d7b749c8a75b0bdf14e8ea721b95dca538ca6e25711209",
|
||||
"e58b3836b7d8fedbb50ca5725c6571e74c0785e97821dab8b6298c10e4c079d4a6cdf22f0fedb55032925c16748115f01a105e77e00cee3d07924dc0d8f90659",
|
||||
"b929cc6505f020158672deda56d0db081a2ee34c00c1100029bdf8ea98034fa4bf3e8655ec697fe36f40553c5bb46801644a627d3342f4fc92b61f03290fb381",
|
||||
"72d353994b49d3e03153929a1e4d4f188ee58ab9e72ee8e512f29bc773913819ce057ddd7002c0433ee0a16114e3d156dd2c4a7e80ee53378b8670f23e33ef56",
|
||||
"c70ef9bfd775d408176737a0736d68517ce1aaad7e81a93c8c1ed967ea214f56c8a377b1763e676615b60f3988241eae6eab9685a5124929d28188f29eab06f7",
|
||||
"c230f0802679cb33822ef8b3b21bf7a9a28942092901d7dac3760300831026cf354c9232df3e084d9903130c601f63c1f4a4a4b8106e468cd443bbe5a734f45f",
|
||||
"6f43094cafb5ebf1f7a4937ec50f56a4c9da303cbb55ac1f27f1f1976cd96beda9464f0e7b9c54620b8a9fba983164b8be3578425a024f5fe199c36356b88972",
|
||||
"3745273f4c38225db2337381871a0c6aafd3af9b018c88aa02025850a5dc3a42a1a3e03e56cbf1b0876d63a441f1d2856a39b8801eb5af325201c415d65e97fe",
|
||||
"c50c44cca3ec3edaae779a7e179450ebdda2f97067c690aa6c5a4ac7c30139bb27c0df4db3220e63cb110d64f37ffe078db72653e2daacf93ae3f0a2d1a7eb2e",
|
||||
"8aef263e385cbc61e19b28914243262af5afe8726af3ce39a79c27028cf3ecd3f8d2dfd9cfc9ad91b58f6f20778fd5f02894a3d91c7d57d1e4b866a7f364b6be",
|
||||
"28696141de6e2d9bcb3235578a66166c1448d3e905a1b482d423be4bc5369bc8c74dae0acc9cc123e1d8ddce9f97917e8c019c552da32d39d2219b9abf0fa8c8",
|
||||
"2fb9eb2085830181903a9dafe3db428ee15be7662224efd643371fb25646aee716e531eca69b2bdc8233f1a8081fa43da1500302975a77f42fa592136710e9dc",
|
||||
"66f9a7143f7a3314a669bf2e24bbb35014261d639f495b6c9c1f104fe8e320aca60d4550d69d52edbd5a3cdeb4014ae65b1d87aa770b69ae5c15f4330b0b0ad8",
|
||||
"f4c4dd1d594c3565e3e25ca43dad82f62abea4835ed4cd811bcd975e46279828d44d4c62c3679f1b7f7b9dd4571d7b49557347b8c5460cbdc1bef690fb2a08c0",
|
||||
"8f1dc9649c3a84551f8f6e91cac68242a43b1f8f328ee92280257387fa7559aa6db12e4aeadc2d26099178749c6864b357f3f83b2fb3efa8d2a8db056bed6bcc",
|
||||
"3139c1a7f97afd1675d460ebbc07f2728aa150df849624511ee04b743ba0a833092f18c12dc91b4dd243f333402f59fe28abdbbbae301e7b659c7a26d5c0f979",
|
||||
"06f94a2996158a819fe34c40de3cf0379fd9fb85b3e363ba3926a0e7d960e3f4c2e0c70c7ce0ccb2a64fc29869f6e7ab12bd4d3f14fce943279027e785fb5c29",
|
||||
"c29c399ef3eee8961e87565c1ce263925fc3d0ce267d13e48dd9e732ee67b0f69fad56401b0f10fcaac119201046cca28c5b14abdea3212ae65562f7f138db3d",
|
||||
"4cec4c9df52eef05c3f6faaa9791bc7445937183224ecc37a1e58d0132d35617531d7e795f52af7b1eb9d147de1292d345fe341823f8e6bc1e5badca5c656108",
|
||||
"898bfbae93b3e18d00697eab7d9704fa36ec339d076131cefdf30edbe8d9cc81c3a80b129659b163a323bab9793d4feed92d54dae966c77529764a09be88db45",
|
||||
"ee9bd0469d3aaf4f14035be48a2c3b84d9b4b1fff1d945e1f1c1d38980a951be197b25fe22c731f20aeacc930ba9c4a1f4762227617ad350fdabb4e80273a0f4",
|
||||
"3d4d3113300581cd96acbf091c3d0f3c310138cd6979e6026cde623e2dd1b24d4a8638bed1073344783ad0649cc6305ccec04beb49f31c633088a99b65130267",
|
||||
"95c0591ad91f921ac7be6d9ce37e0663ed8011c1cfd6d0162a5572e94368bac02024485e6a39854aa46fe38e97d6c6b1947cd272d86b06bb5b2f78b9b68d559d",
|
||||
"227b79ded368153bf46c0a3ca978bfdbef31f3024a5665842468490b0ff748ae04e7832ed4c9f49de9b1706709d623e5c8c15e3caecae8d5e433430ff72f20eb",
|
||||
"5d34f3952f0105eef88ae8b64c6ce95ebfade0e02c69b08762a8712d2e4911ad3f941fc4034dc9b2e479fdbcd279b902faf5d838bb2e0c6495d372b5b7029813",
|
||||
"7f939bf8353abce49e77f14f3750af20b7b03902e1a1e7fb6aaf76d0259cd401a83190f15640e74f3e6c5a90e839c7821f6474757f75c7bf9002084ddc7a62dc",
|
||||
"062b61a2f9a33a71d7d0a06119644c70b0716a504de7e5e1be49bd7b86e7ed6817714f9f0fc313d06129597e9a2235ec8521de36f7290a90ccfc1ffa6d0aee29",
|
||||
"f29e01eeae64311eb7f1c6422f946bf7bea36379523e7b2bbaba7d1d34a22d5ea5f1c5a09d5ce1fe682cced9a4798d1a05b46cd72dff5c1b355440b2a2d476bc",
|
||||
"ec38cd3bbab3ef35d7cb6d5c914298351d8a9dc97fcee051a8a02f58e3ed6184d0b7810a5615411ab1b95209c3c810114fdeb22452084e77f3f847c6dbaafe16",
|
||||
"c2aef5e0ca43e82641565b8cb943aa8ba53550caef793b6532fafad94b816082f0113a3ea2f63608ab40437ecc0f0229cb8fa224dcf1c478a67d9b64162b92d1",
|
||||
"15f534efff7105cd1c254d074e27d5898b89313b7d366dc2d7d87113fa7d53aae13f6dba487ad8103d5e854c91fdb6e1e74b2ef6d1431769c30767dde067a35c",
|
||||
"89acbca0b169897a0a2714c2df8c95b5b79cb69390142b7d6018bb3e3076b099b79a964152a9d912b1b86412b7e372e9cecad7f25d4cbab8a317be36492a67d7",
|
||||
"e3c0739190ed849c9c962fd9dbb55e207e624fcac1eb417691515499eea8d8267b7e8f1287a63633af5011fde8c4ddf55bfdf722edf88831414f2cfaed59cb9a",
|
||||
"8d6cf87c08380d2d1506eee46fd4222d21d8c04e585fbfd08269c98f702833a156326a0724656400ee09351d57b440175e2a5de93cc5f80db6daf83576cf75fa",
|
||||
"da24bede383666d563eeed37f6319baf20d5c75d1635a6ba5ef4cfa1ac95487e96f8c08af600aab87c986ebad49fc70a58b4890b9c876e091016daf49e1d322e",
|
||||
"f9d1d1b1e87ea7ae753a029750cc1cf3d0157d41805e245c5617bb934e732f0ae3180b78e05bfe76c7c3051e3e3ac78b9b50c05142657e1e03215d6ec7bfd0fc",
|
||||
"11b7bc1668032048aa43343de476395e814bbbc223678db951a1b03a021efac948cfbe215f97fe9a72a2f6bc039e3956bfa417c1a9f10d6d7ba5d3d32ff323e5",
|
||||
"b8d9000e4fc2b066edb91afee8e7eb0f24e3a201db8b6793c0608581e628ed0bcc4e5aa6787992a4bcc44e288093e63ee83abd0bc3ec6d0934a674a4da13838a",
|
||||
"ce325e294f9b6719d6b61278276ae06a2564c03bb0b783fafe785bdf89c7d5acd83e78756d301b445699024eaeb77b54d477336ec2a4f332f2b3f88765ddb0c3",
|
||||
"29acc30e9603ae2fccf90bf97e6cc463ebe28c1b2f9b4b765e70537c25c702a29dcbfbf14c99c54345ba2b51f17b77b5f15db92bbad8fa95c471f5d070a137cc",
|
||||
"3379cbaae562a87b4c0425550ffdd6bfe1203f0d666cc7ea095be407a5dfe61ee91441cd5154b3e53b4f5fb31ad4c7a9ad5c7af4ae679aa51a54003a54ca6b2d",
|
||||
"3095a349d245708c7cf550118703d7302c27b60af5d4e67fc978f8a4e60953c7a04f92fcf41aee64321ccb707a895851552b1e37b00bc5e6b72fa5bcef9e3fff",
|
||||
"07262d738b09321f4dbccec4bb26f48cb0f0ed246ce0b31b9a6e7bc683049f1f3e5545f28ce932dd985c5ab0f43bd6de0770560af329065ed2e49d34624c2cbb",
|
||||
"b6405eca8ee3316c87061cc6ec18dba53e6c250c63ba1f3bae9e55dd3498036af08cd272aa24d713c6020d77ab2f3919af1a32f307420618ab97e73953994fb4",
|
||||
"7ee682f63148ee45f6e5315da81e5c6e557c2c34641fc509c7a5701088c38a74756168e2cd8d351e88fd1a451f360a01f5b2580f9b5a2e8cfc138f3dd59a3ffc",
|
||||
"1d263c179d6b268f6fa016f3a4f29e943891125ed8593c81256059f5a7b44af2dcb2030d175c00e62ecaf7ee96682aa07ab20a611024a28532b1c25b86657902",
|
||||
"106d132cbdb4cd2597812846e2bc1bf732fec5f0a5f65dbb39ec4e6dc64ab2ce6d24630d0f15a805c3540025d84afa98e36703c3dbee713e72dde8465bc1be7e",
|
||||
"0e79968226650667a8d862ea8da4891af56a4e3a8b6d1750e394f0dea76d640d85077bcec2cc86886e506751b4f6a5838f7f0b5fef765d9dc90dcdcbaf079f08",
|
||||
"521156a82ab0c4e566e5844d5e31ad9aaf144bbd5a464fdca34dbd5717e8ff711d3ffebbfa085d67fe996a34f6d3e4e60b1396bf4b1610c263bdbb834d560816",
|
||||
"1aba88befc55bc25efbce02db8b9933e46f57661baeabeb21cc2574d2a518a3cba5dc5a38e49713440b25f9c744e75f6b85c9d8f4681f676160f6105357b8406",
|
||||
"5a9949fcb2c473cda968ac1b5d08566dc2d816d960f57e63b898fa701cf8ebd3f59b124d95bfbbedc5f1cf0e17d5eaed0c02c50b69d8a402cabcca4433b51fd4",
|
||||
"b0cead09807c672af2eb2b0f06dde46cf5370e15a4096b1a7d7cbb36ec31c205fbefca00b7a4162fa89fb4fb3eb78d79770c23f44e7206664ce3cd931c291e5d",
|
||||
"bb6664931ec97044e45b2ae420ae1c551a8874bc937d08e969399c3964ebdba8346cdd5d09caafe4c28ba7ec788191ceca65ddd6f95f18583e040d0f30d0364d",
|
||||
"65bc770a5faa3792369803683e844b0be7ee96f29f6d6a35568006bd5590f9a4ef639b7a8061c7b0424b66b60ac34af3119905f33a9d8c3ae18382ca9b689900",
|
||||
"ea9b4dca333336aaf839a45c6eaa48b8cb4c7ddabffea4f643d6357ea6628a480a5b45f2b052c1b07d1fedca918b6f1139d80f74c24510dcbaa4be70eacc1b06",
|
||||
"e6342fb4a780ad975d0e24bce149989b91d360557e87994f6b457b895575cc02d0c15bad3ce7577f4c63927ff13f3e381ff7e72bdbe745324844a9d27e3f1c01",
|
||||
"3e209c9b33e8e461178ab46b1c64b49a07fb745f1c8bc95fbfb94c6b87c69516651b264ef980937fad41238b91ddc011a5dd777c7efd4494b4b6ecd3a9c22ac0",
|
||||
"fd6a3d5b1875d80486d6e69694a56dbb04a99a4d051f15db2689776ba1c4882e6d462a603b7015dc9f4b7450f05394303b8652cfb404a266962c41bae6e18a94",
|
||||
"951e27517e6bad9e4195fc8671dee3e7e9be69cee1422cb9fecfce0dba875f7b310b93ee3a3d558f941f635f668ff832d2c1d033c5e2f0997e4c66f147344e02",
|
||||
"8eba2f874f1ae84041903c7c4253c82292530fc8509550bfdc34c95c7e2889d5650b0ad8cb988e5c4894cb87fbfbb19612ea93ccc4c5cad17158b9763464b492",
|
||||
"16f712eaa1b7c6354719a8e7dbdfaf55e4063a4d277d947550019b38dfb564830911057d50506136e2394c3b28945cc964967d54e3000c2181626cfb9b73efd2",
|
||||
"c39639e7d5c7fb8cdd0fd3e6a52096039437122f21c78f1679cea9d78a734c56ecbeb28654b4f18e342c331f6f7229ec4b4bc281b2d80a6eb50043f31796c88c",
|
||||
"72d081af99f8a173dcc9a0ac4eb3557405639a29084b54a40172912a2f8a395129d5536f0918e902f9e8fa6000995f4168ddc5f893011be6a0dbc9b8a1a3f5bb",
|
||||
"c11aa81e5efd24d5fc27ee586cfd8847fbb0e27601ccece5ecca0198e3c7765393bb74457c7e7a27eb9170350e1fb53857177506be3e762cc0f14d8c3afe9077",
|
||||
"c28f2150b452e6c0c424bcde6f8d72007f9310fed7f2f87de0dbb64f4479d6c1441ba66f44b2accee61609177ed340128b407ecec7c64bbe50d63d22d8627727",
|
||||
"f63d88122877ec30b8c8b00d22e89000a966426112bd44166e2f525b769ccbe9b286d437a0129130dde1a86c43e04bedb594e671d98283afe64ce331de9828fd",
|
||||
"348b0532880b88a6614a8d7408c3f913357fbb60e995c60205be9139e74998aede7f4581e42f6b52698f7fa1219708c14498067fd1e09502de83a77dd281150c",
|
||||
"5133dc8bef725359dff59792d85eaf75b7e1dcd1978b01c35b1b85fcebc63388ad99a17b6346a217dc1a9622ebd122ecf6913c4d31a6b52a695b86af00d741a0",
|
||||
"2753c4c0e98ecad806e88780ec27fccd0f5c1ab547f9e4bf1659d192c23aa2cc971b58b6802580baef8adc3b776ef7086b2545c2987f348ee3719cdef258c403",
|
||||
"b1663573ce4b9d8caefc865012f3e39714b9898a5da6ce17c25a6a47931a9ddb9bbe98adaa553beed436e89578455416c2a52a525cf2862b8d1d49a2531b7391",
|
||||
"64f58bd6bfc856f5e873b2a2956ea0eda0d6db0da39c8c7fc67c9f9feefcff3072cdf9e6ea37f69a44f0c61aa0da3693c2db5b54960c0281a088151db42b11e8",
|
||||
"0764c7be28125d9065c4b98a69d60aede703547c66a12e17e1c618994132f5ef82482c1e3fe3146cc65376cc109f0138ed9a80e49f1f3c7d610d2f2432f20605",
|
||||
"f748784398a2ff03ebeb07e155e66116a839741a336e32da71ec696001f0ad1b25cd48c69cfca7265eca1dd71904a0ce748ac4124f3571076dfa7116a9cf00e9",
|
||||
"3f0dbc0186bceb6b785ba78d2a2a013c910be157bdaffae81bb6663b1a73722f7f1228795f3ecada87cf6ef0078474af73f31eca0cc200ed975b6893f761cb6d",
|
||||
"d4762cd4599876ca75b2b8fe249944dbd27ace741fdab93616cbc6e425460feb51d4e7adcc38180e7fc47c89024a7f56191adb878dfde4ead62223f5a2610efe",
|
||||
"cd36b3d5b4c91b90fcbba79513cfee1907d8645a162afd0cd4cf4192d4a5f4c892183a8eacdb2b6b6a9d9aa8c11ac1b261b380dbee24ca468f1bfd043c58eefe",
|
||||
"98593452281661a53c48a9d8cd790826c1a1ce567738053d0bee4a91a3d5bd92eefdbabebe3204f2031ca5f781bda99ef5d8ae56e5b04a9e1ecd21b0eb05d3e1",
|
||||
"771f57dd2775ccdab55921d3e8e30ccf484d61fe1c1b9c2ae819d0fb2a12fab9be70c4a7a138da84e8280435daade5bbe66af0836a154f817fb17f3397e725a3",
|
||||
"c60897c6f828e21f16fbb5f15b323f87b6c8955eabf1d38061f707f608abdd993fac3070633e286cf8339ce295dd352df4b4b40b2f29da1dd50b3a05d079e6bb",
|
||||
"8210cd2c2d3b135c2cf07fa0d1433cd771f325d075c6469d9c7f1ba0943cd4ab09808cabf4acb9ce5bb88b498929b4b847f681ad2c490d042db2aec94214b06b",
|
||||
"1d4edfffd8fd80f7e4107840fa3aa31e32598491e4af7013c197a65b7f36dd3ac4b478456111cd4309d9243510782fa31b7c4c95fa951520d020eb7e5c36e4ef",
|
||||
"af8e6e91fab46ce4873e1a50a8ef448cc29121f7f74deef34a71ef89cc00d9274bc6c2454bbb3230d8b2ec94c62b1dec85f3593bfa30ea6f7a44d7c09465a253",
|
||||
"29fd384ed4906f2d13aa9fe7af905990938bed807f1832454a372ab412eea1f5625a1fcc9ac8343b7c67c5aba6e0b1cc4644654913692c6b39eb9187ceacd3ec",
|
||||
"a268c7885d9874a51c44dffed8ea53e94f78456e0b2ed99ff5a3924760813826d960a15edbedbb5de5226ba4b074e71b05c55b9756bb79e55c02754c2c7b6c8a",
|
||||
"0cf8545488d56a86817cd7ecb10f7116b7ea530a45b6ea497b6c72c997e09e3d0da8698f46bb006fc977c2cd3d1177463ac9057fdd1662c85d0c126443c10473",
|
||||
"b39614268fdd8781515e2cfebf89b4d5402bab10c226e6344e6b9ae000fb0d6c79cb2f3ec80e80eaeb1980d2f8698916bd2e9f747236655116649cd3ca23a837",
|
||||
"74bef092fc6f1e5dba3663a3fb003b2a5ba257496536d99f62b9d73f8f9eb3ce9ff3eec709eb883655ec9eb896b9128f2afc89cf7d1ab58a72f4a3bf034d2b4a",
|
||||
"3a988d38d75611f3ef38b8774980b33e573b6c57bee0469ba5eed9b44f29945e7347967fba2c162e1c3be7f310f2f75ee2381e7bfd6b3f0baea8d95dfb1dafb1",
|
||||
"58aedfce6f67ddc85a28c992f1c0bd0969f041e66f1ee88020a125cbfcfebcd61709c9c4eba192c15e69f020d462486019fa8dea0cd7a42921a19d2fe546d43d",
|
||||
"9347bd291473e6b4e368437b8e561e065f649a6d8ada479ad09b1999a8f26b91cf6120fd3bfe014e83f23acfa4c0ad7b3712b2c3c0733270663112ccd9285cd9",
|
||||
"b32163e7c5dbb5f51fdc11d2eac875efbbcb7e7699090a7e7ff8a8d50795af5d74d9ff98543ef8cdf89ac13d0485278756e0ef00c817745661e1d59fe38e7537",
|
||||
"1085d78307b1c4b008c57a2e7e5b234658a0a82e4ff1e4aaac72b312fda0fe27d233bc5b10e9cc17fdc7697b540c7d95eb215a19a1a0e20e1abfa126efd568c7",
|
||||
"4e5c734c7dde011d83eac2b7347b373594f92d7091b9ca34cb9c6f39bdf5a8d2f134379e16d822f6522170ccf2ddd55c84b9e6c64fc927ac4cf8dfb2a17701f2",
|
||||
"695d83bd990a1117b3d0ce06cc888027d12a054c2677fd82f0d4fbfc93575523e7991a5e35a3752e9b70ce62992e268a877744cdd435f5f130869c9a2074b338",
|
||||
"a6213743568e3b3158b9184301f3690847554c68457cb40fc9a4b8cfd8d4a118c301a07737aeda0f929c68913c5f51c80394f53bff1c3e83b2e40ca97eba9e15",
|
||||
"d444bfa2362a96df213d070e33fa841f51334e4e76866b8139e8af3bb3398be2dfaddcbc56b9146de9f68118dc5829e74b0c28d7711907b121f9161cb92b69a9",
|
||||
"142709d62e28fcccd0af97fad0f8465b971e82201dc51070faa0372aa43e92484be1c1e73ba10906d5d1853db6a4106e0a7bf9800d373d6dee2d46d62ef2a461",
|
||||
}
|
1420
vendor/github.com/dchest/blake2b/block.go
generated
vendored
Normal file
File diff suppressed because it is too large
26
vendor/github.com/jessevdk/go-flags/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
Copyright (c) 2012 Jesse van den Kieboom. All rights reserved.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
135
vendor/github.com/jessevdk/go-flags/README.md
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
go-flags: a go library for parsing command line arguments
|
||||
=========================================================
|
||||
|
||||
[GoDoc](https://godoc.org/github.com/jessevdk/go-flags) [Build Status](https://travis-ci.org/jessevdk/go-flags) [Coverage Status](https://coveralls.io/r/jessevdk/go-flags?branch=master)
|
||||
|
||||
This library provides similar functionality to the builtin flag library of
|
||||
go, but provides much more functionality and nicer formatting. From the
|
||||
documentation:
|
||||
|
||||
Package flags provides an extensive command line option parser.
|
||||
The flags package is similar in functionality to the go builtin flag package
|
||||
but provides more options and uses reflection to provide a convenient and
|
||||
succinct way of specifying command line options.
|
||||
|
||||
Supported features:
|
||||
* Options with short names (-v)
|
||||
* Options with long names (--verbose)
|
||||
* Options with and without arguments (bool v.s. other type)
|
||||
* Options with optional arguments and default values
|
||||
* Multiple option groups each containing a set of options
|
||||
* Generate and print well-formatted help message
|
||||
* Passing remaining command line arguments after -- (optional)
|
||||
* Ignoring unknown command line options (optional)
|
||||
* Supports -I/usr/include -I=/usr/include -I /usr/include option argument specification
|
||||
* Supports multiple short options -aux
|
||||
* Supports all primitive go types (string, int{8..64}, uint{8..64}, float)
|
||||
* Supports same option multiple times (can store in slice or last option counts)
|
||||
* Supports maps
|
||||
* Supports function callbacks
|
||||
* Supports namespaces for (nested) option groups
|
||||
|
||||
The flags package uses structs, reflection and struct field tags
|
||||
to allow users to specify command line options. This results in very simple
|
||||
and concise specification of your application options. For example:
|
||||
|
||||
```go
|
||||
type Options struct {
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
|
||||
}
|
||||
```
|
||||
|
||||
This specifies one option with a short name -v and a long name --verbose.
|
||||
When either -v or --verbose is found on the command line, a 'true' value
|
||||
will be appended to the Verbose field. e.g. when specifying -vvv, the
|
||||
resulting value of Verbose will be {[true, true, true]}.
|
||||
|
||||
Example:
|
||||
--------
|
||||
```go
|
||||
var opts struct {
|
||||
// Slice of bool will append 'true' each time the option
|
||||
// is encountered (can be set multiple times, like -vvv)
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
|
||||
|
||||
// Example of automatic marshalling to desired type (uint)
|
||||
Offset uint `long:"offset" description:"Offset"`
|
||||
|
||||
// Example of a callback, called each time the option is found.
|
||||
Call func(string) `short:"c" description:"Call phone number"`
|
||||
|
||||
// Example of a required flag
|
||||
Name string `short:"n" long:"name" description:"A name" required:"true"`
|
||||
|
||||
// Example of a value name
|
||||
File string `short:"f" long:"file" description:"A file" value-name:"FILE"`
|
||||
|
||||
// Example of a pointer
|
||||
Ptr *int `short:"p" description:"A pointer to an integer"`
|
||||
|
||||
// Example of a slice of strings
|
||||
StringSlice []string `short:"s" description:"A slice of strings"`
|
||||
|
||||
// Example of a slice of pointers
|
||||
PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
|
||||
|
||||
// Example of a map
|
||||
IntMap map[string]int `long:"intmap" description:"A map from string to int"`
|
||||
}
|
||||
|
||||
// Callback which will invoke callto:<argument> to call a number.
|
||||
// Note that this works just on OS X (and probably only with
|
||||
// Skype) but it shows the idea.
|
||||
opts.Call = func(num string) {
|
||||
cmd := exec.Command("open", "callto:"+num)
|
||||
cmd.Start()
|
||||
cmd.Process.Release()
|
||||
}
|
||||
|
||||
// Make some fake arguments to parse.
|
||||
args := []string{
|
||||
"-vv",
|
||||
"--offset=5",
|
||||
"-n", "Me",
|
||||
"-p", "3",
|
||||
"-s", "hello",
|
||||
"-s", "world",
|
||||
"--ptrslice", "hello",
|
||||
"--ptrslice", "world",
|
||||
"--intmap", "a:1",
|
||||
"--intmap", "b:5",
|
||||
"arg1",
|
||||
"arg2",
|
||||
"arg3",
|
||||
}
|
||||
|
||||
// Parse flags from `args'. Note that here we use flags.ParseArgs for
|
||||
// the sake of making a working example. Normally, you would simply use
|
||||
// flags.Parse(&opts) which uses os.Args
|
||||
args, err := flags.ParseArgs(&opts, args)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
fmt.Printf("Verbosity: %v\n", opts.Verbose)
|
||||
fmt.Printf("Offset: %d\n", opts.Offset)
|
||||
fmt.Printf("Name: %s\n", opts.Name)
|
||||
fmt.Printf("Ptr: %d\n", *opts.Ptr)
|
||||
fmt.Printf("StringSlice: %v\n", opts.StringSlice)
|
||||
fmt.Printf("PtrSlice: [%v %v]\n", *opts.PtrSlice[0], *opts.PtrSlice[1])
|
||||
fmt.Printf("IntMap: [a:%v b:%v]\n", opts.IntMap["a"], opts.IntMap["b"])
|
||||
fmt.Printf("Remaining args: %s\n", strings.Join(args, " "))
|
||||
|
||||
// Output: Verbosity: [true true]
|
||||
// Offset: 5
|
||||
// Name: Me
|
||||
// Ptr: 3
|
||||
// StringSlice: [hello world]
|
||||
// PtrSlice: [hello world]
|
||||
// IntMap: [a:1 b:5]
|
||||
// Remaining args: arg1 arg2 arg3
|
||||
```
|
||||
|
||||
More information can be found in the godocs: <http://godoc.org/github.com/jessevdk/go-flags>
|
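The example above builds its own argument list with flags.ParseArgs; the flags.Parse(&opts) form mentioned in its comments reads os.Args directly. A minimal sketch of that form, with illustrative option names, assuming the package is imported as `flags`:

```go
package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
	}

	// Parse uses the Default options and os.Args[1:]; it returns the
	// remaining (positional) arguments after the recognized flags.
	rest, err := flags.Parse(&opts)
	if err != nil {
		os.Exit(1) // with Default options the error message has already been printed
	}

	fmt.Println("verbosity:", len(opts.Verbose), "remaining:", rest)
}
```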
24
vendor/github.com/jessevdk/go-flags/arg.go
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// Arg represents a positional argument on the command line.
|
||||
type Arg struct {
|
||||
// The name of the positional argument (used in the help)
|
||||
Name string
|
||||
|
||||
// A description of the positional argument (used in the help)
|
||||
Description string
|
||||
|
||||
// Whether a positional argument is required
|
||||
Required int
|
||||
|
||||
value reflect.Value
|
||||
tag multiTag
|
||||
}
|
||||
|
||||
func (a *Arg) isRemaining() bool {
|
||||
return a.value.Type().Kind() == reflect.Slice
|
||||
}
|
133
vendor/github.com/jessevdk/go-flags/arg_test.go
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPositional(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Positional struct {
|
||||
Command int
|
||||
Filename string
|
||||
Rest []string
|
||||
} `positional-args:"yes" required:"yes"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
ret, err := p.ParseArgs([]string{"10", "arg_test.go", "a", "b"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if opts.Positional.Command != 10 {
|
||||
t.Fatalf("Expected opts.Positional.Command to be 10, but got %v", opts.Positional.Command)
|
||||
}
|
||||
|
||||
if opts.Positional.Filename != "arg_test.go" {
|
||||
t.Fatalf("Expected opts.Positional.Filename to be \"arg_test.go\", but got %v", opts.Positional.Filename)
|
||||
}
|
||||
|
||||
assertStringArray(t, opts.Positional.Rest, []string{"a", "b"})
|
||||
assertStringArray(t, ret, []string{})
|
||||
}
|
||||
|
||||
func TestPositionalRequired(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Positional struct {
|
||||
Command int
|
||||
Filename string
|
||||
Rest []string
|
||||
} `positional-args:"yes" required:"yes"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{"10"})
|
||||
|
||||
assertError(t, err, ErrRequired, "the required argument `Filename` was not provided")
|
||||
}
|
||||
|
||||
func TestPositionalRequiredRest1Fail(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Positional struct {
|
||||
Rest []string `required:"yes"`
|
||||
} `positional-args:"yes"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{})
|
||||
|
||||
assertError(t, err, ErrRequired, "the required argument `Rest (at least 1 argument)` was not provided")
|
||||
}
|
||||
|
||||
func TestPositionalRequiredRest1Pass(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Positional struct {
|
||||
Rest []string `required:"yes"`
|
||||
} `positional-args:"yes"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{"rest1"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if len(opts.Positional.Rest) != 1 {
|
||||
t.Fatalf("Expected 1 positional rest argument")
|
||||
}
|
||||
|
||||
assertString(t, opts.Positional.Rest[0], "rest1")
|
||||
}
|
||||
|
||||
func TestPositionalRequiredRest2Fail(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Positional struct {
|
||||
Rest []string `required:"2"`
|
||||
} `positional-args:"yes"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{"rest1"})
|
||||
|
||||
assertError(t, err, ErrRequired, "the required argument `Rest (at least 2 arguments, but got only 1)` was not provided")
|
||||
}
|
||||
|
||||
func TestPositionalRequiredRest2Pass(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Positional struct {
|
||||
Rest []string `required:"2"`
|
||||
} `positional-args:"yes"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{"rest1", "rest2", "rest3"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if len(opts.Positional.Rest) != 3 {
|
||||
t.Fatalf("Expected 3 positional rest arguments")
|
||||
}
|
||||
|
||||
assertString(t, opts.Positional.Rest[0], "rest1")
|
||||
assertString(t, opts.Positional.Rest[1], "rest2")
|
||||
assertString(t, opts.Positional.Rest[2], "rest3")
|
||||
}
|
177
vendor/github.com/jessevdk/go-flags/assert_test.go
generated
vendored
Normal file
@@ -0,0 +1,177 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"runtime"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func assertCallerInfo() (string, int) {
|
||||
ptr := make([]uintptr, 15)
|
||||
n := runtime.Callers(1, ptr)
|
||||
|
||||
if n == 0 {
|
||||
return "", 0
|
||||
}
|
||||
|
||||
mef := runtime.FuncForPC(ptr[0])
|
||||
mefile, meline := mef.FileLine(ptr[0])
|
||||
|
||||
for i := 2; i < n; i++ {
|
||||
f := runtime.FuncForPC(ptr[i])
|
||||
file, line := f.FileLine(ptr[i])
|
||||
|
||||
if file != mefile {
|
||||
return file, line
|
||||
}
|
||||
}
|
||||
|
||||
return mefile, meline
|
||||
}
|
||||
|
||||
func assertErrorf(t *testing.T, format string, args ...interface{}) {
|
||||
msg := fmt.Sprintf(format, args...)
|
||||
|
||||
file, line := assertCallerInfo()
|
||||
|
||||
t.Errorf("%s:%d: %s", path.Base(file), line, msg)
|
||||
}
|
||||
|
||||
func assertFatalf(t *testing.T, format string, args ...interface{}) {
|
||||
msg := fmt.Sprintf(format, args...)
|
||||
|
||||
file, line := assertCallerInfo()
|
||||
|
||||
t.Fatalf("%s:%d: %s", path.Base(file), line, msg)
|
||||
}
|
||||
|
||||
func assertString(t *testing.T, a string, b string) {
|
||||
if a != b {
|
||||
assertErrorf(t, "Expected %#v, but got %#v", b, a)
|
||||
}
|
||||
}
|
||||
|
||||
func assertStringArray(t *testing.T, a []string, b []string) {
|
||||
if len(a) != len(b) {
|
||||
assertErrorf(t, "Expected %#v, but got %#v", b, a)
|
||||
return
|
||||
}
|
||||
|
||||
for i, v := range a {
|
||||
if b[i] != v {
|
||||
assertErrorf(t, "Expected %#v, but got %#v", b, a)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func assertBoolArray(t *testing.T, a []bool, b []bool) {
|
||||
if len(a) != len(b) {
|
||||
assertErrorf(t, "Expected %#v, but got %#v", b, a)
|
||||
return
|
||||
}
|
||||
|
||||
for i, v := range a {
|
||||
if b[i] != v {
|
||||
assertErrorf(t, "Expected %#v, but got %#v", b, a)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func assertParserSuccess(t *testing.T, data interface{}, args ...string) (*Parser, []string) {
|
||||
parser := NewParser(data, Default&^PrintErrors)
|
||||
ret, err := parser.ParseArgs(args)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected parse error: %s", err)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return parser, ret
|
||||
}
|
||||
|
||||
func assertParseSuccess(t *testing.T, data interface{}, args ...string) []string {
|
||||
_, ret := assertParserSuccess(t, data, args...)
|
||||
return ret
|
||||
}
|
||||
|
||||
func assertError(t *testing.T, err error, typ ErrorType, msg string) {
|
||||
if err == nil {
|
||||
assertFatalf(t, "Expected error: %s", msg)
|
||||
return
|
||||
}
|
||||
|
||||
if e, ok := err.(*Error); !ok {
|
||||
assertFatalf(t, "Expected Error type, but got %#v", err)
|
||||
} else {
|
||||
if e.Type != typ {
|
||||
assertErrorf(t, "Expected error type {%s}, but got {%s}", typ, e.Type)
|
||||
}
|
||||
|
||||
if e.Message != msg {
|
||||
assertErrorf(t, "Expected error message %#v, but got %#v", msg, e.Message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func assertParseFail(t *testing.T, typ ErrorType, msg string, data interface{}, args ...string) []string {
|
||||
parser := NewParser(data, Default&^PrintErrors)
|
||||
ret, err := parser.ParseArgs(args)
|
||||
|
||||
assertError(t, err, typ, msg)
|
||||
return ret
|
||||
}
|
||||
|
||||
func diff(a, b string) (string, error) {
|
||||
atmp, err := ioutil.TempFile("", "help-diff")
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
btmp, err := ioutil.TempFile("", "help-diff")
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if _, err := io.WriteString(atmp, a); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if _, err := io.WriteString(btmp, b); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
ret, err := exec.Command("diff", "-u", "-d", "--label", "got", atmp.Name(), "--label", "expected", btmp.Name()).Output()
|
||||
|
||||
os.Remove(atmp.Name())
|
||||
os.Remove(btmp.Name())
|
||||
|
||||
if err.Error() == "exit status 1" {
|
||||
return string(ret), nil
|
||||
}
|
||||
|
||||
return string(ret), err
|
||||
}
|
||||
|
||||
func assertDiff(t *testing.T, actual, expected, msg string) {
|
||||
if actual == expected {
|
||||
return
|
||||
}
|
||||
|
||||
ret, err := diff(actual, expected)
|
||||
|
||||
if err != nil {
|
||||
assertErrorf(t, "Unexpected diff error: %s", err)
|
||||
assertErrorf(t, "Unexpected %s, expected:\n\n%s\n\nbut got\n\n%s", msg, expected, actual)
|
||||
} else {
|
||||
assertErrorf(t, "Unexpected %s:\n\n%s", msg, ret)
|
||||
}
|
||||
}
|
16
vendor/github.com/jessevdk/go-flags/check_crosscompile.sh
generated
vendored
Executable file
@@ -0,0 +1,16 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo '# linux arm7'
|
||||
GOARM=7 GOARCH=arm GOOS=linux go build
|
||||
echo '# linux arm5'
|
||||
GOARM=5 GOARCH=arm GOOS=linux go build
|
||||
echo '# windows 386'
|
||||
GOARCH=386 GOOS=windows go build
|
||||
echo '# windows amd64'
|
||||
GOARCH=amd64 GOOS=windows go build
|
||||
echo '# darwin'
|
||||
GOARCH=amd64 GOOS=darwin go build
|
||||
echo '# freebsd'
|
||||
GOARCH=amd64 GOOS=freebsd go build
|
59
vendor/github.com/jessevdk/go-flags/closest.go
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
package flags
|
||||
|
||||
func levenshtein(s string, t string) int {
|
||||
if len(s) == 0 {
|
||||
return len(t)
|
||||
}
|
||||
|
||||
if len(t) == 0 {
|
||||
return len(s)
|
||||
}
|
||||
|
||||
dists := make([][]int, len(s)+1)
|
||||
for i := range dists {
|
||||
dists[i] = make([]int, len(t)+1)
|
||||
dists[i][0] = i
|
||||
}
|
||||
|
||||
for j := range t {
|
||||
dists[0][j] = j
|
||||
}
|
||||
|
||||
for i, sc := range s {
|
||||
for j, tc := range t {
|
||||
if sc == tc {
|
||||
dists[i+1][j+1] = dists[i][j]
|
||||
} else {
|
||||
dists[i+1][j+1] = dists[i][j] + 1
|
||||
if dists[i+1][j] < dists[i+1][j+1] {
|
||||
dists[i+1][j+1] = dists[i+1][j] + 1
|
||||
}
|
||||
if dists[i][j+1] < dists[i+1][j+1] {
|
||||
dists[i+1][j+1] = dists[i][j+1] + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return dists[len(s)][len(t)]
|
||||
}
|
||||
|
||||
func closestChoice(cmd string, choices []string) (string, int) {
|
||||
if len(choices) == 0 {
|
||||
return "", 0
|
||||
}
|
||||
|
||||
mincmd := -1
|
||||
mindist := -1
|
||||
|
||||
for i, c := range choices {
|
||||
l := levenshtein(cmd, c)
|
||||
|
||||
if mincmd < 0 || l < mindist {
|
||||
mindist = l
|
||||
mincmd = i
|
||||
}
|
||||
}
|
||||
|
||||
return choices[mincmd], mindist
|
||||
}
|
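closestChoice pairs an unknown command name with the candidate at minimum Levenshtein distance, which is what produces the parser's "did you mean" hint. A hedged sketch of that behavior through the public API (the command names here are made up):

```go
package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct{}

	// flags.None keeps the parser from printing errors itself, so the
	// suggestion is visible on the returned error.
	p := flags.NewParser(&opts, flags.None)

	var add, remove struct{}
	p.AddCommand("add", "add an item", "", &add)
	p.AddCommand("remove", "remove an item", "", &remove)

	// A misspelled command name is matched against the known commands
	// with closestChoice/levenshtein to build the hint.
	if _, err := p.ParseArgs([]string{"rmive"}); err != nil {
		fmt.Fprintln(os.Stderr, err) // e.g. Unknown command `rmive', did you mean `remove'?
	}
}
```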
441
vendor/github.com/jessevdk/go-flags/command.go
generated
vendored
Normal file
@@ -0,0 +1,441 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Command represents an application command. Commands can be added to the
|
||||
// parser (which itself is a command) and are selected/executed when its name
|
||||
// is specified on the command line. The Command type embeds a Group and
|
||||
// therefore also carries a set of command specific options.
|
||||
type Command struct {
|
||||
// Embedded, see Group for more information
|
||||
*Group
|
||||
|
||||
// The name by which the command can be invoked
|
||||
Name string
|
||||
|
||||
// The active sub command (set by parsing) or nil
|
||||
Active *Command
|
||||
|
||||
// Whether subcommands are optional
|
||||
SubcommandsOptional bool
|
||||
|
||||
// Aliases for the command
|
||||
Aliases []string
|
||||
|
||||
// Whether positional arguments are required
|
||||
ArgsRequired bool
|
||||
|
||||
commands []*Command
|
||||
hasBuiltinHelpGroup bool
|
||||
args []*Arg
|
||||
}
|
||||
|
||||
// Commander is an interface which can be implemented by any command added in
|
||||
// the options. When implemented, the Execute method will be called for the last
|
||||
// specified (sub)command providing the remaining command line arguments.
|
||||
type Commander interface {
|
||||
// Execute will be called for the last active (sub)command. The
|
||||
// args argument contains the remaining command line arguments. The
|
||||
// error that Execute returns will be eventually passed out of the
|
||||
// Parse method of the Parser.
|
||||
Execute(args []string) error
|
||||
}
|
||||
|
||||
// Usage is an interface which can be implemented to show a custom usage string
|
||||
// in the help message shown for a command.
|
||||
type Usage interface {
|
||||
// Usage is called for commands to allow customized printing of command
|
||||
// usage in the generated help message.
|
||||
Usage() string
|
||||
}
|
||||
|
||||
type lookup struct {
|
||||
shortNames map[string]*Option
|
||||
longNames map[string]*Option
|
||||
|
||||
commands map[string]*Command
|
||||
}
|
||||
|
||||
// AddCommand adds a new command to the parser with the given name and data. The
|
||||
// data needs to be a pointer to a struct from which the fields indicate which
|
||||
// options are in the command. The provided data can implement the Command and
|
||||
// Usage interfaces.
|
||||
func (c *Command) AddCommand(command string, shortDescription string, longDescription string, data interface{}) (*Command, error) {
|
||||
cmd := newCommand(command, shortDescription, longDescription, data)
|
||||
|
||||
cmd.parent = c
|
||||
|
||||
if err := cmd.scan(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
c.commands = append(c.commands, cmd)
|
||||
return cmd, nil
|
||||
}
|
||||
|
||||
// AddGroup adds a new group to the command with the given name and data. The
|
||||
// data needs to be a pointer to a struct from which the fields indicate which
|
||||
// options are in the group.
|
||||
func (c *Command) AddGroup(shortDescription string, longDescription string, data interface{}) (*Group, error) {
|
||||
group := newGroup(shortDescription, longDescription, data)
|
||||
|
||||
group.parent = c
|
||||
|
||||
if err := group.scanType(c.scanSubcommandHandler(group)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
c.groups = append(c.groups, group)
|
||||
return group, nil
|
||||
}
|
||||
|
||||
// Commands returns a list of subcommands of this command.
|
||||
func (c *Command) Commands() []*Command {
|
||||
return c.commands
|
||||
}
|
||||
|
||||
// Find locates the subcommand with the given name and returns it. If no such
|
||||
// command can be found Find will return nil.
|
||||
func (c *Command) Find(name string) *Command {
|
||||
for _, cc := range c.commands {
|
||||
if cc.match(name) {
|
||||
return cc
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Find an option that is part of the command, or any of its
|
||||
// parent commands, by matching its long name
|
||||
// (including the option namespace).
|
||||
func (c *Command) FindOptionByLongName(longName string) (option *Option) {
|
||||
for option == nil && c != nil {
|
||||
option = c.Group.FindOptionByLongName(longName)
|
||||
|
||||
c, _ = c.parent.(*Command)
|
||||
}
|
||||
|
||||
return option
|
||||
}
|
||||
|
||||
// Find an option that is part of the command, or any of its
|
||||
// parent commands, by matching its long name
|
||||
// (including the option namespace).
|
||||
func (c *Command) FindOptionByShortName(shortName rune) (option *Option) {
|
||||
for option == nil && c != nil {
|
||||
option = c.Group.FindOptionByShortName(shortName)
|
||||
|
||||
c, _ = c.parent.(*Command)
|
||||
}
|
||||
|
||||
return option
|
||||
}
|
||||
|
||||
// Args returns a list of positional arguments associated with this command.
|
||||
func (c *Command) Args() []*Arg {
|
||||
ret := make([]*Arg, len(c.args))
|
||||
copy(ret, c.args)
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func newCommand(name string, shortDescription string, longDescription string, data interface{}) *Command {
|
||||
return &Command{
|
||||
Group: newGroup(shortDescription, longDescription, data),
|
||||
Name: name,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) scanSubcommandHandler(parentg *Group) scanHandler {
|
||||
f := func(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
|
||||
mtag := newMultiTag(string(sfield.Tag))
|
||||
|
||||
if err := mtag.Parse(); err != nil {
|
||||
return true, err
|
||||
}
|
||||
|
||||
positional := mtag.Get("positional-args")
|
||||
|
||||
if len(positional) != 0 {
|
||||
stype := realval.Type()
|
||||
|
||||
for i := 0; i < stype.NumField(); i++ {
|
||||
field := stype.Field(i)
|
||||
|
||||
m := newMultiTag((string(field.Tag)))
|
||||
|
||||
if err := m.Parse(); err != nil {
|
||||
return true, err
|
||||
}
|
||||
|
||||
name := m.Get("positional-arg-name")
|
||||
|
||||
if len(name) == 0 {
|
||||
name = field.Name
|
||||
}
|
||||
|
||||
var required int
|
||||
|
||||
sreq := m.Get("required")
|
||||
|
||||
if sreq != "" {
|
||||
required = 1
|
||||
|
||||
if preq, err := strconv.ParseInt(sreq, 10, 32); err == nil {
|
||||
required = int(preq)
|
||||
}
|
||||
}
|
||||
|
||||
arg := &Arg{
|
||||
Name: name,
|
||||
Description: m.Get("description"),
|
||||
Required: required,
|
||||
|
||||
value: realval.Field(i),
|
||||
tag: m,
|
||||
}
|
||||
|
||||
c.args = append(c.args, arg)
|
||||
|
||||
if len(mtag.Get("required")) != 0 {
|
||||
c.ArgsRequired = true
|
||||
}
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
subcommand := mtag.Get("command")
|
||||
|
||||
if len(subcommand) != 0 {
|
||||
ptrval := reflect.NewAt(realval.Type(), unsafe.Pointer(realval.UnsafeAddr()))
|
||||
|
||||
shortDescription := mtag.Get("description")
|
||||
longDescription := mtag.Get("long-description")
|
||||
subcommandsOptional := mtag.Get("subcommands-optional")
|
||||
aliases := mtag.GetMany("alias")
|
||||
|
||||
subc, err := c.AddCommand(subcommand, shortDescription, longDescription, ptrval.Interface())
|
||||
if err != nil {
|
||||
return true, err
|
||||
}
|
||||
|
||||
subc.Hidden = mtag.Get("hidden") != ""
|
||||
|
||||
if len(subcommandsOptional) > 0 {
|
||||
subc.SubcommandsOptional = true
|
||||
}
|
||||
|
||||
if len(aliases) > 0 {
|
||||
subc.Aliases = aliases
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return parentg.scanSubGroupHandler(realval, sfield)
|
||||
}
|
||||
|
||||
return f
|
||||
}
|
||||
|
||||
func (c *Command) scan() error {
|
||||
return c.scanType(c.scanSubcommandHandler(c.Group))
|
||||
}
|
||||
|
||||
func (c *Command) eachOption(f func(*Command, *Group, *Option)) {
|
||||
c.eachCommand(func(c *Command) {
|
||||
c.eachGroup(func(g *Group) {
|
||||
for _, option := range g.options {
|
||||
f(c, g, option)
|
||||
}
|
||||
})
|
||||
}, true)
|
||||
}
|
||||
|
||||
func (c *Command) eachCommand(f func(*Command), recurse bool) {
|
||||
f(c)
|
||||
|
||||
for _, cc := range c.commands {
|
||||
if recurse {
|
||||
cc.eachCommand(f, true)
|
||||
} else {
|
||||
f(cc)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) eachActiveGroup(f func(cc *Command, g *Group)) {
|
||||
c.eachGroup(func(g *Group) {
|
||||
f(c, g)
|
||||
})
|
||||
|
||||
if c.Active != nil {
|
||||
c.Active.eachActiveGroup(f)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) addHelpGroups(showHelp func() error) {
|
||||
if !c.hasBuiltinHelpGroup {
|
||||
c.addHelpGroup(showHelp)
|
||||
c.hasBuiltinHelpGroup = true
|
||||
}
|
||||
|
||||
for _, cc := range c.commands {
|
||||
cc.addHelpGroups(showHelp)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) makeLookup() lookup {
|
||||
ret := lookup{
|
||||
shortNames: make(map[string]*Option),
|
||||
longNames: make(map[string]*Option),
|
||||
commands: make(map[string]*Command),
|
||||
}
|
||||
|
||||
parent := c.parent
|
||||
|
||||
var parents []*Command
|
||||
|
||||
for parent != nil {
|
||||
if cmd, ok := parent.(*Command); ok {
|
||||
parents = append(parents, cmd)
|
||||
parent = cmd.parent
|
||||
} else {
|
||||
parent = nil
|
||||
}
|
||||
}
|
||||
|
||||
for i := len(parents) - 1; i >= 0; i-- {
|
||||
parents[i].fillLookup(&ret, true)
|
||||
}
|
||||
|
||||
c.fillLookup(&ret, false)
|
||||
return ret
|
||||
}
|
||||
|
||||
func (c *Command) fillLookup(ret *lookup, onlyOptions bool) {
|
||||
c.eachGroup(func(g *Group) {
|
||||
for _, option := range g.options {
|
||||
if option.ShortName != 0 {
|
||||
ret.shortNames[string(option.ShortName)] = option
|
||||
}
|
||||
|
||||
if len(option.LongName) > 0 {
|
||||
ret.longNames[option.LongNameWithNamespace()] = option
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if onlyOptions {
|
||||
return
|
||||
}
|
||||
|
||||
for _, subcommand := range c.commands {
|
||||
ret.commands[subcommand.Name] = subcommand
|
||||
|
||||
for _, a := range subcommand.Aliases {
|
||||
ret.commands[a] = subcommand
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Command) groupByName(name string) *Group {
|
||||
if grp := c.Group.groupByName(name); grp != nil {
|
||||
return grp
|
||||
}
|
||||
|
||||
for _, subc := range c.commands {
|
||||
prefix := subc.Name + "."
|
||||
|
||||
if strings.HasPrefix(name, prefix) {
|
||||
if grp := subc.groupByName(name[len(prefix):]); grp != nil {
|
||||
return grp
|
||||
}
|
||||
} else if name == subc.Name {
|
||||
return subc.Group
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type commandList []*Command
|
||||
|
||||
func (c commandList) Less(i, j int) bool {
|
||||
return c[i].Name < c[j].Name
|
||||
}
|
||||
|
||||
func (c commandList) Len() int {
|
||||
return len(c)
|
||||
}
|
||||
|
||||
func (c commandList) Swap(i, j int) {
|
||||
c[i], c[j] = c[j], c[i]
|
||||
}
|
||||
|
||||
func (c *Command) sortedVisibleCommands() []*Command {
|
||||
ret := commandList(c.visibleCommands())
|
||||
sort.Sort(ret)
|
||||
|
||||
return []*Command(ret)
|
||||
}
|
||||
|
||||
func (c *Command) visibleCommands() []*Command {
|
||||
ret := make([]*Command, 0, len(c.commands))
|
||||
|
||||
for _, cmd := range c.commands {
|
||||
if !cmd.Hidden {
|
||||
ret = append(ret, cmd)
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (c *Command) match(name string) bool {
|
||||
if c.Name == name {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, v := range c.Aliases {
|
||||
if v == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (c *Command) hasCliOptions() bool {
|
||||
ret := false
|
||||
|
||||
c.eachGroup(func(g *Group) {
|
||||
if g.isBuiltinHelp {
|
||||
return
|
||||
}
|
||||
|
||||
for _, opt := range g.options {
|
||||
if opt.canCli() {
|
||||
ret = true
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (c *Command) fillParseState(s *parseState) {
|
||||
s.positional = make([]*Arg, len(c.args))
|
||||
copy(s.positional, c.args)
|
||||
|
||||
s.lookup = c.makeLookup()
|
||||
s.command = c
|
||||
}
|
544
vendor/github.com/jessevdk/go-flags/command_test.go
generated
vendored
Normal file
@@ -0,0 +1,544 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCommandInline(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
p, ret := assertParserSuccess(t, &opts, "-v", "cmd", "-g")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if p.Active == nil {
|
||||
t.Errorf("Expected active command")
|
||||
}
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Command.G {
|
||||
t.Errorf("Expected Command.G to be true")
|
||||
}
|
||||
|
||||
if p.Command.Find("cmd") != p.Active {
|
||||
t.Errorf("Expected to find command `cmd' to be active")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandInlineMulti(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
C1 struct {
|
||||
} `command:"c1"`
|
||||
|
||||
C2 struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"c2"`
|
||||
}{}
|
||||
|
||||
p, ret := assertParserSuccess(t, &opts, "-v", "c2", "-g")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if p.Active == nil {
|
||||
t.Errorf("Expected active command")
|
||||
}
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.C2.G {
|
||||
t.Errorf("Expected C2.G to be true")
|
||||
}
|
||||
|
||||
if p.Command.Find("c1") == nil {
|
||||
t.Errorf("Expected to find command `c1'")
|
||||
}
|
||||
|
||||
if c2 := p.Command.Find("c2"); c2 == nil {
|
||||
t.Errorf("Expected to find command `c2'")
|
||||
} else if c2 != p.Active {
|
||||
t.Errorf("Expected to find command `c2' to be active")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandFlagOrder1(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrUnknownFlag, "unknown flag `g'", &opts, "-v", "-g", "cmd")
|
||||
}
|
||||
|
||||
func TestCommandFlagOrder2(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cmd", "-v", "-g")
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Command.G {
|
||||
t.Errorf("Expected Command.G to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandFlagOrderSub(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
|
||||
SubCommand struct {
|
||||
B bool `short:"b"`
|
||||
} `command:"sub"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cmd", "sub", "-v", "-g", "-b")
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Command.G {
|
||||
t.Errorf("Expected Command.G to be true")
|
||||
}
|
||||
|
||||
if !opts.Command.SubCommand.B {
|
||||
t.Errorf("Expected Command.SubCommand.B to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandFlagOverride1(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
Value bool `short:"v"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "-v", "cmd")
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if opts.Command.Value {
|
||||
t.Errorf("Expected Command.Value to be false")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandFlagOverride2(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
Value bool `short:"v"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cmd", "-v")
|
||||
|
||||
if opts.Value {
|
||||
t.Errorf("Expected Value to be false")
|
||||
}
|
||||
|
||||
if !opts.Command.Value {
|
||||
t.Errorf("Expected Command.Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandFlagOverrideSub(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
SubCommand struct {
|
||||
Value bool `short:"v"`
|
||||
} `command:"sub"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cmd", "sub", "-v")
|
||||
|
||||
if opts.Value {
|
||||
t.Errorf("Expected Value to be false")
|
||||
}
|
||||
|
||||
if opts.Command.Value {
|
||||
t.Errorf("Expected Command.Value to be false")
|
||||
}
|
||||
|
||||
if !opts.Command.SubCommand.Value {
|
||||
t.Errorf("Expected Command.Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandFlagOverrideSub2(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
SubCommand struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"sub"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cmd", "sub", "-v")
|
||||
|
||||
if opts.Value {
|
||||
t.Errorf("Expected Value to be false")
|
||||
}
|
||||
|
||||
if !opts.Command.Value {
|
||||
t.Errorf("Expected Command.Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandEstimate(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Cmd1 struct {
|
||||
} `command:"remove"`
|
||||
|
||||
Cmd2 struct {
|
||||
} `command:"add"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{})
|
||||
|
||||
assertError(t, err, ErrCommandRequired, "Please specify one command of: add or remove")
|
||||
}
|
||||
|
||||
func TestCommandEstimate2(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Cmd1 struct {
|
||||
} `command:"remove"`
|
||||
|
||||
Cmd2 struct {
|
||||
} `command:"add"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
_, err := p.ParseArgs([]string{"rmive"})
|
||||
|
||||
assertError(t, err, ErrUnknownCommand, "Unknown command `rmive', did you mean `remove'?")
|
||||
}
|
||||
|
||||
type testCommand struct {
|
||||
G bool `short:"g"`
|
||||
Executed bool
|
||||
EArgs []string
|
||||
}
|
||||
|
||||
func (c *testCommand) Execute(args []string) error {
|
||||
c.Executed = true
|
||||
c.EArgs = args
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func TestCommandExecute(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command testCommand `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "-v", "cmd", "-g", "a", "b")
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Command.Executed {
|
||||
t.Errorf("Did not execute command")
|
||||
}
|
||||
|
||||
if !opts.Command.G {
|
||||
t.Errorf("Expected Command.G to be true")
|
||||
}
|
||||
|
||||
assertStringArray(t, opts.Command.EArgs, []string{"a", "b"})
|
||||
}
|
||||
|
||||
func TestCommandClosest(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Cmd1 struct {
|
||||
} `command:"remove"`
|
||||
|
||||
Cmd2 struct {
|
||||
} `command:"add"`
|
||||
}{}
|
||||
|
||||
args := assertParseFail(t, ErrUnknownCommand, "Unknown command `addd', did you mean `add'?", &opts, "-v", "addd")
|
||||
|
||||
assertStringArray(t, args, []string{"addd"})
|
||||
}
|
||||
|
||||
func TestCommandAdd(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
}{}
|
||||
|
||||
var cmd = struct {
|
||||
G bool `short:"g"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
c, err := p.AddCommand("cmd", "", "", &cmd)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
ret, err := p.ParseArgs([]string{"-v", "cmd", "-g", "rest"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assertStringArray(t, ret, []string{"rest"})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !cmd.G {
|
||||
t.Errorf("Expected Command.G to be true")
|
||||
}
|
||||
|
||||
if p.Command.Find("cmd") != c {
|
||||
t.Errorf("Expected to find command `cmd'")
|
||||
}
|
||||
|
||||
if p.Commands()[0] != c {
|
||||
t.Errorf("Expected command %#v, but got %#v", c, p.Commands()[0])
|
||||
}
|
||||
|
||||
if c.Options()[0].ShortName != 'g' {
|
||||
t.Errorf("Expected short name `g' but got %v", c.Options()[0].ShortName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandNestedInline(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
|
||||
Nested struct {
|
||||
N string `long:"n"`
|
||||
} `command:"nested"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
p, ret := assertParserSuccess(t, &opts, "-v", "cmd", "-g", "nested", "--n", "n", "rest")
|
||||
|
||||
assertStringArray(t, ret, []string{"rest"})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Command.G {
|
||||
t.Errorf("Expected Command.G to be true")
|
||||
}
|
||||
|
||||
assertString(t, opts.Command.Nested.N, "n")
|
||||
|
||||
if c := p.Command.Find("cmd"); c == nil {
|
||||
t.Errorf("Expected to find command `cmd'")
|
||||
} else {
|
||||
if c != p.Active {
|
||||
t.Errorf("Expected `cmd' to be the active parser command")
|
||||
}
|
||||
|
||||
if nested := c.Find("nested"); nested == nil {
|
||||
t.Errorf("Expected to find command `nested'")
|
||||
} else if nested != c.Active {
|
||||
t.Errorf("Expected to find command `nested' to be the active `cmd' command")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRequiredOnCommand(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v" required:"true"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrRequired, fmt.Sprintf("the required flag `%cv' was not specified", defaultShortOptDelimiter), &opts, "cmd")
|
||||
}
|
||||
|
||||
func TestRequiredAllOnCommand(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v" required:"true"`
|
||||
Missing bool `long:"missing" required:"true"`
|
||||
|
||||
Command struct {
|
||||
G bool `short:"g"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrRequired, fmt.Sprintf("the required flags `%smissing' and `%cv' were not specified", defaultLongOptDelimiter, defaultShortOptDelimiter), &opts, "cmd")
|
||||
}
|
||||
|
||||
func TestDefaultOnCommand(t *testing.T) {
|
||||
var opts = struct {
|
||||
Command struct {
|
||||
G string `short:"g" default:"value"`
|
||||
} `command:"cmd"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cmd")
|
||||
|
||||
if opts.Command.G != "value" {
|
||||
t.Errorf("Expected G to be \"value\"")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSubcommandsOptional(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Cmd1 struct {
|
||||
} `command:"remove"`
|
||||
|
||||
Cmd2 struct {
|
||||
} `command:"add"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
p.SubcommandsOptional = true
|
||||
|
||||
_, err := p.ParseArgs([]string{"-v"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCommandAlias(t *testing.T) {
|
||||
var opts = struct {
|
||||
Command struct {
|
||||
G string `short:"g" default:"value"`
|
||||
} `command:"cmd" alias:"cm"`
|
||||
}{}
|
||||
|
||||
assertParseSuccess(t, &opts, "cm")
|
||||
|
||||
if opts.Command.G != "value" {
|
||||
t.Errorf("Expected G to be \"value\"")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSubCommandFindOptionByLongFlag(t *testing.T) {
|
||||
var opts struct {
|
||||
Testing bool `long:"testing" description:"Testing"`
|
||||
}
|
||||
|
||||
var cmd struct {
|
||||
Other bool `long:"other" description:"Other"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
c, _ := p.AddCommand("command", "Short", "Long", &cmd)
|
||||
|
||||
opt := c.FindOptionByLongName("other")
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
assertString(t, opt.LongName, "other")
|
||||
|
||||
opt = c.FindOptionByLongName("testing")
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
assertString(t, opt.LongName, "testing")
|
||||
}
|
||||
|
||||
func TestSubCommandFindOptionByShortFlag(t *testing.T) {
|
||||
var opts struct {
|
||||
Testing bool `short:"t" description:"Testing"`
|
||||
}
|
||||
|
||||
var cmd struct {
|
||||
Other bool `short:"o" description:"Other"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
c, _ := p.AddCommand("command", "Short", "Long", &cmd)
|
||||
|
||||
opt := c.FindOptionByShortName('o')
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
if opt.ShortName != 'o' {
|
||||
t.Errorf("Expected 'o', but got %v", opt.ShortName)
|
||||
}
|
||||
|
||||
opt = c.FindOptionByShortName('t')
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
if opt.ShortName != 't' {
|
||||
t.Errorf("Expected 't', but got %v", opt.ShortName)
|
||||
}
|
||||
}
|
300
vendor/github.com/jessevdk/go-flags/completion.go
generated
vendored
Normal file
@@ -0,0 +1,300 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Completion is a type containing information of a completion.
|
||||
type Completion struct {
|
||||
// The completed item
|
||||
Item string
|
||||
|
||||
// A description of the completed item (optional)
|
||||
Description string
|
||||
}
|
||||
|
||||
type completions []Completion
|
||||
|
||||
func (c completions) Len() int {
|
||||
return len(c)
|
||||
}
|
||||
|
||||
func (c completions) Less(i, j int) bool {
|
||||
return c[i].Item < c[j].Item
|
||||
}
|
||||
|
||||
func (c completions) Swap(i, j int) {
|
||||
c[i], c[j] = c[j], c[i]
|
||||
}
|
||||
|
||||
// Completer is an interface which can be implemented by types
|
||||
// to provide custom command line argument completion.
|
||||
type Completer interface {
|
||||
// Complete receives a prefix representing a (partial) value
|
||||
// for its type and should provide a list of possible valid
|
||||
// completions.
|
||||
Complete(match string) []Completion
|
||||
}
|
||||
|
||||
type completion struct {
|
||||
parser *Parser
|
||||
}
|
||||
|
||||
// Filename is a string alias which provides filename completion.
|
||||
type Filename string
|
||||
|
||||
func completionsWithoutDescriptions(items []string) []Completion {
|
||||
ret := make([]Completion, len(items))
|
||||
|
||||
for i, v := range items {
|
||||
ret[i].Item = v
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
// Complete returns a list of existing files with the given
|
||||
// prefix.
|
||||
func (f *Filename) Complete(match string) []Completion {
|
||||
ret, _ := filepath.Glob(match + "*")
|
||||
return completionsWithoutDescriptions(ret)
|
||||
}
|
||||
|
||||
func (c *completion) skipPositional(s *parseState, n int) {
|
||||
if n >= len(s.positional) {
|
||||
s.positional = nil
|
||||
} else {
|
||||
s.positional = s.positional[n:]
|
||||
}
|
||||
}
|
||||
|
||||
func (c *completion) completeOptionNames(names map[string]*Option, prefix string, match string) []Completion {
|
||||
n := make([]Completion, 0, len(names))
|
||||
|
||||
for k, opt := range names {
|
||||
if strings.HasPrefix(k, match) {
|
||||
n = append(n, Completion{
|
||||
Item: prefix + k,
|
||||
Description: opt.Description,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return n
|
||||
}
|
||||
|
||||
func (c *completion) completeLongNames(s *parseState, prefix string, match string) []Completion {
|
||||
return c.completeOptionNames(s.lookup.longNames, prefix, match)
|
||||
}
|
||||
|
||||
func (c *completion) completeShortNames(s *parseState, prefix string, match string) []Completion {
|
||||
if len(match) != 0 {
|
||||
return []Completion{
|
||||
Completion{
|
||||
Item: prefix + match,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return c.completeOptionNames(s.lookup.shortNames, prefix, match)
|
||||
}
|
||||
|
||||
func (c *completion) completeCommands(s *parseState, match string) []Completion {
|
||||
n := make([]Completion, 0, len(s.command.commands))
|
||||
|
||||
for _, cmd := range s.command.commands {
|
||||
if cmd.data != c && strings.HasPrefix(cmd.Name, match) {
|
||||
n = append(n, Completion{
|
||||
Item: cmd.Name,
|
||||
Description: cmd.ShortDescription,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return n
|
||||
}
|
||||
|
||||
func (c *completion) completeValue(value reflect.Value, prefix string, match string) []Completion {
|
||||
i := value.Interface()
|
||||
|
||||
var ret []Completion
|
||||
|
||||
if cmp, ok := i.(Completer); ok {
|
||||
ret = cmp.Complete(match)
|
||||
} else if value.CanAddr() {
|
||||
if cmp, ok = value.Addr().Interface().(Completer); ok {
|
||||
ret = cmp.Complete(match)
|
||||
}
|
||||
}
|
||||
|
||||
for i, v := range ret {
|
||||
ret[i].Item = prefix + v.Item
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (c *completion) completeArg(arg *Arg, prefix string, match string) []Completion {
|
||||
if arg.isRemaining() {
|
||||
// For remaining positional args (that are parsed into a slice), complete
|
||||
// based on the element type.
|
||||
return c.completeValue(reflect.New(arg.value.Type().Elem()), prefix, match)
|
||||
}
|
||||
|
||||
return c.completeValue(arg.value, prefix, match)
|
||||
}
|
||||
|
||||
func (c *completion) complete(args []string) []Completion {
|
||||
if len(args) == 0 {
|
||||
args = []string{""}
|
||||
}
|
||||
|
||||
s := &parseState{
|
||||
args: args,
|
||||
}
|
||||
|
||||
c.parser.fillParseState(s)
|
||||
|
||||
var opt *Option
|
||||
|
||||
for len(s.args) > 1 {
|
||||
arg := s.pop()
|
||||
|
||||
if (c.parser.Options&PassDoubleDash) != None && arg == "--" {
|
||||
opt = nil
|
||||
c.skipPositional(s, len(s.args)-1)
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
if argumentIsOption(arg) {
|
||||
prefix, optname, islong := stripOptionPrefix(arg)
|
||||
optname, _, argument := splitOption(prefix, optname, islong)
|
||||
|
||||
if argument == nil {
|
||||
var o *Option
|
||||
canarg := true
|
||||
|
||||
if islong {
|
||||
o = s.lookup.longNames[optname]
|
||||
} else {
|
||||
for i, r := range optname {
|
||||
sname := string(r)
|
||||
o = s.lookup.shortNames[sname]
|
||||
|
||||
if o == nil {
|
||||
break
|
||||
}
|
||||
|
||||
if i == 0 && o.canArgument() && len(optname) != len(sname) {
|
||||
canarg = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if o == nil && (c.parser.Options&PassAfterNonOption) != None {
|
||||
opt = nil
|
||||
c.skipPositional(s, len(s.args)-1)
|
||||
|
||||
break
|
||||
} else if o != nil && o.canArgument() && !o.OptionalArgument && canarg {
|
||||
if len(s.args) > 1 {
|
||||
s.pop()
|
||||
} else {
|
||||
opt = o
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if len(s.positional) > 0 {
|
||||
if !s.positional[0].isRemaining() {
|
||||
// Don't advance beyond a remaining positional arg (because
|
||||
// it consumes all subsequent args).
|
||||
s.positional = s.positional[1:]
|
||||
}
|
||||
} else if cmd, ok := s.lookup.commands[arg]; ok {
|
||||
cmd.fillParseState(s)
|
||||
}
|
||||
|
||||
opt = nil
|
||||
}
|
||||
}
|
||||
|
||||
lastarg := s.args[len(s.args)-1]
|
||||
var ret []Completion
|
||||
|
||||
if opt != nil {
|
||||
// Completion for the argument of 'opt'
|
||||
ret = c.completeValue(opt.value, "", lastarg)
|
||||
} else if argumentStartsOption(lastarg) {
|
||||
// Complete the option
|
||||
prefix, optname, islong := stripOptionPrefix(lastarg)
|
||||
optname, split, argument := splitOption(prefix, optname, islong)
|
||||
|
||||
if argument == nil && !islong {
|
||||
rname, n := utf8.DecodeRuneInString(optname)
|
||||
sname := string(rname)
|
||||
|
||||
if opt := s.lookup.shortNames[sname]; opt != nil && opt.canArgument() {
|
||||
ret = c.completeValue(opt.value, prefix+sname, optname[n:])
|
||||
} else {
|
||||
ret = c.completeShortNames(s, prefix, optname)
|
||||
}
|
||||
} else if argument != nil {
|
||||
if islong {
|
||||
opt = s.lookup.longNames[optname]
|
||||
} else {
|
||||
opt = s.lookup.shortNames[optname]
|
||||
}
|
||||
|
||||
if opt != nil {
|
||||
ret = c.completeValue(opt.value, prefix+optname+split, *argument)
|
||||
}
|
||||
} else if islong {
|
||||
ret = c.completeLongNames(s, prefix, optname)
|
||||
} else {
|
||||
ret = c.completeShortNames(s, prefix, optname)
|
||||
}
|
||||
} else if len(s.positional) > 0 {
|
||||
// Complete for positional argument
|
||||
ret = c.completeArg(s.positional[0], "", lastarg)
|
||||
} else if len(s.command.commands) > 0 {
|
||||
// Complete for command
|
||||
ret = c.completeCommands(s, lastarg)
|
||||
}
|
||||
|
||||
sort.Sort(completions(ret))
|
||||
return ret
|
||||
}
|
||||
|
||||
func (c *completion) print(items []Completion, showDescriptions bool) {
|
||||
if showDescriptions && len(items) > 1 {
|
||||
maxl := 0
|
||||
|
||||
for _, v := range items {
|
||||
if len(v.Item) > maxl {
|
||||
maxl = len(v.Item)
|
||||
}
|
||||
}
|
||||
|
||||
for _, v := range items {
|
||||
fmt.Printf("%s", v.Item)
|
||||
|
||||
if len(v.Description) > 0 {
|
||||
fmt.Printf("%s # %s", strings.Repeat(" ", maxl-len(v.Item)), v.Description)
|
||||
}
|
||||
|
||||
fmt.Printf("\n")
|
||||
}
|
||||
} else {
|
||||
for _, v := range items {
|
||||
fmt.Println(v.Item)
|
||||
}
|
||||
}
|
||||
}
|
294
vendor/github.com/jessevdk/go-flags/completion_test.go
generated
vendored
Normal file
@ -0,0 +1,294 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type TestComplete struct {
|
||||
}
|
||||
|
||||
func (t *TestComplete) Complete(match string) []Completion {
|
||||
options := []string{
|
||||
"hello world",
|
||||
"hello universe",
|
||||
"hello multiverse",
|
||||
}
|
||||
|
||||
ret := make([]Completion, 0, len(options))
|
||||
|
||||
for _, o := range options {
|
||||
if strings.HasPrefix(o, match) {
|
||||
ret = append(ret, Completion{
|
||||
Item: o,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
var completionTestOptions struct {
|
||||
Verbose bool `short:"v" long:"verbose" description:"Verbose messages"`
|
||||
Debug bool `short:"d" long:"debug" description:"Enable debug"`
|
||||
Version bool `long:"version" description:"Show version"`
|
||||
Required bool `long:"required" required:"true" description:"This is required"`
|
||||
|
||||
AddCommand struct {
|
||||
Positional struct {
|
||||
Filename Filename
|
||||
} `positional-args:"yes"`
|
||||
} `command:"add" description:"add an item"`
|
||||
|
||||
AddMultiCommand struct {
|
||||
Positional struct {
|
||||
Filename []Filename
|
||||
} `positional-args:"yes"`
|
||||
} `command:"add-multi" description:"add multiple items"`
|
||||
|
||||
RemoveCommand struct {
|
||||
Other bool `short:"o"`
|
||||
File Filename `short:"f" long:"filename"`
|
||||
} `command:"rm" description:"remove an item"`
|
||||
|
||||
RenameCommand struct {
|
||||
Completed TestComplete `short:"c" long:"completed"`
|
||||
} `command:"rename" description:"rename an item"`
|
||||
}
|
||||
|
||||
type completionTest struct {
|
||||
Args []string
|
||||
Completed []string
|
||||
ShowDescriptions bool
|
||||
}
|
||||
|
||||
var completionTests []completionTest
|
||||
|
||||
func init() {
|
||||
_, sourcefile, _, _ := runtime.Caller(0)
|
||||
completionTestSourcedir := filepath.Join(filepath.SplitList(path.Dir(sourcefile))...)
|
||||
|
||||
completionTestFilename := []string{filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion_test.go")}
|
||||
|
||||
completionTests = []completionTest{
|
||||
{
|
||||
// Short names
|
||||
[]string{"-"},
|
||||
[]string{"-d", "-v"},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Short names concatenated
|
||||
[]string{"-dv"},
|
||||
[]string{"-dv"},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Long names
|
||||
[]string{"--"},
|
||||
[]string{"--debug", "--required", "--verbose", "--version"},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Long names with descriptions
|
||||
[]string{"--"},
|
||||
[]string{
|
||||
"--debug # Enable debug",
|
||||
"--required # This is required",
|
||||
"--verbose # Verbose messages",
|
||||
"--version # Show version",
|
||||
},
|
||||
true,
|
||||
},
|
||||
|
||||
{
|
||||
// Long names partial
|
||||
[]string{"--ver"},
|
||||
[]string{"--verbose", "--version"},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Commands
|
||||
[]string{""},
|
||||
[]string{"add", "add-multi", "rename", "rm"},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Commands with descriptions
|
||||
[]string{""},
|
||||
[]string{
|
||||
"add # add an item",
|
||||
"add-multi # add multiple items",
|
||||
"rename # rename an item",
|
||||
"rm # remove an item",
|
||||
},
|
||||
true,
|
||||
},
|
||||
|
||||
{
|
||||
// Commands partial
|
||||
[]string{"r"},
|
||||
[]string{"rename", "rm"},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Positional filename
|
||||
[]string{"add", filepath.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Multiple positional filename (1 arg)
|
||||
[]string{"add-multi", filepath.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
{
|
||||
// Multiple positional filename (2 args)
|
||||
[]string{"add-multi", filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
{
|
||||
// Multiple positional filename (3 args)
|
||||
[]string{"add-multi", filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Flag filename
|
||||
[]string{"rm", "-f", path.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Flag short concat last filename
|
||||
[]string{"rm", "-of", path.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Flag concat filename
|
||||
[]string{"rm", "-f" + path.Join(completionTestSourcedir, "completion")},
|
||||
[]string{"-f" + completionTestFilename[0], "-f" + completionTestFilename[1]},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Flag equal concat filename
|
||||
[]string{"rm", "-f=" + path.Join(completionTestSourcedir, "completion")},
|
||||
[]string{"-f=" + completionTestFilename[0], "-f=" + completionTestFilename[1]},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Flag concat long filename
|
||||
[]string{"rm", "--filename=" + path.Join(completionTestSourcedir, "completion")},
|
||||
[]string{"--filename=" + completionTestFilename[0], "--filename=" + completionTestFilename[1]},
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Flag long filename
|
||||
[]string{"rm", "--filename", path.Join(completionTestSourcedir, "completion")},
|
||||
completionTestFilename,
|
||||
false,
|
||||
},
|
||||
|
||||
{
|
||||
// Custom completed
|
||||
[]string{"rename", "-c", "hello un"},
|
||||
[]string{"hello universe"},
|
||||
false,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompletion(t *testing.T) {
|
||||
p := NewParser(&completionTestOptions, Default)
|
||||
c := &completion{parser: p}
|
||||
|
||||
for _, test := range completionTests {
|
||||
if test.ShowDescriptions {
|
||||
continue
|
||||
}
|
||||
|
||||
ret := c.complete(test.Args)
|
||||
items := make([]string, len(ret))
|
||||
|
||||
for i, v := range ret {
|
||||
items[i] = v.Item
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(items, test.Completed) {
|
||||
t.Errorf("Args: %#v, %#v\n Expected: %#v\n Got: %#v", test.Args, test.ShowDescriptions, test.Completed, items)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParserCompletion(t *testing.T) {
|
||||
for _, test := range completionTests {
|
||||
if test.ShowDescriptions {
|
||||
os.Setenv("GO_FLAGS_COMPLETION", "verbose")
|
||||
} else {
|
||||
os.Setenv("GO_FLAGS_COMPLETION", "1")
|
||||
}
|
||||
|
||||
tmp := os.Stdout
|
||||
|
||||
r, w, _ := os.Pipe()
|
||||
os.Stdout = w
|
||||
|
||||
out := make(chan string)
|
||||
|
||||
go func() {
|
||||
var buf bytes.Buffer
|
||||
|
||||
io.Copy(&buf, r)
|
||||
|
||||
out <- buf.String()
|
||||
}()
|
||||
|
||||
p := NewParser(&completionTestOptions, None)
|
||||
|
||||
p.CompletionHandler = func(items []Completion) {
|
||||
comp := &completion{parser: p}
|
||||
comp.print(items, test.ShowDescriptions)
|
||||
}
|
||||
|
||||
_, err := p.ParseArgs(test.Args)
|
||||
|
||||
w.Close()
|
||||
|
||||
os.Stdout = tmp
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %s", err)
|
||||
}
|
||||
|
||||
got := strings.Split(strings.Trim(<-out, "\n"), "\n")
|
||||
|
||||
if !reflect.DeepEqual(got, test.Completed) {
|
||||
t.Errorf("Expected: %#v\nGot: %#v", test.Completed, got)
|
||||
}
|
||||
}
|
||||
|
||||
os.Setenv("GO_FLAGS_COMPLETION", "")
|
||||
}
|
341
vendor/github.com/jessevdk/go-flags/convert.go
generated
vendored
Normal file
@ -0,0 +1,341 @@
|
||||
// Copyright 2012 Jesse van den Kieboom. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Marshaler is the interface implemented by types that can marshal themselves
|
||||
// to a string representation of the flag.
|
||||
type Marshaler interface {
|
||||
// MarshalFlag marshals a flag value to its string representation.
|
||||
MarshalFlag() (string, error)
|
||||
}
|
||||
|
||||
// Unmarshaler is the interface implemented by types that can unmarshal a flag
|
||||
// argument to themselves. The provided value is directly passed from the
|
||||
// command line.
|
||||
type Unmarshaler interface {
|
||||
// UnmarshalFlag unmarshals a string value representation to the flag
|
||||
// value (which therefore needs to be a pointer receiver).
|
||||
UnmarshalFlag(value string) error
|
||||
}
|
||||
|
||||
func getBase(options multiTag, base int) (int, error) {
|
||||
sbase := options.Get("base")
|
||||
|
||||
var err error
|
||||
var ivbase int64
|
||||
|
||||
if sbase != "" {
|
||||
ivbase, err = strconv.ParseInt(sbase, 10, 32)
|
||||
base = int(ivbase)
|
||||
}
|
||||
|
||||
return base, err
|
||||
}
|
||||
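// Illustrative note (the field name and tag value below are invented for
// this sketch): getBase reads the optional `base` struct tag, so a field
// such as
//
//	Mask uint32 `long:"mask" base:"16"`
//
// is parsed and formatted with base 16 by the conversion functions below.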
|
||||
func convertMarshal(val reflect.Value) (bool, string, error) {
|
||||
// Check first for the Marshaler interface
|
||||
if val.Type().NumMethod() > 0 && val.CanInterface() {
|
||||
if marshaler, ok := val.Interface().(Marshaler); ok {
|
||||
ret, err := marshaler.MarshalFlag()
|
||||
return true, ret, err
|
||||
}
|
||||
}
|
||||
|
||||
return false, "", nil
|
||||
}
|
||||
|
||||
func convertToString(val reflect.Value, options multiTag) (string, error) {
|
||||
if ok, ret, err := convertMarshal(val); ok {
|
||||
return ret, err
|
||||
}
|
||||
|
||||
tp := val.Type()
|
||||
|
||||
// Support for time.Duration
|
||||
if tp == reflect.TypeOf((*time.Duration)(nil)).Elem() {
|
||||
stringer := val.Interface().(fmt.Stringer)
|
||||
return stringer.String(), nil
|
||||
}
|
||||
|
||||
switch tp.Kind() {
|
||||
case reflect.String:
|
||||
return val.String(), nil
|
||||
case reflect.Bool:
|
||||
if val.Bool() {
|
||||
return "true", nil
|
||||
}
|
||||
|
||||
return "false", nil
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
base, err := getBase(options, 10)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return strconv.FormatInt(val.Int(), base), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
base, err := getBase(options, 10)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return strconv.FormatUint(val.Uint(), base), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return strconv.FormatFloat(val.Float(), 'g', -1, tp.Bits()), nil
|
||||
case reflect.Slice:
|
||||
if val.Len() == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
ret := "["
|
||||
|
||||
for i := 0; i < val.Len(); i++ {
|
||||
if i != 0 {
|
||||
ret += ", "
|
||||
}
|
||||
|
||||
item, err := convertToString(val.Index(i), options)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
ret += item
|
||||
}
|
||||
|
||||
return ret + "]", nil
|
||||
case reflect.Map:
|
||||
ret := "{"
|
||||
|
||||
for i, key := range val.MapKeys() {
|
||||
if i != 0 {
|
||||
ret += ", "
|
||||
}
|
||||
|
||||
keyitem, err := convertToString(key, options)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
item, err := convertToString(val.MapIndex(key), options)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
ret += keyitem + ":" + item
|
||||
}
|
||||
|
||||
return ret + "}", nil
|
||||
case reflect.Ptr:
|
||||
return convertToString(reflect.Indirect(val), options)
|
||||
case reflect.Interface:
|
||||
if !val.IsNil() {
|
||||
return convertToString(val.Elem(), options)
|
||||
}
|
||||
}
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func convertUnmarshal(val string, retval reflect.Value) (bool, error) {
|
||||
if retval.Type().NumMethod() > 0 && retval.CanInterface() {
|
||||
if unmarshaler, ok := retval.Interface().(Unmarshaler); ok {
|
||||
return true, unmarshaler.UnmarshalFlag(val)
|
||||
}
|
||||
}
|
||||
|
||||
if retval.Type().Kind() != reflect.Ptr && retval.CanAddr() {
|
||||
return convertUnmarshal(val, retval.Addr())
|
||||
}
|
||||
|
||||
if retval.Type().Kind() == reflect.Interface && !retval.IsNil() {
|
||||
return convertUnmarshal(val, retval.Elem())
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func convert(val string, retval reflect.Value, options multiTag) error {
|
||||
if ok, err := convertUnmarshal(val, retval); ok {
|
||||
return err
|
||||
}
|
||||
|
||||
tp := retval.Type()
|
||||
|
||||
// Support for time.Duration
|
||||
if tp == reflect.TypeOf((*time.Duration)(nil)).Elem() {
|
||||
parsed, err := time.ParseDuration(val)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval.SetInt(int64(parsed))
|
||||
return nil
|
||||
}
|
||||
|
||||
switch tp.Kind() {
|
||||
case reflect.String:
|
||||
retval.SetString(val)
|
||||
case reflect.Bool:
|
||||
if val == "" {
|
||||
retval.SetBool(true)
|
||||
} else {
|
||||
b, err := strconv.ParseBool(val)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval.SetBool(b)
|
||||
}
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
base, err := getBase(options, 10)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
parsed, err := strconv.ParseInt(val, base, tp.Bits())
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval.SetInt(parsed)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
base, err := getBase(options, 10)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
parsed, err := strconv.ParseUint(val, base, tp.Bits())
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval.SetUint(parsed)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
parsed, err := strconv.ParseFloat(val, tp.Bits())
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval.SetFloat(parsed)
|
||||
case reflect.Slice:
|
||||
elemtp := tp.Elem()
|
||||
|
||||
elemvalptr := reflect.New(elemtp)
|
||||
elemval := reflect.Indirect(elemvalptr)
|
||||
|
||||
if err := convert(val, elemval, options); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval.Set(reflect.Append(retval, elemval))
|
||||
case reflect.Map:
|
||||
parts := strings.SplitN(val, ":", 2)
|
||||
|
||||
key := parts[0]
|
||||
var value string
|
||||
|
||||
if len(parts) == 2 {
|
||||
value = parts[1]
|
||||
}
|
||||
|
||||
keytp := tp.Key()
|
||||
keyval := reflect.New(keytp)
|
||||
|
||||
if err := convert(key, keyval, options); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
valuetp := tp.Elem()
|
||||
valueval := reflect.New(valuetp)
|
||||
|
||||
if err := convert(value, valueval, options); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if retval.IsNil() {
|
||||
retval.Set(reflect.MakeMap(tp))
|
||||
}
|
||||
|
||||
retval.SetMapIndex(reflect.Indirect(keyval), reflect.Indirect(valueval))
|
||||
case reflect.Ptr:
|
||||
if retval.IsNil() {
|
||||
retval.Set(reflect.New(retval.Type().Elem()))
|
||||
}
|
||||
|
||||
return convert(val, reflect.Indirect(retval), options)
|
||||
case reflect.Interface:
|
||||
if !retval.IsNil() {
|
||||
return convert(val, retval.Elem(), options)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func isPrint(s string) bool {
|
||||
for _, c := range s {
|
||||
if !strconv.IsPrint(c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func quoteIfNeeded(s string) string {
|
||||
if !isPrint(s) {
|
||||
return strconv.Quote(s)
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func quoteIfNeededV(s []string) []string {
|
||||
ret := make([]string, len(s))
|
||||
|
||||
for i, v := range s {
|
||||
ret[i] = quoteIfNeeded(v)
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func quoteV(s []string) []string {
|
||||
ret := make([]string, len(s))
|
||||
|
||||
for i, v := range s {
|
||||
ret[i] = strconv.Quote(v)
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func unquoteIfPossible(s string) (string, error) {
|
||||
if len(s) == 0 || s[0] != '"' {
|
||||
return s, nil
|
||||
}
|
||||
|
||||
return strconv.Unquote(s)
|
||||
}
|
159
vendor/github.com/jessevdk/go-flags/convert_test.go
generated
vendored
Normal file
@ -0,0 +1,159 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func expectConvert(t *testing.T, o *Option, expected string) {
|
||||
s, err := convertToString(o.value, o.tag)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assertString(t, s, expected)
|
||||
}
|
||||
|
||||
func TestConvertToString(t *testing.T) {
|
||||
d, _ := time.ParseDuration("1h2m4s")
|
||||
|
||||
var opts = struct {
|
||||
String string `long:"string"`
|
||||
|
||||
Int int `long:"int"`
|
||||
Int8 int8 `long:"int8"`
|
||||
Int16 int16 `long:"int16"`
|
||||
Int32 int32 `long:"int32"`
|
||||
Int64 int64 `long:"int64"`
|
||||
|
||||
Uint uint `long:"uint"`
|
||||
Uint8 uint8 `long:"uint8"`
|
||||
Uint16 uint16 `long:"uint16"`
|
||||
Uint32 uint32 `long:"uint32"`
|
||||
Uint64 uint64 `long:"uint64"`
|
||||
|
||||
Float32 float32 `long:"float32"`
|
||||
Float64 float64 `long:"float64"`
|
||||
|
||||
Duration time.Duration `long:"duration"`
|
||||
|
||||
Bool bool `long:"bool"`
|
||||
|
||||
IntSlice []int `long:"int-slice"`
|
||||
IntFloatMap map[int]float64 `long:"int-float-map"`
|
||||
|
||||
PtrBool *bool `long:"ptr-bool"`
|
||||
Interface interface{} `long:"interface"`
|
||||
|
||||
Int32Base int32 `long:"int32-base" base:"16"`
|
||||
Uint32Base uint32 `long:"uint32-base" base:"16"`
|
||||
}{
|
||||
"string",
|
||||
|
||||
-2,
|
||||
-1,
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
|
||||
1.2,
|
||||
-3.4,
|
||||
|
||||
d,
|
||||
true,
|
||||
|
||||
[]int{-3, 4, -2},
|
||||
map[int]float64{-2: 4.5},
|
||||
|
||||
new(bool),
|
||||
float32(5.2),
|
||||
|
||||
-5823,
|
||||
4232,
|
||||
}
|
||||
|
||||
p := NewNamedParser("test", Default)
|
||||
grp, _ := p.AddGroup("test group", "", &opts)
|
||||
|
||||
expects := []string{
|
||||
"string",
|
||||
"-2",
|
||||
"-1",
|
||||
"0",
|
||||
"1",
|
||||
"2",
|
||||
|
||||
"1",
|
||||
"2",
|
||||
"3",
|
||||
"4",
|
||||
"5",
|
||||
|
||||
"1.2",
|
||||
"-3.4",
|
||||
|
||||
"1h2m4s",
|
||||
"true",
|
||||
|
||||
"[-3, 4, -2]",
|
||||
"{-2:4.5}",
|
||||
|
||||
"false",
|
||||
"5.2",
|
||||
|
||||
"-16bf",
|
||||
"1088",
|
||||
}
|
||||
|
||||
for i, v := range grp.Options() {
|
||||
expectConvert(t, v, expects[i])
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertToStringInvalidIntBase(t *testing.T) {
|
||||
var opts = struct {
|
||||
Int int `long:"int" base:"no"`
|
||||
}{
|
||||
2,
|
||||
}
|
||||
|
||||
p := NewNamedParser("test", Default)
|
||||
grp, _ := p.AddGroup("test group", "", &opts)
|
||||
o := grp.Options()[0]
|
||||
|
||||
_, err := convertToString(o.value, o.tag)
|
||||
|
||||
if err != nil {
|
||||
err = newErrorf(ErrMarshal, "%v", err)
|
||||
}
|
||||
|
||||
assertError(t, err, ErrMarshal, "strconv.ParseInt: parsing \"no\": invalid syntax")
|
||||
}
|
||||
|
||||
func TestConvertToStringInvalidUintBase(t *testing.T) {
|
||||
var opts = struct {
|
||||
Uint uint `long:"uint" base:"no"`
|
||||
}{
|
||||
2,
|
||||
}
|
||||
|
||||
p := NewNamedParser("test", Default)
|
||||
grp, _ := p.AddGroup("test group", "", &opts)
|
||||
o := grp.Options()[0]
|
||||
|
||||
_, err := convertToString(o.value, o.tag)
|
||||
|
||||
if err != nil {
|
||||
err = newErrorf(ErrMarshal, "%v", err)
|
||||
}
|
||||
|
||||
assertError(t, err, ErrMarshal, "strconv.ParseInt: parsing \"no\": invalid syntax")
|
||||
}
|
134
vendor/github.com/jessevdk/go-flags/error.go
generated
vendored
Normal file
@ -0,0 +1,134 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// ErrorType represents the type of error.
|
||||
type ErrorType uint
|
||||
|
||||
const (
|
||||
// ErrUnknown indicates a generic error.
|
||||
ErrUnknown ErrorType = iota
|
||||
|
||||
// ErrExpectedArgument indicates that an argument was expected.
|
||||
ErrExpectedArgument
|
||||
|
||||
// ErrUnknownFlag indicates an unknown flag.
|
||||
ErrUnknownFlag
|
||||
|
||||
// ErrUnknownGroup indicates an unknown group.
|
||||
ErrUnknownGroup
|
||||
|
||||
// ErrMarshal indicates a marshalling error while converting values.
|
||||
ErrMarshal
|
||||
|
||||
// ErrHelp indicates that the built-in help was shown (the error
|
||||
// contains the help message).
|
||||
ErrHelp
|
||||
|
||||
// ErrNoArgumentForBool indicates that an argument was given for a
|
||||
// boolean flag (which does not take any arguments).
|
||||
ErrNoArgumentForBool
|
||||
|
||||
// ErrRequired indicates that a required flag was not provided.
|
||||
ErrRequired
|
||||
|
||||
// ErrShortNameTooLong indicates that a short flag name longer than
|
||||
// one character was specified.
|
||||
ErrShortNameTooLong
|
||||
|
||||
// ErrDuplicatedFlag indicates that a short or long flag has been
|
||||
// defined more than once
|
||||
ErrDuplicatedFlag
|
||||
|
||||
// ErrTag indicates an error while parsing flag tags.
|
||||
ErrTag
|
||||
|
||||
// ErrCommandRequired indicates that a command was required but not
|
||||
// specified
|
||||
ErrCommandRequired
|
||||
|
||||
// ErrUnknownCommand indicates that an unknown command was specified.
|
||||
ErrUnknownCommand
|
||||
|
||||
// ErrInvalidChoice indicates an invalid option value which only allows
|
||||
// a certain number of choices.
|
||||
ErrInvalidChoice
|
||||
|
||||
// ErrInvalidTag indicates an invalid tag or invalid use of an existing tag
|
||||
ErrInvalidTag
|
||||
)
|
||||
|
||||
func (e ErrorType) String() string {
|
||||
switch e {
|
||||
case ErrUnknown:
|
||||
return "unknown"
|
||||
case ErrExpectedArgument:
|
||||
return "expected argument"
|
||||
case ErrUnknownFlag:
|
||||
return "unknown flag"
|
||||
case ErrUnknownGroup:
|
||||
return "unknown group"
|
||||
case ErrMarshal:
|
||||
return "marshal"
|
||||
case ErrHelp:
|
||||
return "help"
|
||||
case ErrNoArgumentForBool:
|
||||
return "no argument for bool"
|
||||
case ErrRequired:
|
||||
return "required"
|
||||
case ErrShortNameTooLong:
|
||||
return "short name too long"
|
||||
case ErrDuplicatedFlag:
|
||||
return "duplicated flag"
|
||||
case ErrTag:
|
||||
return "tag"
|
||||
case ErrCommandRequired:
|
||||
return "command required"
|
||||
case ErrUnknownCommand:
|
||||
return "unknown command"
|
||||
case ErrInvalidChoice:
|
||||
return "invalid choice"
|
||||
case ErrInvalidTag:
|
||||
return "invalid tag"
|
||||
}
|
||||
|
||||
return "unrecognized error type"
|
||||
}
|
||||
|
||||
// Error represents a parser error. The error returned from Parse is of this
|
||||
// type. The error contains both a Type and Message.
|
||||
type Error struct {
|
||||
// The type of error
|
||||
Type ErrorType
|
||||
|
||||
// The error message
|
||||
Message string
|
||||
}
|
||||
|
||||
// Error returns the error's message
|
||||
func (e *Error) Error() string {
|
||||
return e.Message
|
||||
}
|
||||
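// Illustrative sketch (not part of the library itself): callers commonly
// recover this type to inspect the kind of failure, for example to treat
// the built-in help output as a non-error. The parser value and args used
// below are assumed to exist in the calling code.
//
//	if _, err := parser.ParseArgs(args); err != nil {
//		if flagsErr, ok := err.(*Error); ok && flagsErr.Type == ErrHelp {
//			// the help message was already printed; not a real failure
//			return
//		}
//	}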
|
||||
func newError(tp ErrorType, message string) *Error {
|
||||
return &Error{
|
||||
Type: tp,
|
||||
Message: message,
|
||||
}
|
||||
}
|
||||
|
||||
func newErrorf(tp ErrorType, format string, args ...interface{}) *Error {
|
||||
return newError(tp, fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
func wrapError(err error) *Error {
|
||||
ret, ok := err.(*Error)
|
||||
|
||||
if !ok {
|
||||
return newError(ErrUnknown, err.Error())
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
110
vendor/github.com/jessevdk/go-flags/example_test.go
generated
vendored
Normal file
@ -0,0 +1,110 @@
|
||||
// Example of use of the flags package.
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os/exec"
|
||||
)
|
||||
|
||||
func Example() {
|
||||
var opts struct {
|
||||
// Slice of bool will append 'true' each time the option
|
||||
// is encountered (can be set multiple times, like -vvv)
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
|
||||
|
||||
// Example of automatic marshalling to desired type (uint)
|
||||
Offset uint `long:"offset" description:"Offset"`
|
||||
|
||||
// Example of a callback, called each time the option is found.
|
||||
Call func(string) `short:"c" description:"Call phone number"`
|
||||
|
||||
// Example of a required flag
|
||||
Name string `short:"n" long:"name" description:"A name" required:"true"`
|
||||
|
||||
// Example of a value name
|
||||
File string `short:"f" long:"file" description:"A file" value-name:"FILE"`
|
||||
|
||||
// Example of a pointer
|
||||
Ptr *int `short:"p" description:"A pointer to an integer"`
|
||||
|
||||
// Example of a slice of strings
|
||||
StringSlice []string `short:"s" description:"A slice of strings"`
|
||||
|
||||
// Example of a slice of pointers
|
||||
PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
|
||||
|
||||
// Example of a map
|
||||
IntMap map[string]int `long:"intmap" description:"A map from string to int"`
|
||||
|
||||
// Example of a filename (useful for completion)
|
||||
Filename Filename `long:"filename" description:"A filename"`
|
||||
|
||||
// Example of positional arguments
|
||||
Args struct {
|
||||
Id string
|
||||
Num int
|
||||
Rest []string
|
||||
} `positional-args:"yes" required:"yes"`
|
||||
}
|
||||
|
||||
// Callback which will invoke callto:<argument> to call a number.
|
||||
// Note that this works just on OS X (and probably only with
|
||||
// Skype) but it shows the idea.
|
||||
opts.Call = func(num string) {
|
||||
cmd := exec.Command("open", "callto:"+num)
|
||||
cmd.Start()
|
||||
cmd.Process.Release()
|
||||
}
|
||||
|
||||
// Make some fake arguments to parse.
|
||||
args := []string{
|
||||
"-vv",
|
||||
"--offset=5",
|
||||
"-n", "Me",
|
||||
"-p", "3",
|
||||
"-s", "hello",
|
||||
"-s", "world",
|
||||
"--ptrslice", "hello",
|
||||
"--ptrslice", "world",
|
||||
"--intmap", "a:1",
|
||||
"--intmap", "b:5",
|
||||
"--filename", "hello.go",
|
||||
"id",
|
||||
"10",
|
||||
"remaining1",
|
||||
"remaining2",
|
||||
}
|
||||
|
||||
// Parse flags from `args'. Note that here we use flags.ParseArgs for
|
||||
// the sake of making a working example. Normally, you would simply use
|
||||
// flags.Parse(&opts) which uses os.Args
|
||||
_, err := ParseArgs(&opts, args)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
fmt.Printf("Verbosity: %v\n", opts.Verbose)
|
||||
fmt.Printf("Offset: %d\n", opts.Offset)
|
||||
fmt.Printf("Name: %s\n", opts.Name)
|
||||
fmt.Printf("Ptr: %d\n", *opts.Ptr)
|
||||
fmt.Printf("StringSlice: %v\n", opts.StringSlice)
|
||||
fmt.Printf("PtrSlice: [%v %v]\n", *opts.PtrSlice[0], *opts.PtrSlice[1])
|
||||
fmt.Printf("IntMap: [a:%v b:%v]\n", opts.IntMap["a"], opts.IntMap["b"])
|
||||
fmt.Printf("Filename: %v\n", opts.Filename)
|
||||
fmt.Printf("Args.Id: %s\n", opts.Args.Id)
|
||||
fmt.Printf("Args.Num: %d\n", opts.Args.Num)
|
||||
fmt.Printf("Args.Rest: %v\n", opts.Args.Rest)
|
||||
|
||||
// Output: Verbosity: [true true]
|
||||
// Offset: 5
|
||||
// Name: Me
|
||||
// Ptr: 3
|
||||
// StringSlice: [hello world]
|
||||
// PtrSlice: [hello world]
|
||||
// IntMap: [a:1 b:5]
|
||||
// Filename: hello.go
|
||||
// Args.Id: id
|
||||
// Args.Num: 10
|
||||
// Args.Rest: [remaining1 remaining2]
|
||||
}
|
256
vendor/github.com/jessevdk/go-flags/flags.go
generated
vendored
Normal file
@ -0,0 +1,256 @@
|
||||
// Copyright 2012 Jesse van den Kieboom. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package flags provides an extensive command line option parser.
|
||||
The flags package is similar in functionality to the go built-in flag package
|
||||
but provides more options and uses reflection to provide a convenient and
|
||||
succinct way of specifying command line options.
|
||||
|
||||
|
||||
Supported features
|
||||
|
||||
The following features are supported in go-flags:
|
||||
|
||||
Options with short names (-v)
|
||||
Options with long names (--verbose)
|
||||
Options with and without arguments (bool vs. other types)
|
||||
Options with optional arguments and default values
|
||||
Option default values from ENVIRONMENT_VARIABLES, including slice and map values
|
||||
Multiple option groups each containing a set of options
|
||||
Generate and print well-formatted help message
|
||||
Passing remaining command line arguments after -- (optional)
|
||||
Ignoring unknown command line options (optional)
|
||||
Supports -I/usr/include -I=/usr/include -I /usr/include option argument specification
|
||||
Supports multiple short options -aux
|
||||
Supports all primitive go types (string, int{8..64}, uint{8..64}, float)
|
||||
Supports same option multiple times (can store in slice or last option counts)
|
||||
Supports maps
|
||||
Supports function callbacks
|
||||
Supports namespaces for (nested) option groups
|
||||
|
||||
Additional features specific to Windows:
|
||||
Options with short names (/v)
|
||||
Options with long names (/verbose)
|
||||
Windows-style options with arguments use a colon as the delimiter
|
||||
Modify generated help message with Windows-style / options
|
||||
|
||||
|
||||
Basic usage
|
||||
|
||||
The flags package uses structs, reflection and struct field tags
|
||||
to allow users to specify command line options. This results in very simple
|
||||
and concise specification of your application options. For example:
|
||||
|
||||
type Options struct {
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
|
||||
}
|
||||
|
||||
This specifies one option with a short name -v and a long name --verbose.
|
||||
When either -v or --verbose is found on the command line, a 'true' value
|
||||
will be appended to the Verbose field. e.g. when specifying -vvv, the
|
||||
resulting value of Verbose will be {[true, true, true]}.
|
||||
|
||||
Slice options work exactly the same as primitive type options, except that
|
||||
whenever the option is encountered, a value is appended to the slice.
|
||||
|
||||
Map options from string to primitive type are also supported. On the command
|
||||
line, you specify the value for such an option as key:value. For example
|
||||
|
||||
type Options struct {
|
||||
AuthorInfo map[string]string `short:"a"`
|
||||
}
|
||||
|
||||
Then, the AuthorInfo map can be filled with something like
|
||||
-a name:Jesse -a "surname:van den Kieboom".
|
||||
|
||||
Finally, for full control over the conversion between command line argument
|
||||
values and options, user defined types can choose to implement the Marshaler
|
||||
and Unmarshaler interfaces.
|
||||
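As an illustrative sketch (the PortNumber type below is invented for this
example and requires the strconv package), a custom type can validate and
format its own flag value:

    type PortNumber uint16

    func (p *PortNumber) UnmarshalFlag(value string) error {
        n, err := strconv.ParseUint(value, 10, 16)
        if err != nil {
            return err
        }
        *p = PortNumber(n)
        return nil
    }

    func (p PortNumber) MarshalFlag() (string, error) {
        return strconv.FormatUint(uint64(p), 10), nil
    }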
|
||||
|
||||
Available field tags
|
||||
|
||||
The following is a list of tags for struct fields supported by go-flags:
|
||||
|
||||
short: the short name of the option (single character)
|
||||
long: the long name of the option
|
||||
required: whether an option is required to appear on the command
|
||||
line. If a required option is not present, the parser will
|
||||
return ErrRequired (optional)
|
||||
description: the description of the option (optional)
|
||||
long-description: the long description of the option. Currently only
|
||||
displayed in generated man pages (optional)
|
||||
no-flag: if non-empty this field is ignored as an option (optional)
|
||||
|
||||
optional: whether an argument of the option is optional. When an
|
||||
argument is optional it can only be specified using
|
||||
--option=argument (optional)
|
||||
optional-value: the value of an optional option when the option occurs
|
||||
without an argument. This tag can be specified multiple
|
||||
times in the case of maps or slices (optional)
|
||||
default: the default value of an option. This tag can be specified
|
||||
multiple times in the case of slices or maps (optional)
|
||||
default-mask: when specified, this value will be displayed in the help
|
||||
instead of the actual default value. This is useful
|
||||
mostly for hiding otherwise sensitive information from
|
||||
showing up in the help. If default-mask takes the special
|
||||
value "-", then no default value will be shown at all
|
||||
(optional)
|
||||
env: the default value of the option is overridden from the
|
||||
specified environment variable, if one has been defined.
|
||||
(optional)
|
||||
env-delim: the 'env' default value from environment is split into
|
||||
multiple values with the given delimiter string, use with
|
||||
slices and maps (optional)
|
||||
value-name: the name of the argument value (to be shown in the help)
|
||||
(optional)
|
||||
choice: limits the values for an option to a set of values.
|
||||
This tag can be specified multiple times (optional)
|
||||
hidden: the option is not visible in the help or man page.
|
||||
|
||||
base: a base (radix) used to convert strings to integer values, the
|
||||
default base is 10 (i.e. decimal) (optional)
|
||||
|
||||
ini-name: the explicit ini option name (optional)
|
||||
no-ini: if non-empty this field is ignored as an ini option
|
||||
(optional)
|
||||
|
||||
group: when specified on a struct field, makes the struct
|
||||
field a separate group with the given name (optional)
|
||||
namespace: when specified on a group struct field, the namespace
|
||||
gets prepended to every option's long name and
|
||||
subgroup's namespace of this group, separated by
|
||||
the parser's namespace delimiter (optional)
|
||||
command: when specified on a struct field, makes the struct
|
||||
field a (sub)command with the given name (optional)
|
||||
subcommands-optional: when specified on a command struct field, makes
|
||||
any subcommands of that command optional (optional)
|
||||
alias: when specified on a command struct field, adds the
|
||||
specified name as an alias for the command. Can be
|
||||
specified multiple times to add more than one
|
||||
alias (optional)
|
||||
positional-args: when specified on a field with a struct type,
|
||||
uses the fields of that struct to parse remaining
|
||||
positional command line arguments into (in order
|
||||
of the fields). If a field has a slice type,
|
||||
then all remaining arguments will be added to it.
|
||||
Positional arguments are optional by default,
|
||||
unless the "required" tag is specified together
|
||||
with the "positional-args" tag. The "required" tag
|
||||
can also be set on the individual rest argument
|
||||
fields, to require only the first N positional
|
||||
arguments. If the "required" tag is set on the
|
||||
rest arguments slice, then its value determines
|
||||
the minimum amount of rest arguments that needs to
|
||||
be provided (e.g. `required:"2"`) (optional)
|
||||
positional-arg-name: used on a field in a positional argument struct; name
|
||||
of the positional argument placeholder to be shown in
|
||||
the help (optional)
|
||||
|
||||
Either the `short:` tag or the `long:` tag must be specified to make the field eligible as an
|
||||
option.
|
||||
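As a rough illustration (the field names and tag values below are invented
for this sketch), several of these tags can be combined on a single options
struct:

    type Options struct {
        Verbose []bool   `short:"v" long:"verbose" description:"Verbose output"`
        Level   string   `long:"level" choice:"debug" choice:"info" default:"info"`
        Include []string `short:"I" long:"include" env:"INCLUDE_PATH" env-delim:":"`

        Args struct {
            Input string
            Rest  []string
        } `positional-args:"yes" required:"yes"`
    }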
|
||||
|
||||
Option groups
|
||||
|
||||
Option groups are a simple way to semantically separate your options. All
|
||||
options in a particular group are shown together in the help under the name
|
||||
of the group. Namespaces can be used to specify option long names more
|
||||
precisely and emphasize the options affiliation to their group.
|
||||
|
||||
There are currently three ways to specify option groups.
|
||||
|
||||
1. Use NewNamedParser specifying the various option groups.
|
||||
2. Use AddGroup to add a group to an existing parser.
|
||||
3. Add a struct field to the top-level options annotated with the
|
||||
group:"group-name" tag.
|
||||
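A minimal sketch of the third approach (the struct, group name and namespace
below are invented for illustration):

    type Options struct {
        Logging struct {
            Verbose bool   `short:"v" long:"verbose"`
            LogFile string `long:"file"`
        } `group:"Logging Options" namespace:"log"`
    }

With the namespace tag set and the parser's default namespace delimiter, the
long names above are matched as --log.verbose and --log.file.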
|
||||
|
||||
|
||||
Commands
|
||||
|
||||
The flags package also has basic support for commands. Commands are often
|
||||
used in monolithic applications that support various commands or actions.
|
||||
Take git for example, all of the add, commit, checkout, etc. are called
|
||||
commands. Using commands you can easily separate multiple functions of your
|
||||
application.
|
||||
|
||||
There are currently two ways to specify a command.
|
||||
|
||||
1. Use AddCommand on an existing parser.
|
||||
2. Add a struct field to your options struct annotated with the
|
||||
command:"command-name" tag.
|
||||
|
||||
The most common, idiomatic way to implement commands is to define a global
|
||||
parser instance and implement each command in a separate file. These
|
||||
command files should define a go init function which calls AddCommand on
|
||||
the global parser.
|
||||
|
||||
When parsing ends and there is an active command and that command implements
|
||||
the Commander interface, then its Execute method will be run with the
|
||||
remaining command line arguments.
|
||||
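A small sketch of such a command (the type and tag values are invented for
illustration, and the Execute signature is assumed from the description
above):

    type CleanCommand struct {
        Force bool `short:"f" long:"force"`
    }

    func (c *CleanCommand) Execute(args []string) error {
        // args holds the remaining positional arguments
        return nil
    }

    type Options struct {
        Clean CleanCommand `command:"clean" description:"clean the workspace"`
    }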
|
||||
Command structs can have options which become valid to parse after the
|
||||
command has been specified on the command line, in addition to the options
|
||||
of all the parent commands. I.e. considering a -v flag on the parser and an
|
||||
add command, the following are equivalent:
|
||||
|
||||
./app -v add
|
||||
./app add -v
|
||||
|
||||
However, if the -v flag is defined on the add command, then the first of
|
||||
the two examples above would fail since the -v flag is not defined before
|
||||
the add command.
|
||||
|
||||
|
||||
Completion
|
||||
|
||||
go-flags has builtin support to provide bash completion of flags, commands
|
||||
and argument values. To use completion, the binary which uses go-flags
|
||||
can be invoked in a special environment to list completion of the current
|
||||
command line argument. It should be noted that this `executes` your application,
|
||||
and it is up to the user to make sure there are no negative side effects (for
|
||||
example from init functions).
|
||||
|
||||
Setting the environment variable `GO_FLAGS_COMPLETION=1` enables completion
|
||||
by replacing the argument parsing routine with the completion routine which
|
||||
outputs completions for the passed arguments. The basic invocation to
|
||||
complete a set of arguments is therefore:
|
||||
|
||||
GO_FLAGS_COMPLETION=1 ./completion-example arg1 arg2 arg3
|
||||
|
||||
where `completion-example` is the binary, `arg1` and `arg2` are
|
||||
the current arguments, and `arg3` (the last argument) is the argument
|
||||
to be completed. If the GO_FLAGS_COMPLETION is set to "verbose", then
|
||||
descriptions of possible completion items will also be shown, if there
|
||||
are more than 1 completion items.
|
||||
|
||||
To use this with bash completion, a simple file can be written which
|
||||
calls the binary which supports go-flags completion:
|
||||
|
||||
_completion_example() {
|
||||
# All arguments except the first one
|
||||
args=("${COMP_WORDS[@]:1:$COMP_CWORD}")
|
||||
|
||||
# Only split on newlines
|
||||
local IFS=$'\n'
|
||||
|
||||
# Call completion (note that the first element of COMP_WORDS is
|
||||
# the executable itself)
|
||||
COMPREPLY=($(GO_FLAGS_COMPLETION=1 ${COMP_WORDS[0]} "${args[@]}"))
|
||||
return 0
|
||||
}
|
||||
|
||||
complete -F _completion_example completion-example
|
||||
|
||||
Completion requires the parser option PassDoubleDash, which is therefore enforced if the environment variable GO_FLAGS_COMPLETION is set.
|
||||
|
||||
Customized completion for argument values is supported by implementing
|
||||
the flags.Completer interface for the argument value type. An example
|
||||
of a type which does so is the flags.Filename type, an alias of string
|
||||
allowing simple filename completion. A slice or array argument value
|
||||
whose element type implements flags.Completer will also be completed.
|
||||
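A small sketch of such a type (invented for illustration, mirroring the shape
of the Completer used in the package's own tests; it requires the strings
package):

    type Environment string

    func (e Environment) Complete(match string) []Completion {
        var ret []Completion
        for _, env := range []string{"dev", "staging", "production"} {
            if strings.HasPrefix(env, match) {
                ret = append(ret, Completion{Item: env})
            }
        }
        return ret
    }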
*/
|
||||
package flags
|
385
vendor/github.com/jessevdk/go-flags/group.go
generated
vendored
Normal file
@ -0,0 +1,385 @@
|
||||
// Copyright 2012 Jesse van den Kieboom. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flags
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"reflect"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// ErrNotPointerToStruct indicates that a provided data container is not
|
||||
// a pointer to a struct. Only pointers to structs are valid data containers
|
||||
// for options.
|
||||
var ErrNotPointerToStruct = errors.New("provided data is not a pointer to struct")
|
||||
|
||||
// Group represents an option group. Option groups can be used to logically
|
||||
// group options together under a description. Groups are only used to provide
|
||||
// more structure to options both for the user (as displayed in the help message)
|
||||
// and for you, since groups can be nested.
|
||||
type Group struct {
|
||||
// A short description of the group. The
|
||||
// short description is primarily used in the built-in generated help
|
||||
// message
|
||||
ShortDescription string
|
||||
|
||||
// A long description of the group. The long
|
||||
// description is primarily used to present information on commands
|
||||
// (Command embeds Group) in the built-in generated help and man pages.
|
||||
LongDescription string
|
||||
|
||||
// The namespace of the group
|
||||
Namespace string
|
||||
|
||||
// If true, the group is not displayed in the help or man page
|
||||
Hidden bool
|
||||
|
||||
// The parent of the group or nil if it has no parent
|
||||
parent interface{}
|
||||
|
||||
// All the options in the group
|
||||
options []*Option
|
||||
|
||||
// All the subgroups
|
||||
groups []*Group
|
||||
|
||||
// Whether the group represents the built-in help group
|
||||
isBuiltinHelp bool
|
||||
|
||||
data interface{}
|
||||
}
|
||||
|
||||
type scanHandler func(reflect.Value, *reflect.StructField) (bool, error)
|
||||
|
||||
// AddGroup adds a new group to the command with the given name and data. The
|
||||
// data needs to be a pointer to a struct from which the fields indicate which
|
||||
// options are in the group.
|
||||
func (g *Group) AddGroup(shortDescription string, longDescription string, data interface{}) (*Group, error) {
|
||||
group := newGroup(shortDescription, longDescription, data)
|
||||
|
||||
group.parent = g
|
||||
|
||||
if err := group.scan(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
g.groups = append(g.groups, group)
|
||||
return group, nil
|
||||
}
|
||||
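// Illustrative usage sketch (the variable and group names are invented):
// a separate options struct can be attached to an existing parser or group
// like this:
//
//	var netOpts struct {
//		Timeout int `long:"timeout" description:"timeout in seconds"`
//	}
//	if _, err := parser.AddGroup("Network Options", "", &netOpts); err != nil {
//		// handle the error
//	}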
|
||||
// Groups returns the list of groups embedded in this group.
|
||||
func (g *Group) Groups() []*Group {
|
||||
return g.groups
|
||||
}
|
||||
|
||||
// Options returns the list of options in this group.
|
||||
func (g *Group) Options() []*Option {
|
||||
return g.options
|
||||
}
|
||||
|
||||
// Find locates the subgroup with the given short description and returns it.
|
||||
// If no such group can be found Find will return nil. Note that the description
|
||||
// is matched case insensitively.
|
||||
func (g *Group) Find(shortDescription string) *Group {
|
||||
lshortDescription := strings.ToLower(shortDescription)
|
||||
|
||||
var ret *Group
|
||||
|
||||
g.eachGroup(func(gg *Group) {
|
||||
if gg != g && strings.ToLower(gg.ShortDescription) == lshortDescription {
|
||||
ret = gg
|
||||
}
|
||||
})
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (g *Group) findOption(matcher func(*Option) bool) (option *Option) {
|
||||
g.eachGroup(func(g *Group) {
|
||||
for _, opt := range g.options {
|
||||
if option == nil && matcher(opt) {
|
||||
option = opt
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return option
|
||||
}
|
||||
|
||||
// Find an option that is part of the group, or any of its subgroups,
|
||||
// by matching its long name (including the option namespace).
|
||||
func (g *Group) FindOptionByLongName(longName string) *Option {
|
||||
return g.findOption(func(option *Option) bool {
|
||||
return option.LongNameWithNamespace() == longName
|
||||
})
|
||||
}
|
||||
|
||||
// Find an option that is part of the group, or any of its subgroups,
|
||||
// by matching its short name.
|
||||
func (g *Group) FindOptionByShortName(shortName rune) *Option {
|
||||
return g.findOption(func(option *Option) bool {
|
||||
return option.ShortName == shortName
|
||||
})
|
||||
}
|
||||
|
||||
func newGroup(shortDescription string, longDescription string, data interface{}) *Group {
|
||||
return &Group{
|
||||
ShortDescription: shortDescription,
|
||||
LongDescription: longDescription,
|
||||
|
||||
data: data,
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Group) optionByName(name string, namematch func(*Option, string) bool) *Option {
|
||||
prio := 0
|
||||
var retopt *Option
|
||||
|
||||
g.eachGroup(func(g *Group) {
|
||||
for _, opt := range g.options {
|
||||
if namematch != nil && namematch(opt, name) && prio < 4 {
|
||||
retopt = opt
|
||||
prio = 4
|
||||
}
|
||||
|
||||
if name == opt.field.Name && prio < 3 {
|
||||
retopt = opt
|
||||
prio = 3
|
||||
}
|
||||
|
||||
if name == opt.LongNameWithNamespace() && prio < 2 {
|
||||
retopt = opt
|
||||
prio = 2
|
||||
}
|
||||
|
||||
if opt.ShortName != 0 && name == string(opt.ShortName) && prio < 1 {
|
||||
retopt = opt
|
||||
prio = 1
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return retopt
|
||||
}
|
||||
|
||||
func (g *Group) eachGroup(f func(*Group)) {
|
||||
f(g)
|
||||
|
||||
for _, gg := range g.groups {
|
||||
gg.eachGroup(f)
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Group) scanStruct(realval reflect.Value, sfield *reflect.StructField, handler scanHandler) error {
|
||||
stype := realval.Type()
|
||||
|
||||
if sfield != nil {
|
||||
if ok, err := handler(realval, sfield); err != nil {
|
||||
return err
|
||||
} else if ok {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
for i := 0; i < stype.NumField(); i++ {
|
||||
field := stype.Field(i)
|
||||
|
||||
// PkgPath is set only for non-exported fields, which we ignore
|
||||
if field.PkgPath != "" && !field.Anonymous {
|
||||
continue
|
||||
}
|
||||
|
||||
mtag := newMultiTag(string(field.Tag))
|
||||
|
||||
if err := mtag.Parse(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Skip fields with the no-flag tag
|
||||
if mtag.Get("no-flag") != "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Dive deep into structs or pointers to structs
|
||||
kind := field.Type.Kind()
|
||||
fld := realval.Field(i)
|
||||
|
||||
if kind == reflect.Struct {
|
||||
if err := g.scanStruct(fld, &field, handler); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if kind == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
|
||||
if fld.IsNil() {
|
||||
fld.Set(reflect.New(fld.Type().Elem()))
|
||||
}
|
||||
|
||||
if err := g.scanStruct(reflect.Indirect(fld), &field, handler); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
longname := mtag.Get("long")
|
||||
shortname := mtag.Get("short")
|
||||
|
||||
// Need at least either a short or long name
|
||||
if longname == "" && shortname == "" && mtag.Get("ini-name") == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
short := rune(0)
|
||||
rc := utf8.RuneCountInString(shortname)
|
||||
|
||||
if rc > 1 {
|
||||
return newErrorf(ErrShortNameTooLong,
|
||||
"short names can only be 1 character long, not `%s'",
|
||||
shortname)
|
||||
|
||||
} else if rc == 1 {
|
||||
short, _ = utf8.DecodeRuneInString(shortname)
|
||||
}
|
||||
|
||||
description := mtag.Get("description")
|
||||
def := mtag.GetMany("default")
|
||||
|
||||
optionalValue := mtag.GetMany("optional-value")
|
||||
valueName := mtag.Get("value-name")
|
||||
defaultMask := mtag.Get("default-mask")
|
||||
|
||||
optional := (mtag.Get("optional") != "")
|
||||
required := (mtag.Get("required") != "")
|
||||
choices := mtag.GetMany("choice")
|
||||
hidden := (mtag.Get("hidden") != "")
|
||||
|
||||
option := &Option{
|
||||
Description: description,
|
||||
ShortName: short,
|
||||
LongName: longname,
|
||||
Default: def,
|
||||
EnvDefaultKey: mtag.Get("env"),
|
||||
EnvDefaultDelim: mtag.Get("env-delim"),
|
||||
OptionalArgument: optional,
|
||||
OptionalValue: optionalValue,
|
||||
Required: required,
|
||||
ValueName: valueName,
|
||||
DefaultMask: defaultMask,
|
||||
Choices: choices,
|
||||
Hidden: hidden,
|
||||
|
||||
group: g,
|
||||
|
||||
field: field,
|
||||
value: realval.Field(i),
|
||||
tag: mtag,
|
||||
}
|
||||
|
||||
if option.isBool() && option.Default != nil {
|
||||
return newErrorf(ErrInvalidTag,
|
||||
"boolean flag `%s' may not have default values, they always default to `false' and can only be turned on",
|
||||
option.shortAndLongName())
|
||||
}
|
||||
|
||||
g.options = append(g.options, option)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Group) checkForDuplicateFlags() *Error {
|
||||
shortNames := make(map[rune]*Option)
|
||||
longNames := make(map[string]*Option)
|
||||
|
||||
var duplicateError *Error
|
||||
|
||||
g.eachGroup(func(g *Group) {
|
||||
for _, option := range g.options {
|
||||
if option.LongName != "" {
|
||||
longName := option.LongNameWithNamespace()
|
||||
|
||||
if otherOption, ok := longNames[longName]; ok {
|
||||
duplicateError = newErrorf(ErrDuplicatedFlag, "option `%s' uses the same long name as option `%s'", option, otherOption)
|
||||
return
|
||||
}
|
||||
longNames[longName] = option
|
||||
}
|
||||
if option.ShortName != 0 {
|
||||
if otherOption, ok := shortNames[option.ShortName]; ok {
|
||||
duplicateError = newErrorf(ErrDuplicatedFlag, "option `%s' uses the same short name as option `%s'", option, otherOption)
|
||||
return
|
||||
}
|
||||
shortNames[option.ShortName] = option
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return duplicateError
|
||||
}
|
||||
|
||||
func (g *Group) scanSubGroupHandler(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
|
||||
mtag := newMultiTag(string(sfield.Tag))
|
||||
|
||||
if err := mtag.Parse(); err != nil {
|
||||
return true, err
|
||||
}
|
||||
|
||||
subgroup := mtag.Get("group")
|
||||
|
||||
if len(subgroup) != 0 {
|
||||
ptrval := reflect.NewAt(realval.Type(), unsafe.Pointer(realval.UnsafeAddr()))
|
||||
description := mtag.Get("description")
|
||||
|
||||
group, err := g.AddGroup(subgroup, description, ptrval.Interface())
|
||||
if err != nil {
|
||||
return true, err
|
||||
}
|
||||
|
||||
group.Namespace = mtag.Get("namespace")
|
||||
group.Hidden = mtag.Get("hidden") != ""
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func (g *Group) scanType(handler scanHandler) error {
|
||||
// Get all the public fields in the data struct
|
||||
ptrval := reflect.ValueOf(g.data)
|
||||
|
||||
if ptrval.Type().Kind() != reflect.Ptr {
|
||||
panic(ErrNotPointerToStruct)
|
||||
}
|
||||
|
||||
stype := ptrval.Type().Elem()
|
||||
|
||||
if stype.Kind() != reflect.Struct {
|
||||
panic(ErrNotPointerToStruct)
|
||||
}
|
||||
|
||||
realval := reflect.Indirect(ptrval)
|
||||
|
||||
if err := g.scanStruct(realval, nil, handler); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := g.checkForDuplicateFlags(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Group) scan() error {
|
||||
return g.scanType(g.scanSubGroupHandler)
|
||||
}
|
||||
|
||||
func (g *Group) groupByName(name string) *Group {
|
||||
if len(name) == 0 {
|
||||
return g
|
||||
}
|
||||
|
||||
return g.Find(name)
|
||||
}
|
255
vendor/github.com/jessevdk/go-flags/group_test.go
generated
vendored
Normal file
@ -0,0 +1,255 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGroupInline(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Group struct {
|
||||
G bool `short:"g"`
|
||||
} `group:"Grouped Options"`
|
||||
}{}
|
||||
|
||||
p, ret := assertParserSuccess(t, &opts, "-v", "-g")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Group.G {
|
||||
t.Errorf("Expected Group.G to be true")
|
||||
}
|
||||
|
||||
if p.Command.Group.Find("Grouped Options") == nil {
|
||||
t.Errorf("Expected to find group `Grouped Options'")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGroupAdd(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
}{}
|
||||
|
||||
var grp = struct {
|
||||
G bool `short:"g"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
g, err := p.AddGroup("Grouped Options", "", &grp)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
ret, err := p.ParseArgs([]string{"-v", "-g", "rest"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assertStringArray(t, ret, []string{"rest"})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !grp.G {
|
||||
t.Errorf("Expected Group.G to be true")
|
||||
}
|
||||
|
||||
if p.Command.Group.Find("Grouped Options") != g {
|
||||
t.Errorf("Expected to find group `Grouped Options'")
|
||||
}
|
||||
|
||||
if p.Groups()[1] != g {
|
||||
t.Errorf("Expected group %#v, but got %#v", g, p.Groups()[0])
|
||||
}
|
||||
|
||||
if g.Options()[0].ShortName != 'g' {
|
||||
t.Errorf("Expected short name `g' but got %v", g.Options()[0].ShortName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGroupNestedInline(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
|
||||
Group struct {
|
||||
G bool `short:"g"`
|
||||
|
||||
Nested struct {
|
||||
N string `long:"n"`
|
||||
} `group:"Nested Options"`
|
||||
} `group:"Grouped Options"`
|
||||
}{}
|
||||
|
||||
p, ret := assertParserSuccess(t, &opts, "-v", "-g", "--n", "n", "rest")
|
||||
|
||||
assertStringArray(t, ret, []string{"rest"})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
if !opts.Group.G {
|
||||
t.Errorf("Expected Group.G to be true")
|
||||
}
|
||||
|
||||
assertString(t, opts.Group.Nested.N, "n")
|
||||
|
||||
if p.Command.Group.Find("Grouped Options") == nil {
|
||||
t.Errorf("Expected to find group `Grouped Options'")
|
||||
}
|
||||
|
||||
if p.Command.Group.Find("Nested Options") == nil {
|
||||
t.Errorf("Expected to find group `Nested Options'")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGroupNestedInlineNamespace(t *testing.T) {
|
||||
var opts = struct {
|
||||
Opt string `long:"opt"`
|
||||
|
||||
Group struct {
|
||||
Opt string `long:"opt"`
|
||||
Group struct {
|
||||
Opt string `long:"opt"`
|
||||
} `group:"Subsubgroup" namespace:"sap"`
|
||||
} `group:"Subgroup" namespace:"sip"`
|
||||
}{}
|
||||
|
||||
p, ret := assertParserSuccess(t, &opts, "--opt", "a", "--sip.opt", "b", "--sip.sap.opt", "c", "rest")
|
||||
|
||||
assertStringArray(t, ret, []string{"rest"})
|
||||
|
||||
assertString(t, opts.Opt, "a")
|
||||
assertString(t, opts.Group.Opt, "b")
|
||||
assertString(t, opts.Group.Group.Opt, "c")
|
||||
|
||||
for _, name := range []string{"Subgroup", "Subsubgroup"} {
|
||||
if p.Command.Group.Find(name) == nil {
|
||||
t.Errorf("Expected to find group '%s'", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDuplicateShortFlags(t *testing.T) {
|
||||
var opts struct {
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
|
||||
Variables []string `short:"v" long:"variable" description:"Set a variable value."`
|
||||
}
|
||||
|
||||
args := []string{
|
||||
"--verbose",
|
||||
"-v", "123",
|
||||
"-v", "456",
|
||||
}
|
||||
|
||||
_, err := ParseArgs(&opts, args)
|
||||
|
||||
if err == nil {
|
||||
t.Errorf("Expected an error with type ErrDuplicatedFlag")
|
||||
} else {
|
||||
err2 := err.(*Error)
|
||||
if err2.Type != ErrDuplicatedFlag {
|
||||
t.Errorf("Expected an error with type ErrDuplicatedFlag")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDuplicateLongFlags(t *testing.T) {
|
||||
var opts struct {
|
||||
Test1 []bool `short:"a" long:"testing" description:"Test 1"`
|
||||
Test2 []string `short:"b" long:"testing" description:"Test 2."`
|
||||
}
|
||||
|
||||
args := []string{
|
||||
"--testing",
|
||||
}
|
||||
|
||||
_, err := ParseArgs(&opts, args)
|
||||
|
||||
if err == nil {
|
||||
t.Errorf("Expected an error with type ErrDuplicatedFlag")
|
||||
} else {
|
||||
err2 := err.(*Error)
|
||||
if err2.Type != ErrDuplicatedFlag {
|
||||
t.Errorf("Expected an error with type ErrDuplicatedFlag")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindOptionByLongFlag(t *testing.T) {
|
||||
var opts struct {
|
||||
Testing bool `long:"testing" description:"Testing"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
opt := p.FindOptionByLongName("testing")
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
assertString(t, opt.LongName, "testing")
|
||||
}
|
||||
|
||||
func TestFindOptionByShortFlag(t *testing.T) {
|
||||
var opts struct {
|
||||
Testing bool `short:"t" description:"Testing"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
opt := p.FindOptionByShortName('t')
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
if opt.ShortName != 't' {
|
||||
t.Errorf("Expected 't', but got %v", opt.ShortName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindOptionByLongFlagInSubGroup(t *testing.T) {
|
||||
var opts struct {
|
||||
Group struct {
|
||||
Testing bool `long:"testing" description:"Testing"`
|
||||
} `group:"sub-group"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
opt := p.FindOptionByLongName("testing")
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
assertString(t, opt.LongName, "testing")
|
||||
}
|
||||
|
||||
func TestFindOptionByShortFlagInSubGroup(t *testing.T) {
|
||||
var opts struct {
|
||||
Group struct {
|
||||
Testing bool `short:"t" description:"Testing"`
|
||||
} `group:"sub-group"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
opt := p.FindOptionByShortName('t')
|
||||
|
||||
if opt == nil {
|
||||
t.Errorf("Expected option, but found none")
|
||||
}
|
||||
|
||||
if opt.ShortName != 't' {
|
||||
t.Errorf("Expected 't', but got %v", opt.ShortName)
|
||||
}
|
||||
}
|
473
vendor/github.com/jessevdk/go-flags/help.go
generated
vendored
Normal file
@ -0,0 +1,473 @@
// Copyright 2012 Jesse van den Kieboom. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package flags

import (
	"bufio"
	"bytes"
	"fmt"
	"io"
	"runtime"
	"strings"
	"unicode/utf8"
)

type alignmentInfo struct {
|
||||
maxLongLen int
|
||||
hasShort bool
|
||||
hasValueName bool
|
||||
terminalColumns int
|
||||
indent bool
|
||||
}
|
||||
|
||||
const (
|
||||
paddingBeforeOption = 2
|
||||
distanceBetweenOptionAndDescription = 2
|
||||
)
|
||||
|
||||
func (a *alignmentInfo) descriptionStart() int {
|
||||
ret := a.maxLongLen + distanceBetweenOptionAndDescription
|
||||
|
||||
if a.hasShort {
|
||||
ret += 2
|
||||
}
|
||||
|
||||
if a.maxLongLen > 0 {
|
||||
ret += 4
|
||||
}
|
||||
|
||||
if a.hasValueName {
|
||||
ret += 3
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (a *alignmentInfo) updateLen(name string, indent bool) {
|
||||
l := utf8.RuneCountInString(name)
|
||||
|
||||
if indent {
|
||||
l = l + 4
|
||||
}
|
||||
|
||||
if l > a.maxLongLen {
|
||||
a.maxLongLen = l
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Parser) getAlignmentInfo() alignmentInfo {
|
||||
ret := alignmentInfo{
|
||||
maxLongLen: 0,
|
||||
hasShort: false,
|
||||
hasValueName: false,
|
||||
terminalColumns: getTerminalColumns(),
|
||||
}
|
||||
|
||||
if ret.terminalColumns <= 0 {
|
||||
ret.terminalColumns = 80
|
||||
}
|
||||
|
||||
var prevcmd *Command
|
||||
|
||||
p.eachActiveGroup(func(c *Command, grp *Group) {
|
||||
if c != prevcmd {
|
||||
for _, arg := range c.args {
|
||||
ret.updateLen(arg.Name, c != p.Command)
|
||||
}
|
||||
}
|
||||
|
||||
for _, info := range grp.options {
|
||||
if !info.canCli() {
|
||||
continue
|
||||
}
|
||||
|
||||
if info.ShortName != 0 {
|
||||
ret.hasShort = true
|
||||
}
|
||||
|
||||
if len(info.ValueName) > 0 {
|
||||
ret.hasValueName = true
|
||||
}
|
||||
|
||||
l := info.LongNameWithNamespace() + info.ValueName
|
||||
|
||||
if len(info.Choices) != 0 {
|
||||
l += "[" + strings.Join(info.Choices, "|") + "]"
|
||||
}
|
||||
|
||||
ret.updateLen(l, c != p.Command)
|
||||
}
|
||||
})
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func wrapText(s string, l int, prefix string) string {
|
||||
var ret string
|
||||
|
||||
// Basic text wrapping of s at spaces to fit in l
|
||||
lines := strings.Split(s, "\n")
|
||||
|
||||
for _, line := range lines {
|
||||
var retline string
|
||||
|
||||
line = strings.TrimSpace(line)
|
||||
|
||||
for len(line) > l {
|
||||
// Try to split on space
|
||||
suffix := ""
|
||||
|
||||
pos := strings.LastIndex(line[:l], " ")
|
||||
|
||||
if pos < 0 {
|
||||
pos = l - 1
|
||||
suffix = "-\n"
|
||||
}
|
||||
|
||||
if len(retline) != 0 {
|
||||
retline += "\n" + prefix
|
||||
}
|
||||
|
||||
retline += strings.TrimSpace(line[:pos]) + suffix
|
||||
line = strings.TrimSpace(line[pos:])
|
||||
}
|
||||
|
||||
if len(line) > 0 {
|
||||
if len(retline) != 0 {
|
||||
retline += "\n" + prefix
|
||||
}
|
||||
|
||||
retline += line
|
||||
}
|
||||
|
||||
if len(ret) > 0 {
|
||||
ret += "\n"
|
||||
|
||||
if len(retline) > 0 {
|
||||
ret += prefix
|
||||
}
|
||||
}
|
||||
|
||||
ret += retline
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *Parser) writeHelpOption(writer *bufio.Writer, option *Option, info alignmentInfo) {
|
||||
line := &bytes.Buffer{}
|
||||
|
||||
prefix := paddingBeforeOption
|
||||
|
||||
if info.indent {
|
||||
prefix += 4
|
||||
}
|
||||
|
||||
if option.Hidden {
|
||||
return
|
||||
}
|
||||
|
||||
line.WriteString(strings.Repeat(" ", prefix))
|
||||
|
||||
if option.ShortName != 0 {
|
||||
line.WriteRune(defaultShortOptDelimiter)
|
||||
line.WriteRune(option.ShortName)
|
||||
} else if info.hasShort {
|
||||
line.WriteString(" ")
|
||||
}
|
||||
|
||||
descstart := info.descriptionStart() + paddingBeforeOption
|
||||
|
||||
if len(option.LongName) > 0 {
|
||||
if option.ShortName != 0 {
|
||||
line.WriteString(", ")
|
||||
} else if info.hasShort {
|
||||
line.WriteString(" ")
|
||||
}
|
||||
|
||||
line.WriteString(defaultLongOptDelimiter)
|
||||
line.WriteString(option.LongNameWithNamespace())
|
||||
}
|
||||
|
||||
if option.canArgument() {
|
||||
line.WriteRune(defaultNameArgDelimiter)
|
||||
|
||||
if len(option.ValueName) > 0 {
|
||||
line.WriteString(option.ValueName)
|
||||
}
|
||||
|
||||
if len(option.Choices) > 0 {
|
||||
line.WriteString("[" + strings.Join(option.Choices, "|") + "]")
|
||||
}
|
||||
}
|
||||
|
||||
written := line.Len()
|
||||
line.WriteTo(writer)
|
||||
|
||||
if option.Description != "" {
|
||||
dw := descstart - written
|
||||
writer.WriteString(strings.Repeat(" ", dw))
|
||||
|
||||
var def string
|
||||
|
||||
if len(option.DefaultMask) != 0 && option.DefaultMask != "-" {
|
||||
def = option.DefaultMask
|
||||
} else {
|
||||
def = option.defaultLiteral
|
||||
}
|
||||
|
||||
var envDef string
|
||||
if option.EnvDefaultKey != "" {
|
||||
var envPrintable string
|
||||
if runtime.GOOS == "windows" {
|
||||
envPrintable = "%" + option.EnvDefaultKey + "%"
|
||||
} else {
|
||||
envPrintable = "$" + option.EnvDefaultKey
|
||||
}
|
||||
envDef = fmt.Sprintf(" [%s]", envPrintable)
|
||||
}
|
||||
|
||||
var desc string
|
||||
|
||||
if def != "" {
|
||||
desc = fmt.Sprintf("%s (default: %v)%s", option.Description, def, envDef)
|
||||
} else {
|
||||
desc = option.Description + envDef
|
||||
}
|
||||
|
||||
writer.WriteString(wrapText(desc,
|
||||
info.terminalColumns-descstart,
|
||||
strings.Repeat(" ", descstart)))
|
||||
}
|
||||
|
||||
writer.WriteString("\n")
|
||||
}
|
||||
|
||||
func maxCommandLength(s []*Command) int {
|
||||
if len(s) == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
ret := len(s[0].Name)
|
||||
|
||||
for _, v := range s[1:] {
|
||||
l := len(v.Name)
|
||||
|
||||
if l > ret {
|
||||
ret = l
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
// WriteHelp writes a help message containing all the possible options and
|
||||
// their descriptions to the provided writer. Note that the HelpFlag parser
|
||||
// option provides a convenient way to add a -h/--help option group to the
|
||||
// command line parser which will automatically show the help messages using
|
||||
// this method.
|
||||
func (p *Parser) WriteHelp(writer io.Writer) {
|
||||
if writer == nil {
|
||||
return
|
||||
}
|
||||
|
||||
wr := bufio.NewWriter(writer)
|
||||
aligninfo := p.getAlignmentInfo()
|
||||
|
||||
cmd := p.Command
|
||||
|
||||
for cmd.Active != nil {
|
||||
cmd = cmd.Active
|
||||
}
|
||||
|
||||
if p.Name != "" {
|
||||
wr.WriteString("Usage:\n")
|
||||
wr.WriteString(" ")
|
||||
|
||||
allcmd := p.Command
|
||||
|
||||
for allcmd != nil {
|
||||
var usage string
|
||||
|
||||
if allcmd == p.Command {
|
||||
if len(p.Usage) != 0 {
|
||||
usage = p.Usage
|
||||
} else if p.Options&HelpFlag != 0 {
|
||||
usage = "[OPTIONS]"
|
||||
}
|
||||
} else if us, ok := allcmd.data.(Usage); ok {
|
||||
usage = us.Usage()
|
||||
} else if allcmd.hasCliOptions() {
|
||||
usage = fmt.Sprintf("[%s-OPTIONS]", allcmd.Name)
|
||||
}
|
||||
|
||||
if len(usage) != 0 {
|
||||
fmt.Fprintf(wr, " %s %s", allcmd.Name, usage)
|
||||
} else {
|
||||
fmt.Fprintf(wr, " %s", allcmd.Name)
|
||||
}
|
||||
|
||||
if len(allcmd.args) > 0 {
|
||||
fmt.Fprintf(wr, " ")
|
||||
}
|
||||
|
||||
for i, arg := range allcmd.args {
|
||||
if i != 0 {
|
||||
fmt.Fprintf(wr, " ")
|
||||
}
|
||||
|
||||
name := arg.Name
|
||||
|
||||
if arg.isRemaining() {
|
||||
name = name + "..."
|
||||
}
|
||||
|
||||
if !allcmd.ArgsRequired {
|
||||
fmt.Fprintf(wr, "[%s]", name)
|
||||
} else {
|
||||
fmt.Fprintf(wr, "%s", name)
|
||||
}
|
||||
}
|
||||
|
||||
if allcmd.Active == nil && len(allcmd.commands) > 0 {
|
||||
var co, cc string
|
||||
|
||||
if allcmd.SubcommandsOptional {
|
||||
co, cc = "[", "]"
|
||||
} else {
|
||||
co, cc = "<", ">"
|
||||
}
|
||||
|
||||
visibleCommands := allcmd.visibleCommands()
|
||||
|
||||
if len(visibleCommands) > 3 {
|
||||
fmt.Fprintf(wr, " %scommand%s", co, cc)
|
||||
} else {
|
||||
subcommands := allcmd.sortedVisibleCommands()
|
||||
names := make([]string, len(subcommands))
|
||||
|
||||
for i, subc := range subcommands {
|
||||
names[i] = subc.Name
|
||||
}
|
||||
|
||||
fmt.Fprintf(wr, " %s%s%s", co, strings.Join(names, " | "), cc)
|
||||
}
|
||||
}
|
||||
|
||||
allcmd = allcmd.Active
|
||||
}
|
||||
|
||||
fmt.Fprintln(wr)
|
||||
|
||||
if len(cmd.LongDescription) != 0 {
|
||||
fmt.Fprintln(wr)
|
||||
|
||||
t := wrapText(cmd.LongDescription,
|
||||
aligninfo.terminalColumns,
|
||||
"")
|
||||
|
||||
fmt.Fprintln(wr, t)
|
||||
}
|
||||
}
|
||||
|
||||
c := p.Command
|
||||
|
||||
for c != nil {
|
||||
printcmd := c != p.Command
|
||||
|
||||
c.eachGroup(func(grp *Group) {
|
||||
first := true
|
||||
|
||||
// Skip built-in help group for all commands except the top-level
|
||||
// parser
|
||||
if grp.Hidden || (grp.isBuiltinHelp && c != p.Command) {
|
||||
return
|
||||
}
|
||||
|
||||
for _, info := range grp.options {
|
||||
if !info.canCli() || info.Hidden {
|
||||
continue
|
||||
}
|
||||
|
||||
if printcmd {
|
||||
fmt.Fprintf(wr, "\n[%s command options]\n", c.Name)
|
||||
aligninfo.indent = true
|
||||
printcmd = false
|
||||
}
|
||||
|
||||
if first && cmd.Group != grp {
|
||||
fmt.Fprintln(wr)
|
||||
|
||||
if aligninfo.indent {
|
||||
wr.WriteString(" ")
|
||||
}
|
||||
|
||||
fmt.Fprintf(wr, "%s:\n", grp.ShortDescription)
|
||||
first = false
|
||||
}
|
||||
|
||||
p.writeHelpOption(wr, info, aligninfo)
|
||||
}
|
||||
})
|
||||
|
||||
var args []*Arg
|
||||
for _, arg := range c.args {
|
||||
if arg.Description != "" {
|
||||
args = append(args, arg)
|
||||
}
|
||||
}
|
||||
|
||||
if len(args) > 0 {
|
||||
if c == p.Command {
|
||||
fmt.Fprintf(wr, "\nArguments:\n")
|
||||
} else {
|
||||
fmt.Fprintf(wr, "\n[%s command arguments]\n", c.Name)
|
||||
}
|
||||
|
||||
maxlen := aligninfo.descriptionStart()
|
||||
|
||||
for _, arg := range args {
|
||||
prefix := strings.Repeat(" ", paddingBeforeOption)
|
||||
fmt.Fprintf(wr, "%s%s", prefix, arg.Name)
|
||||
|
||||
if len(arg.Description) > 0 {
|
||||
align := strings.Repeat(" ", maxlen-len(arg.Name)-1)
|
||||
fmt.Fprintf(wr, ":%s%s", align, arg.Description)
|
||||
}
|
||||
|
||||
fmt.Fprintln(wr)
|
||||
}
|
||||
}
|
||||
|
||||
c = c.Active
|
||||
}
|
||||
|
||||
scommands := cmd.sortedVisibleCommands()
|
||||
|
||||
if len(scommands) > 0 {
|
||||
maxnamelen := maxCommandLength(scommands)
|
||||
|
||||
fmt.Fprintln(wr)
|
||||
fmt.Fprintln(wr, "Available commands:")
|
||||
|
||||
for _, c := range scommands {
|
||||
fmt.Fprintf(wr, " %s", c.Name)
|
||||
|
||||
if len(c.ShortDescription) > 0 {
|
||||
pad := strings.Repeat(" ", maxnamelen-len(c.Name))
|
||||
fmt.Fprintf(wr, "%s %s", pad, c.ShortDescription)
|
||||
|
||||
if len(c.Aliases) > 0 {
|
||||
fmt.Fprintf(wr, " (aliases: %s)", strings.Join(c.Aliases, ", "))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
fmt.Fprintln(wr)
|
||||
}
|
||||
}
|
||||
|
||||
wr.Flush()
|
||||
}
|
462
vendor/github.com/jessevdk/go-flags/help_test.go
generated
vendored
Normal file
@ -0,0 +1,462 @@
package flags

import (
	"bytes"
	"fmt"
	"os"
	"runtime"
	"testing"
	"time"
)

type helpOptions struct {
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information" ini-name:"verbose"`
|
||||
Call func(string) `short:"c" description:"Call phone number" ini-name:"call"`
|
||||
PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
|
||||
EmptyDescription bool `long:"empty-description"`
|
||||
|
||||
Default string `long:"default" default:"Some\nvalue" description:"Test default value"`
|
||||
DefaultArray []string `long:"default-array" default:"Some value" default:"Other\tvalue" description:"Test default array value"`
|
||||
DefaultMap map[string]string `long:"default-map" default:"some:value" default:"another:value" description:"Testdefault map value"`
|
||||
EnvDefault1 string `long:"env-default1" default:"Some value" env:"ENV_DEFAULT" description:"Test env-default1 value"`
|
||||
EnvDefault2 string `long:"env-default2" env:"ENV_DEFAULT" description:"Test env-default2 value"`
|
||||
OptionWithArgName string `long:"opt-with-arg-name" value-name:"something" description:"Option with named argument"`
|
||||
OptionWithChoices string `long:"opt-with-choices" value-name:"choice" choice:"dog" choice:"cat" description:"Option with choices"`
|
||||
Hidden string `long:"hidden" description:"Hidden option" hidden:"yes"`
|
||||
|
||||
OnlyIni string `ini-name:"only-ini" description:"Option only available in ini"`
|
||||
|
||||
Other struct {
|
||||
StringSlice []string `short:"s" default:"some" default:"value" description:"A slice of strings"`
|
||||
IntMap map[string]int `long:"intmap" default:"a:1" description:"A map from string to int" ini-name:"int-map"`
|
||||
} `group:"Other Options"`
|
||||
|
||||
HiddenGroup struct {
|
||||
InsideHiddenGroup string `long:"inside-hidden-group" description:"Inside hidden group"`
|
||||
} `group:"Hidden group" hidden:"yes"`
|
||||
|
||||
Group struct {
|
||||
Opt string `long:"opt" description:"This is a subgroup option"`
|
||||
HiddenInsideGroup string `long:"hidden-inside-group" description:"Hidden inside group" hidden:"yes"`
|
||||
|
||||
Group struct {
|
||||
Opt string `long:"opt" description:"This is a subsubgroup option"`
|
||||
} `group:"Subsubgroup" namespace:"sap"`
|
||||
} `group:"Subgroup" namespace:"sip"`
|
||||
|
||||
Command struct {
|
||||
ExtraVerbose []bool `long:"extra-verbose" description:"Use for extra verbosity"`
|
||||
} `command:"command" alias:"cm" alias:"cmd" description:"A command"`
|
||||
|
||||
HiddenCommand struct {
|
||||
ExtraVerbose []bool `long:"extra-verbose" description:"Use for extra verbosity"`
|
||||
} `command:"hidden-command" description:"A hidden command" hidden:"yes"`
|
||||
|
||||
Args struct {
|
||||
Filename string `positional-arg-name:"filename" description:"A filename"`
|
||||
Number int `positional-arg-name:"num" description:"A number"`
|
||||
HiddenInHelp float32 `positional-arg-name:"hidden-in-help" required:"yes"`
|
||||
} `positional-args:"yes"`
|
||||
}
|
||||
|
||||
func TestHelp(t *testing.T) {
|
||||
oldEnv := EnvSnapshot()
|
||||
defer oldEnv.Restore()
|
||||
os.Setenv("ENV_DEFAULT", "env-def")
|
||||
|
||||
var opts helpOptions
|
||||
p := NewNamedParser("TestHelp", HelpFlag)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
_, err := p.ParseArgs([]string{"--help"})
|
||||
|
||||
if err == nil {
|
||||
t.Fatalf("Expected help error")
|
||||
}
|
||||
|
||||
if e, ok := err.(*Error); !ok {
|
||||
t.Fatalf("Expected flags.Error, but got %T", err)
|
||||
} else {
|
||||
if e.Type != ErrHelp {
|
||||
t.Errorf("Expected flags.ErrHelp type, but got %s", e.Type)
|
||||
}
|
||||
|
||||
var expected string
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
expected = `Usage:
|
||||
TestHelp [OPTIONS] [filename] [num] [hidden-in-help] <command>
|
||||
|
||||
|
||||
Application Options:
|
||||
/v, /verbose Show verbose debug information
|
||||
/c: Call phone number
|
||||
/ptrslice: A slice of pointers to string
|
||||
/empty-description
|
||||
/default: Test default value (default:
|
||||
"Some\nvalue")
|
||||
/default-array: Test default array value (default:
|
||||
Some value, "Other\tvalue")
|
||||
/default-map: Testdefault map value (default:
|
||||
some:value, another:value)
|
||||
/env-default1: Test env-default1 value (default:
|
||||
Some value) [%ENV_DEFAULT%]
|
||||
/env-default2: Test env-default2 value
|
||||
[%ENV_DEFAULT%]
|
||||
/opt-with-arg-name:something Option with named argument
|
||||
/opt-with-choices:choice[dog|cat] Option with choices
|
||||
|
||||
Other Options:
|
||||
/s: A slice of strings (default: some,
|
||||
value)
|
||||
/intmap: A map from string to int (default:
|
||||
a:1)
|
||||
|
||||
Subgroup:
|
||||
/sip.opt: This is a subgroup option
|
||||
|
||||
Subsubgroup:
|
||||
/sip.sap.opt: This is a subsubgroup option
|
||||
|
||||
Help Options:
|
||||
/? Show this help message
|
||||
/h, /help Show this help message
|
||||
|
||||
Arguments:
|
||||
filename: A filename
|
||||
num: A number
|
||||
|
||||
Available commands:
|
||||
command A command (aliases: cm, cmd)
|
||||
`
|
||||
} else {
|
||||
expected = `Usage:
|
||||
TestHelp [OPTIONS] [filename] [num] [hidden-in-help] <command>
|
||||
|
||||
Application Options:
|
||||
-v, --verbose Show verbose debug information
|
||||
-c= Call phone number
|
||||
--ptrslice= A slice of pointers to string
|
||||
--empty-description
|
||||
--default= Test default value (default:
|
||||
"Some\nvalue")
|
||||
--default-array= Test default array value (default:
|
||||
Some value, "Other\tvalue")
|
||||
--default-map= Testdefault map value (default:
|
||||
some:value, another:value)
|
||||
--env-default1= Test env-default1 value (default:
|
||||
Some value) [$ENV_DEFAULT]
|
||||
--env-default2= Test env-default2 value
|
||||
[$ENV_DEFAULT]
|
||||
--opt-with-arg-name=something Option with named argument
|
||||
--opt-with-choices=choice[dog|cat] Option with choices
|
||||
|
||||
Other Options:
|
||||
-s= A slice of strings (default: some,
|
||||
value)
|
||||
--intmap= A map from string to int (default:
|
||||
a:1)
|
||||
|
||||
Subgroup:
|
||||
--sip.opt= This is a subgroup option
|
||||
|
||||
Subsubgroup:
|
||||
--sip.sap.opt= This is a subsubgroup option
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
|
||||
Arguments:
|
||||
filename: A filename
|
||||
num: A number
|
||||
|
||||
Available commands:
|
||||
command A command (aliases: cm, cmd)
|
||||
`
|
||||
}
|
||||
|
||||
assertDiff(t, e.Message, expected, "help message")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMan(t *testing.T) {
|
||||
oldEnv := EnvSnapshot()
|
||||
defer oldEnv.Restore()
|
||||
os.Setenv("ENV_DEFAULT", "env-def")
|
||||
|
||||
var opts helpOptions
|
||||
p := NewNamedParser("TestMan", HelpFlag)
|
||||
p.ShortDescription = "Test manpage generation"
|
||||
p.LongDescription = "This is a somewhat `longer' description of what this does"
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
p.Commands()[0].LongDescription = "Longer `command' description"
|
||||
|
||||
var buf bytes.Buffer
|
||||
p.WriteManPage(&buf)
|
||||
|
||||
got := buf.String()
|
||||
|
||||
tt := time.Now()
|
||||
|
||||
var envDefaultName string
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
envDefaultName = "%ENV_DEFAULT%"
|
||||
} else {
|
||||
envDefaultName = "$ENV_DEFAULT"
|
||||
}
|
||||
|
||||
expected := fmt.Sprintf(`.TH TestMan 1 "%s"
|
||||
.SH NAME
|
||||
TestMan \- Test manpage generation
|
||||
.SH SYNOPSIS
|
||||
\fBTestMan\fP [OPTIONS]
|
||||
.SH DESCRIPTION
|
||||
This is a somewhat \fBlonger\fP description of what this does
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\fB\-v\fR, \fB\-\-verbose\fR\fP
|
||||
Show verbose debug information
|
||||
.TP
|
||||
\fB\fB\-c\fR\fP
|
||||
Call phone number
|
||||
.TP
|
||||
\fB\fB\-\-ptrslice\fR\fP
|
||||
A slice of pointers to string
|
||||
.TP
|
||||
\fB\fB\-\-empty-description\fR\fP
|
||||
.TP
|
||||
\fB\fB\-\-default\fR <default: \fI"Some\\nvalue"\fR>\fP
|
||||
Test default value
|
||||
.TP
|
||||
\fB\fB\-\-default-array\fR <default: \fI"Some value", "Other\\tvalue"\fR>\fP
|
||||
Test default array value
|
||||
.TP
|
||||
\fB\fB\-\-default-map\fR <default: \fI"some:value", "another:value"\fR>\fP
|
||||
Testdefault map value
|
||||
.TP
|
||||
\fB\fB\-\-env-default1\fR <default: \fI"Some value"\fR>\fP
|
||||
Test env-default1 value
|
||||
.TP
|
||||
\fB\fB\-\-env-default2\fR <default: \fI%s\fR>\fP
|
||||
Test env-default2 value
|
||||
.TP
|
||||
\fB\fB\-\-opt-with-arg-name\fR \fIsomething\fR\fP
|
||||
Option with named argument
|
||||
.TP
|
||||
\fB\fB\-\-opt-with-choices\fR \fIchoice\fR\fP
|
||||
Option with choices
|
||||
.TP
|
||||
\fB\fB\-s\fR <default: \fI"some", "value"\fR>\fP
|
||||
A slice of strings
|
||||
.TP
|
||||
\fB\fB\-\-intmap\fR <default: \fI"a:1"\fR>\fP
|
||||
A map from string to int
|
||||
.TP
|
||||
\fB\fB\-\-sip.opt\fR\fP
|
||||
This is a subgroup option
|
||||
.TP
|
||||
\fB\fB\-\-sip.sap.opt\fR\fP
|
||||
This is a subsubgroup option
|
||||
.SH COMMANDS
|
||||
.SS command
|
||||
A command
|
||||
|
||||
Longer \fBcommand\fP description
|
||||
|
||||
\fBUsage\fP: TestMan [OPTIONS] command [command-OPTIONS]
|
||||
.TP
|
||||
|
||||
\fBAliases\fP: cm, cmd
|
||||
|
||||
.TP
|
||||
\fB\fB\-\-extra-verbose\fR\fP
|
||||
Use for extra verbosity
|
||||
`, tt.Format("2 January 2006"), envDefaultName)
|
||||
|
||||
assertDiff(t, got, expected, "man page")
|
||||
}
|
||||
|
||||
type helpCommandNoOptions struct {
|
||||
Command struct {
|
||||
} `command:"command" description:"A command"`
|
||||
}
|
||||
|
||||
func TestHelpCommand(t *testing.T) {
|
||||
oldEnv := EnvSnapshot()
|
||||
defer oldEnv.Restore()
|
||||
os.Setenv("ENV_DEFAULT", "env-def")
|
||||
|
||||
var opts helpCommandNoOptions
|
||||
p := NewNamedParser("TestHelpCommand", HelpFlag)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
_, err := p.ParseArgs([]string{"command", "--help"})
|
||||
|
||||
if err == nil {
|
||||
t.Fatalf("Expected help error")
|
||||
}
|
||||
|
||||
if e, ok := err.(*Error); !ok {
|
||||
t.Fatalf("Expected flags.Error, but got %T", err)
|
||||
} else {
|
||||
if e.Type != ErrHelp {
|
||||
t.Errorf("Expected flags.ErrHelp type, but got %s", e.Type)
|
||||
}
|
||||
|
||||
var expected string
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
expected = `Usage:
|
||||
TestHelpCommand [OPTIONS] command
|
||||
|
||||
Help Options:
|
||||
/? Show this help message
|
||||
/h, /help Show this help message
|
||||
`
|
||||
} else {
|
||||
expected = `Usage:
|
||||
TestHelpCommand [OPTIONS] command
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
`
|
||||
}
|
||||
|
||||
assertDiff(t, e.Message, expected, "help message")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHelpDefaults(t *testing.T) {
|
||||
var expected string
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
expected = `Usage:
|
||||
TestHelpDefaults [OPTIONS]
|
||||
|
||||
Application Options:
|
||||
/with-default: With default (default: default-value)
|
||||
/without-default: Without default
|
||||
/with-programmatic-default: With programmatic default (default:
|
||||
default-value)
|
||||
|
||||
Help Options:
|
||||
/? Show this help message
|
||||
/h, /help Show this help message
|
||||
`
|
||||
} else {
|
||||
expected = `Usage:
|
||||
TestHelpDefaults [OPTIONS]
|
||||
|
||||
Application Options:
|
||||
--with-default= With default (default: default-value)
|
||||
--without-default= Without default
|
||||
--with-programmatic-default= With programmatic default (default:
|
||||
default-value)
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
`
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
Args []string
|
||||
Output string
|
||||
}{
|
||||
{
|
||||
Args: []string{"-h"},
|
||||
Output: expected,
|
||||
},
|
||||
{
|
||||
Args: []string{"--with-default", "other-value", "--with-programmatic-default", "other-value", "-h"},
|
||||
Output: expected,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts struct {
|
||||
WithDefault string `long:"with-default" default:"default-value" description:"With default"`
|
||||
WithoutDefault string `long:"without-default" description:"Without default"`
|
||||
WithProgrammaticDefault string `long:"with-programmatic-default" description:"With programmatic default"`
|
||||
}
|
||||
|
||||
opts.WithProgrammaticDefault = "default-value"
|
||||
|
||||
p := NewNamedParser("TestHelpDefaults", HelpFlag)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
_, err := p.ParseArgs(test.Args)
|
||||
|
||||
if err == nil {
|
||||
t.Fatalf("Expected help error")
|
||||
}
|
||||
|
||||
if e, ok := err.(*Error); !ok {
|
||||
t.Fatalf("Expected flags.Error, but got %T", err)
|
||||
} else {
|
||||
if e.Type != ErrHelp {
|
||||
t.Errorf("Expected flags.ErrHelp type, but got %s", e.Type)
|
||||
}
|
||||
|
||||
assertDiff(t, e.Message, test.Output, "help message")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestHelpRestArgs(t *testing.T) {
|
||||
opts := struct {
|
||||
Verbose bool `short:"v"`
|
||||
}{}
|
||||
|
||||
p := NewNamedParser("TestHelpDefaults", HelpFlag)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
retargs, err := p.ParseArgs([]string{"-h", "-v", "rest"})
|
||||
|
||||
if err == nil {
|
||||
t.Fatalf("Expected help error")
|
||||
}
|
||||
|
||||
assertStringArray(t, retargs, []string{"-v", "rest"})
|
||||
}
|
||||
|
||||
func TestWrapText(t *testing.T) {
|
||||
s := "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
|
||||
|
||||
got := wrapText(s, 60, " ")
|
||||
expected := `Lorem ipsum dolor sit amet, consectetur adipisicing elit,
|
||||
sed do eiusmod tempor incididunt ut labore et dolore magna
|
||||
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
|
||||
ullamco laboris nisi ut aliquip ex ea commodo consequat.
|
||||
Duis aute irure dolor in reprehenderit in voluptate velit
|
||||
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
|
||||
occaecat cupidatat non proident, sunt in culpa qui officia
|
||||
deserunt mollit anim id est laborum.`
|
||||
|
||||
assertDiff(t, got, expected, "wrapped text")
|
||||
}
|
||||
|
||||
func TestWrapParagraph(t *testing.T) {
|
||||
s := "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\n"
|
||||
s += "Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.\n\n"
|
||||
s += "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.\n\n"
|
||||
s += "Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\n"
|
||||
|
||||
got := wrapText(s, 60, " ")
|
||||
expected := `Lorem ipsum dolor sit amet, consectetur adipisicing elit,
|
||||
sed do eiusmod tempor incididunt ut labore et dolore magna
|
||||
aliqua.
|
||||
|
||||
Ut enim ad minim veniam, quis nostrud exercitation ullamco
|
||||
laboris nisi ut aliquip ex ea commodo consequat.
|
||||
|
||||
Duis aute irure dolor in reprehenderit in voluptate velit
|
||||
esse cillum dolore eu fugiat nulla pariatur.
|
||||
|
||||
Excepteur sint occaecat cupidatat non proident, sunt in
|
||||
culpa qui officia deserunt mollit anim id est laborum.
|
||||
`
|
||||
|
||||
assertDiff(t, got, expected, "wrapped paragraph")
|
||||
}
|
593
vendor/github.com/jessevdk/go-flags/ini.go
generated
vendored
Normal file
@ -0,0 +1,593 @@
package flags

import (
	"bufio"
	"fmt"
	"io"
	"os"
	"reflect"
	"sort"
	"strconv"
	"strings"
)

// IniError contains location information on where an error occured.
|
||||
type IniError struct {
|
||||
// The error message.
|
||||
Message string
|
||||
|
||||
// The filename of the file in which the error occurred.
|
||||
File string
|
||||
|
||||
// The line number at which the error occurred.
|
||||
LineNumber uint
|
||||
}
|
||||
|
||||
// Error provides a "file:line: message" formatted message of the ini error.
|
||||
func (x *IniError) Error() string {
|
||||
return fmt.Sprintf(
|
||||
"%s:%d: %s",
|
||||
x.File,
|
||||
x.LineNumber,
|
||||
x.Message,
|
||||
)
|
||||
}
|
||||
|
||||
// IniOptions for writing
|
||||
type IniOptions uint
|
||||
|
||||
const (
|
||||
// IniNone indicates no options.
|
||||
IniNone IniOptions = 0
|
||||
|
||||
// IniIncludeDefaults indicates that default values should be written.
|
||||
IniIncludeDefaults = 1 << iota
|
||||
|
||||
// IniCommentDefaults indicates that if IniIncludeDefaults is used
|
||||
// options with default values are written but commented out.
|
||||
IniCommentDefaults
|
||||
|
||||
// IniIncludeComments indicates that comments containing the description
|
||||
// of an option should be written.
|
||||
IniIncludeComments
|
||||
|
||||
// IniDefault provides a default set of options.
|
||||
IniDefault = IniIncludeComments
|
||||
)
|
||||
|
||||
// IniParser is a utility to read and write flags options from and to ini
|
||||
// formatted strings.
|
||||
type IniParser struct {
|
||||
parser *Parser
|
||||
}
|
||||
|
||||
type iniValue struct {
|
||||
Name string
|
||||
Value string
|
||||
Quoted bool
|
||||
LineNumber uint
|
||||
}
|
||||
|
||||
type iniSection []iniValue
|
||||
|
||||
type ini struct {
|
||||
File string
|
||||
Sections map[string]iniSection
|
||||
}
|
||||
|
||||
// NewIniParser creates a new ini parser for a given Parser.
|
||||
func NewIniParser(p *Parser) *IniParser {
|
||||
return &IniParser{
|
||||
parser: p,
|
||||
}
|
||||
}
|
||||
|
||||
// IniParse is a convenience function to parse command line options with default
|
||||
// settings from an ini formatted file. The provided data is a pointer to a struct
|
||||
// representing the default option group (named "Application Options"). For
|
||||
// more control, use flags.NewParser.
|
||||
func IniParse(filename string, data interface{}) error {
|
||||
p := NewParser(data, Default)
|
||||
|
||||
return NewIniParser(p).ParseFile(filename)
|
||||
}
|
||||
|
||||
// ParseFile parses flags from an ini formatted file. See Parse for more
|
||||
// information on the ini file format. The returned errors can be of the type
|
||||
// flags.Error or flags.IniError.
|
||||
func (i *IniParser) ParseFile(filename string) error {
|
||||
i.parser.clearIsSet()
|
||||
|
||||
ini, err := readIniFromFile(filename)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return i.parse(ini)
|
||||
}
|
||||
|
||||
// Parse parses flags from an ini format. You can use ParseFile as a
|
||||
// convenience function to parse from a filename instead of a general
|
||||
// io.Reader.
|
||||
//
|
||||
// The format of the ini file is as follows:
|
||||
//
|
||||
// [Option group name]
|
||||
// option = value
|
||||
//
|
||||
// Each section in the ini file represents an option group or command in the
|
||||
// flags parser. The default flags parser option group (i.e. when using
|
||||
// flags.Parse) is named 'Application Options'. The ini option name is matched
|
||||
// in the following order:
|
||||
//
|
||||
// 1. Compared to the ini-name tag on the option struct field (if present)
|
||||
// 2. Compared to the struct field name
|
||||
// 3. Compared to the option long name (if present)
|
||||
// 4. Compared to the option short name (if present)
|
||||
//
|
||||
// Sections for nested groups and commands can be addressed using a dot `.'
|
||||
// namespacing notation (i.e [subcommand.Options]). Group section names are
|
||||
// matched case insensitive.
|
||||
//
|
||||
// The returned errors can be of the type flags.Error or flags.IniError.
|
||||
func (i *IniParser) Parse(reader io.Reader) error {
|
||||
i.parser.clearIsSet()
|
||||
|
||||
ini, err := readIni(reader, "")
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return i.parse(ini)
|
||||
}
|
||||
|
||||
// WriteFile writes the flags as ini format into a file. See WriteIni
|
||||
// for more information. The returned error occurs when the specified file
|
||||
// could not be opened for writing.
|
||||
func (i *IniParser) WriteFile(filename string, options IniOptions) error {
|
||||
return writeIniToFile(i, filename, options)
|
||||
}
|
||||
|
||||
// Write writes the current values of all the flags to an ini format.
|
||||
// See Parse for more information on the ini file format. You typically
|
||||
// call this only after settings have been parsed since the default values of each
|
||||
// option are stored just before parsing the flags (this is only relevant when
|
||||
// IniIncludeDefaults is _not_ set in options).
|
||||
func (i *IniParser) Write(writer io.Writer, options IniOptions) {
|
||||
writeIni(i, writer, options)
|
||||
}
|
||||
|
||||
func readFullLine(reader *bufio.Reader) (string, error) {
|
||||
var line []byte
|
||||
|
||||
for {
|
||||
l, more, err := reader.ReadLine()
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if line == nil && !more {
|
||||
return string(l), nil
|
||||
}
|
||||
|
||||
line = append(line, l...)
|
||||
|
||||
if !more {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return string(line), nil
|
||||
}
|
||||
|
||||
func optionIniName(option *Option) string {
|
||||
name := option.tag.Get("_read-ini-name")
|
||||
|
||||
if len(name) != 0 {
|
||||
return name
|
||||
}
|
||||
|
||||
name = option.tag.Get("ini-name")
|
||||
|
||||
if len(name) != 0 {
|
||||
return name
|
||||
}
|
||||
|
||||
return option.field.Name
|
||||
}
|
||||
|
||||
func writeGroupIni(cmd *Command, group *Group, namespace string, writer io.Writer, options IniOptions) {
|
||||
var sname string
|
||||
|
||||
if len(namespace) != 0 {
|
||||
sname = namespace
|
||||
}
|
||||
|
||||
if cmd.Group != group && len(group.ShortDescription) != 0 {
|
||||
if len(sname) != 0 {
|
||||
sname += "."
|
||||
}
|
||||
|
||||
sname += group.ShortDescription
|
||||
}
|
||||
|
||||
sectionwritten := false
|
||||
comments := (options & IniIncludeComments) != IniNone
|
||||
|
||||
for _, option := range group.options {
|
||||
if option.isFunc() || option.Hidden {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(option.tag.Get("no-ini")) != 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
val := option.value
|
||||
|
||||
if (options&IniIncludeDefaults) == IniNone && option.valueIsDefault() {
|
||||
continue
|
||||
}
|
||||
|
||||
if !sectionwritten {
|
||||
fmt.Fprintf(writer, "[%s]\n", sname)
|
||||
sectionwritten = true
|
||||
}
|
||||
|
||||
if comments && len(option.Description) != 0 {
|
||||
fmt.Fprintf(writer, "; %s\n", option.Description)
|
||||
}
|
||||
|
||||
oname := optionIniName(option)
|
||||
|
||||
commentOption := (options&(IniIncludeDefaults|IniCommentDefaults)) == IniIncludeDefaults|IniCommentDefaults && option.valueIsDefault()
|
||||
|
||||
kind := val.Type().Kind()
|
||||
switch kind {
|
||||
case reflect.Slice:
|
||||
kind = val.Type().Elem().Kind()
|
||||
|
||||
if val.Len() == 0 {
|
||||
writeOption(writer, oname, kind, "", "", true, option.iniQuote)
|
||||
} else {
|
||||
for idx := 0; idx < val.Len(); idx++ {
|
||||
v, _ := convertToString(val.Index(idx), option.tag)
|
||||
|
||||
writeOption(writer, oname, kind, "", v, commentOption, option.iniQuote)
|
||||
}
|
||||
}
|
||||
case reflect.Map:
|
||||
kind = val.Type().Elem().Kind()
|
||||
|
||||
if val.Len() == 0 {
|
||||
writeOption(writer, oname, kind, "", "", true, option.iniQuote)
|
||||
} else {
|
||||
mkeys := val.MapKeys()
|
||||
keys := make([]string, len(val.MapKeys()))
|
||||
kkmap := make(map[string]reflect.Value)
|
||||
|
||||
for i, k := range mkeys {
|
||||
keys[i], _ = convertToString(k, option.tag)
|
||||
kkmap[keys[i]] = k
|
||||
}
|
||||
|
||||
sort.Strings(keys)
|
||||
|
||||
for _, k := range keys {
|
||||
v, _ := convertToString(val.MapIndex(kkmap[k]), option.tag)
|
||||
|
||||
writeOption(writer, oname, kind, k, v, commentOption, option.iniQuote)
|
||||
}
|
||||
}
|
||||
default:
|
||||
v, _ := convertToString(val, option.tag)
|
||||
|
||||
writeOption(writer, oname, kind, "", v, commentOption, option.iniQuote)
|
||||
}
|
||||
|
||||
if comments {
|
||||
fmt.Fprintln(writer)
|
||||
}
|
||||
}
|
||||
|
||||
if sectionwritten && !comments {
|
||||
fmt.Fprintln(writer)
|
||||
}
|
||||
}
|
||||
|
||||
func writeOption(writer io.Writer, optionName string, optionType reflect.Kind, optionKey string, optionValue string, commentOption bool, forceQuote bool) {
|
||||
if forceQuote || (optionType == reflect.String && !isPrint(optionValue)) {
|
||||
optionValue = strconv.Quote(optionValue)
|
||||
}
|
||||
|
||||
comment := ""
|
||||
if commentOption {
|
||||
comment = "; "
|
||||
}
|
||||
|
||||
fmt.Fprintf(writer, "%s%s =", comment, optionName)
|
||||
|
||||
if optionKey != "" {
|
||||
fmt.Fprintf(writer, " %s:%s", optionKey, optionValue)
|
||||
} else if optionValue != "" {
|
||||
fmt.Fprintf(writer, " %s", optionValue)
|
||||
}
|
||||
|
||||
fmt.Fprintln(writer)
|
||||
}
|
||||
|
||||
func writeCommandIni(command *Command, namespace string, writer io.Writer, options IniOptions) {
|
||||
command.eachGroup(func(group *Group) {
|
||||
if !group.Hidden {
|
||||
writeGroupIni(command, group, namespace, writer, options)
|
||||
}
|
||||
})
|
||||
|
||||
for _, c := range command.commands {
|
||||
var nns string
|
||||
|
||||
if c.Hidden {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(namespace) != 0 {
|
||||
nns = c.Name + "." + nns
|
||||
} else {
|
||||
nns = c.Name
|
||||
}
|
||||
|
||||
writeCommandIni(c, nns, writer, options)
|
||||
}
|
||||
}
|
||||
|
||||
func writeIni(parser *IniParser, writer io.Writer, options IniOptions) {
|
||||
writeCommandIni(parser.parser.Command, "", writer, options)
|
||||
}
|
||||
|
||||
func writeIniToFile(parser *IniParser, filename string, options IniOptions) error {
|
||||
file, err := os.Create(filename)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer file.Close()
|
||||
|
||||
writeIni(parser, file, options)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func readIniFromFile(filename string) (*ini, error) {
|
||||
file, err := os.Open(filename)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer file.Close()
|
||||
|
||||
return readIni(file, filename)
|
||||
}
|
||||
|
||||
func readIni(contents io.Reader, filename string) (*ini, error) {
|
||||
ret := &ini{
|
||||
File: filename,
|
||||
Sections: make(map[string]iniSection),
|
||||
}
|
||||
|
||||
reader := bufio.NewReader(contents)
|
||||
|
||||
// Empty global section
|
||||
section := make(iniSection, 0, 10)
|
||||
sectionname := ""
|
||||
|
||||
ret.Sections[sectionname] = section
|
||||
|
||||
var lineno uint
|
||||
|
||||
for {
|
||||
line, err := readFullLine(reader)
|
||||
|
||||
if err == io.EOF {
|
||||
break
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
lineno++
|
||||
line = strings.TrimSpace(line)
|
||||
|
||||
// Skip empty lines and lines starting with ; (comments)
|
||||
if len(line) == 0 || line[0] == ';' || line[0] == '#' {
|
||||
continue
|
||||
}
|
||||
|
||||
if line[0] == '[' {
|
||||
if line[0] != '[' || line[len(line)-1] != ']' {
|
||||
return nil, &IniError{
|
||||
Message: "malformed section header",
|
||||
File: filename,
|
||||
LineNumber: lineno,
|
||||
}
|
||||
}
|
||||
|
||||
name := strings.TrimSpace(line[1 : len(line)-1])
|
||||
|
||||
if len(name) == 0 {
|
||||
return nil, &IniError{
|
||||
Message: "empty section name",
|
||||
File: filename,
|
||||
LineNumber: lineno,
|
||||
}
|
||||
}
|
||||
|
||||
sectionname = name
|
||||
section = ret.Sections[name]
|
||||
|
||||
if section == nil {
|
||||
section = make(iniSection, 0, 10)
|
||||
ret.Sections[name] = section
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse option here
|
||||
keyval := strings.SplitN(line, "=", 2)
|
||||
|
||||
if len(keyval) != 2 {
|
||||
return nil, &IniError{
|
||||
Message: fmt.Sprintf("malformed key=value (%s)", line),
|
||||
File: filename,
|
||||
LineNumber: lineno,
|
||||
}
|
||||
}
|
||||
|
||||
name := strings.TrimSpace(keyval[0])
|
||||
value := strings.TrimSpace(keyval[1])
|
||||
quoted := false
|
||||
|
||||
if len(value) != 0 && value[0] == '"' {
|
||||
if v, err := strconv.Unquote(value); err == nil {
|
||||
value = v
|
||||
|
||||
quoted = true
|
||||
} else {
|
||||
return nil, &IniError{
|
||||
Message: err.Error(),
|
||||
File: filename,
|
||||
LineNumber: lineno,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
section = append(section, iniValue{
|
||||
Name: name,
|
||||
Value: value,
|
||||
Quoted: quoted,
|
||||
LineNumber: lineno,
|
||||
})
|
||||
|
||||
ret.Sections[sectionname] = section
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (i *IniParser) matchingGroups(name string) []*Group {
|
||||
if len(name) == 0 {
|
||||
var ret []*Group
|
||||
|
||||
i.parser.eachGroup(func(g *Group) {
|
||||
ret = append(ret, g)
|
||||
})
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
g := i.parser.groupByName(name)
|
||||
|
||||
if g != nil {
|
||||
return []*Group{g}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *IniParser) parse(ini *ini) error {
|
||||
p := i.parser
|
||||
|
||||
var quotesLookup = make(map[*Option]bool)
|
||||
|
||||
for name, section := range ini.Sections {
|
||||
groups := i.matchingGroups(name)
|
||||
|
||||
if len(groups) == 0 {
|
||||
return newErrorf(ErrUnknownGroup, "could not find option group `%s'", name)
|
||||
}
|
||||
|
||||
for _, inival := range section {
|
||||
var opt *Option
|
||||
|
||||
for _, group := range groups {
|
||||
opt = group.optionByName(inival.Name, func(o *Option, n string) bool {
|
||||
return strings.ToLower(o.tag.Get("ini-name")) == strings.ToLower(n)
|
||||
})
|
||||
|
||||
if opt != nil && len(opt.tag.Get("no-ini")) != 0 {
|
||||
opt = nil
|
||||
}
|
||||
|
||||
if opt != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if opt == nil {
|
||||
if (p.Options & IgnoreUnknown) == None {
|
||||
return &IniError{
|
||||
Message: fmt.Sprintf("unknown option: %s", inival.Name),
|
||||
File: ini.File,
|
||||
LineNumber: inival.LineNumber,
|
||||
}
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
pval := &inival.Value
|
||||
|
||||
if !opt.canArgument() && len(inival.Value) == 0 {
|
||||
pval = nil
|
||||
} else {
|
||||
if opt.value.Type().Kind() == reflect.Map {
|
||||
parts := strings.SplitN(inival.Value, ":", 2)
|
||||
|
||||
// only handle unquoting
|
||||
if len(parts) == 2 && parts[1][0] == '"' {
|
||||
if v, err := strconv.Unquote(parts[1]); err == nil {
|
||||
parts[1] = v
|
||||
|
||||
inival.Quoted = true
|
||||
} else {
|
||||
return &IniError{
|
||||
Message: err.Error(),
|
||||
File: ini.File,
|
||||
LineNumber: inival.LineNumber,
|
||||
}
|
||||
}
|
||||
|
||||
s := parts[0] + ":" + parts[1]
|
||||
|
||||
pval = &s
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := opt.set(pval); err != nil {
|
||||
return &IniError{
|
||||
Message: err.Error(),
|
||||
File: ini.File,
|
||||
LineNumber: inival.LineNumber,
|
||||
}
|
||||
}
|
||||
|
||||
// either all INI values are quoted or only values who need quoting
|
||||
if _, ok := quotesLookup[opt]; !inival.Quoted || !ok {
|
||||
quotesLookup[opt] = inival.Quoted
|
||||
}
|
||||
|
||||
opt.tag.Set("_read-ini-name", inival.Name)
|
||||
}
|
||||
}
|
||||
|
||||
for opt, quoted := range quotesLookup {
|
||||
opt.iniQuote = quoted
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
950
vendor/github.com/jessevdk/go-flags/ini_test.go
generated
vendored
Normal file
@ -0,0 +1,950 @@
package flags

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"os"
	"reflect"
	"strings"
	"testing"
)

func TestWriteIni(t *testing.T) {
|
||||
oldEnv := EnvSnapshot()
|
||||
defer oldEnv.Restore()
|
||||
os.Setenv("ENV_DEFAULT", "env-def")
|
||||
|
||||
var tests = []struct {
|
||||
args []string
|
||||
options IniOptions
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
[]string{"-vv", "--intmap=a:2", "--intmap", "b:3", "filename", "0", "3.14", "command"},
|
||||
IniDefault,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
[Other Options]
|
||||
; A map from string to int
|
||||
int-map = a:2
|
||||
int-map = b:3
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
[]string{"-vv", "--intmap=a:2", "--intmap", "b:3", "filename", "0", "3.14", "command"},
|
||||
IniDefault | IniIncludeDefaults,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
; A slice of pointers to string
|
||||
; PtrSlice =
|
||||
|
||||
EmptyDescription = false
|
||||
|
||||
; Test default value
|
||||
Default = "Some\nvalue"
|
||||
|
||||
; Test default array value
|
||||
DefaultArray = Some value
|
||||
DefaultArray = "Other\tvalue"
|
||||
|
||||
; Testdefault map value
|
||||
DefaultMap = another:value
|
||||
DefaultMap = some:value
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
; Option with named argument
|
||||
OptionWithArgName =
|
||||
|
||||
; Option with choices
|
||||
OptionWithChoices =
|
||||
|
||||
; Option only available in ini
|
||||
only-ini =
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
StringSlice = some
|
||||
StringSlice = value
|
||||
|
||||
; A map from string to int
|
||||
int-map = a:2
|
||||
int-map = b:3
|
||||
|
||||
[Subgroup]
|
||||
; This is a subgroup option
|
||||
Opt =
|
||||
|
||||
[Subsubgroup]
|
||||
; This is a subsubgroup option
|
||||
Opt =
|
||||
|
||||
[command]
|
||||
; Use for extra verbosity
|
||||
; ExtraVerbose =
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
[]string{"filename", "0", "3.14", "command"},
|
||||
IniDefault | IniIncludeDefaults | IniCommentDefaults,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
; verbose =
|
||||
|
||||
; A slice of pointers to string
|
||||
; PtrSlice =
|
||||
|
||||
; EmptyDescription = false
|
||||
|
||||
; Test default value
|
||||
; Default = "Some\nvalue"
|
||||
|
||||
; Test default array value
|
||||
; DefaultArray = Some value
|
||||
; DefaultArray = "Other\tvalue"
|
||||
|
||||
; Testdefault map value
|
||||
; DefaultMap = another:value
|
||||
; DefaultMap = some:value
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
; Option with named argument
|
||||
; OptionWithArgName =
|
||||
|
||||
; Option with choices
|
||||
; OptionWithChoices =
|
||||
|
||||
; Option only available in ini
|
||||
; only-ini =
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
; StringSlice = some
|
||||
; StringSlice = value
|
||||
|
||||
; A map from string to int
|
||||
; int-map = a:1
|
||||
|
||||
[Subgroup]
|
||||
; This is a subgroup option
|
||||
; Opt =
|
||||
|
||||
[Subsubgroup]
|
||||
; This is a subsubgroup option
|
||||
; Opt =
|
||||
|
||||
[command]
|
||||
; Use for extra verbosity
|
||||
; ExtraVerbose =
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
[]string{"--default=New value", "--default-array=New value", "--default-map=new:value", "filename", "0", "3.14", "command"},
|
||||
IniDefault | IniIncludeDefaults | IniCommentDefaults,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
; verbose =
|
||||
|
||||
; A slice of pointers to string
|
||||
; PtrSlice =
|
||||
|
||||
; EmptyDescription = false
|
||||
|
||||
; Test default value
|
||||
Default = New value
|
||||
|
||||
; Test default array value
|
||||
DefaultArray = New value
|
||||
|
||||
; Testdefault map value
|
||||
DefaultMap = new:value
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
; Option with named argument
|
||||
; OptionWithArgName =
|
||||
|
||||
; Option with choices
|
||||
; OptionWithChoices =
|
||||
|
||||
; Option only available in ini
|
||||
; only-ini =
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
; StringSlice = some
|
||||
; StringSlice = value
|
||||
|
||||
; A map from string to int
|
||||
; int-map = a:1
|
||||
|
||||
[Subgroup]
|
||||
; This is a subgroup option
|
||||
; Opt =
|
||||
|
||||
[Subsubgroup]
|
||||
; This is a subsubgroup option
|
||||
; Opt =
|
||||
|
||||
[command]
|
||||
; Use for extra verbosity
|
||||
; ExtraVerbose =
|
||||
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts helpOptions
|
||||
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
_, err := p.ParseArgs(test.args)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
|
||||
inip := NewIniParser(p)
|
||||
|
||||
var b bytes.Buffer
|
||||
inip.Write(&b, test.options)
|
||||
|
||||
got := b.String()
|
||||
expected := test.expected
|
||||
|
||||
msg := fmt.Sprintf("with arguments %+v and ini options %b", test.args, test.options)
|
||||
assertDiff(t, got, expected, msg)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadIni_flagEquivalent(t *testing.T) {
|
||||
type options struct {
|
||||
Opt1 bool `long:"opt1"`
|
||||
|
||||
Group1 struct {
|
||||
Opt2 bool `long:"opt2"`
|
||||
} `group:"group1"`
|
||||
|
||||
Group2 struct {
|
||||
Opt3 bool `long:"opt3"`
|
||||
} `group:"group2" namespace:"ns1"`
|
||||
|
||||
Cmd1 struct {
|
||||
Opt4 bool `long:"opt4"`
|
||||
Opt5 bool `long:"foo.opt5"`
|
||||
|
||||
Group1 struct {
|
||||
Opt6 bool `long:"opt6"`
|
||||
Opt7 bool `long:"foo.opt7"`
|
||||
} `group:"group1"`
|
||||
|
||||
Group2 struct {
|
||||
Opt8 bool `long:"opt8"`
|
||||
} `group:"group2" namespace:"ns1"`
|
||||
} `command:"cmd1"`
|
||||
}
|
||||
|
||||
a := `
|
||||
opt1=true
|
||||
|
||||
[group1]
|
||||
opt2=true
|
||||
|
||||
[group2]
|
||||
ns1.opt3=true
|
||||
|
||||
[cmd1]
|
||||
opt4=true
|
||||
foo.opt5=true
|
||||
|
||||
[cmd1.group1]
|
||||
opt6=true
|
||||
foo.opt7=true
|
||||
|
||||
[cmd1.group2]
|
||||
ns1.opt8=true
|
||||
`
|
||||
b := `
|
||||
opt1=true
|
||||
opt2=true
|
||||
ns1.opt3=true
|
||||
|
||||
[cmd1]
|
||||
opt4=true
|
||||
foo.opt5=true
|
||||
opt6=true
|
||||
foo.opt7=true
|
||||
ns1.opt8=true
|
||||
`
|
||||
|
||||
parse := func(readIni string) (opts options, writeIni string) {
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
inip := NewIniParser(p)
|
||||
err := inip.Parse(strings.NewReader(readIni))
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %s\n\nFile:\n%s", err, readIni)
|
||||
}
|
||||
|
||||
var b bytes.Buffer
|
||||
inip.Write(&b, Default)
|
||||
|
||||
return opts, b.String()
|
||||
}
|
||||
|
||||
aOpt, aIni := parse(a)
|
||||
bOpt, bIni := parse(b)
|
||||
|
||||
assertDiff(t, aIni, bIni, "")
|
||||
if !reflect.DeepEqual(aOpt, bOpt) {
|
||||
t.Errorf("not equal")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadIni(t *testing.T) {
|
||||
var opts helpOptions
|
||||
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
inip := NewIniParser(p)
|
||||
|
||||
inic := `
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
DefaultMap = another:"value\n1"
|
||||
DefaultMap = some:value 2
|
||||
|
||||
[Application Options]
|
||||
; A slice of pointers to string
|
||||
; PtrSlice =
|
||||
|
||||
; Test default value
|
||||
Default = "New\nvalue"
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = New value
|
||||
|
||||
[Other Options]
|
||||
# A slice of strings
|
||||
StringSlice = "some\nvalue"
|
||||
StringSlice = another value
|
||||
|
||||
; A map from string to int
|
||||
int-map = a:2
|
||||
int-map = b:3
|
||||
|
||||
`
|
||||
|
||||
b := strings.NewReader(inic)
|
||||
err := inip.Parse(b)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %s", err)
|
||||
}
|
||||
|
||||
assertBoolArray(t, opts.Verbose, []bool{true, true})
|
||||
|
||||
if v := map[string]string{"another": "value\n1", "some": "value 2"}; !reflect.DeepEqual(opts.DefaultMap, v) {
|
||||
t.Fatalf("Expected %#v for DefaultMap but got %#v", v, opts.DefaultMap)
|
||||
}
|
||||
|
||||
assertString(t, opts.Default, "New\nvalue")
|
||||
|
||||
assertString(t, opts.EnvDefault1, "New value")
|
||||
|
||||
assertStringArray(t, opts.Other.StringSlice, []string{"some\nvalue", "another value"})
|
||||
|
||||
if v, ok := opts.Other.IntMap["a"]; !ok {
|
||||
t.Errorf("Expected \"a\" in Other.IntMap")
|
||||
} else if v != 2 {
|
||||
t.Errorf("Expected Other.IntMap[\"a\"] = 2, but got %v", v)
|
||||
}
|
||||
|
||||
if v, ok := opts.Other.IntMap["b"]; !ok {
|
||||
t.Errorf("Expected \"b\" in Other.IntMap")
|
||||
} else if v != 3 {
|
||||
t.Errorf("Expected Other.IntMap[\"b\"] = 3, but got %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAndWriteIni(t *testing.T) {
|
||||
var tests = []struct {
|
||||
options IniOptions
|
||||
read string
|
||||
write string
|
||||
}{
|
||||
{
|
||||
IniIncludeComments,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
; Test default value
|
||||
Default = "quote me"
|
||||
|
||||
; Test default array value
|
||||
DefaultArray = 1
|
||||
DefaultArray = "2"
|
||||
DefaultArray = 3
|
||||
|
||||
; Testdefault map value
|
||||
; DefaultMap =
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
; StringSlice =
|
||||
|
||||
; A map from string to int
|
||||
int-map = a:2
|
||||
int-map = b:"3"
|
||||
|
||||
`,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
; Test default value
|
||||
Default = "quote me"
|
||||
|
||||
; Test default array value
|
||||
DefaultArray = 1
|
||||
DefaultArray = 2
|
||||
DefaultArray = 3
|
||||
|
||||
; Testdefault map value
|
||||
; DefaultMap =
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
; StringSlice =
|
||||
|
||||
; A map from string to int
|
||||
int-map = a:2
|
||||
int-map = b:3
|
||||
|
||||
`,
|
||||
},
|
||||
{
|
||||
IniIncludeComments,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
; Test default value
|
||||
Default = "quote me"
|
||||
|
||||
; Test default array value
|
||||
DefaultArray = "1"
|
||||
DefaultArray = "2"
|
||||
DefaultArray = "3"
|
||||
|
||||
; Testdefault map value
|
||||
; DefaultMap =
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
; StringSlice =
|
||||
|
||||
; A map from string to int
|
||||
int-map = a:"2"
|
||||
int-map = b:"3"
|
||||
|
||||
`,
|
||||
`[Application Options]
|
||||
; Show verbose debug information
|
||||
verbose = true
|
||||
verbose = true
|
||||
|
||||
; Test default value
|
||||
Default = "quote me"
|
||||
|
||||
; Test default array value
|
||||
DefaultArray = "1"
|
||||
DefaultArray = "2"
|
||||
DefaultArray = "3"
|
||||
|
||||
; Testdefault map value
|
||||
; DefaultMap =
|
||||
|
||||
; Test env-default1 value
|
||||
EnvDefault1 = env-def
|
||||
|
||||
; Test env-default2 value
|
||||
EnvDefault2 = env-def
|
||||
|
||||
[Other Options]
|
||||
; A slice of strings
|
||||
; StringSlice =
|
||||
|
||||
; A map from string to int
|
||||
int-map = a:"2"
|
||||
int-map = b:"3"
|
||||
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts helpOptions
|
||||
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
inip := NewIniParser(p)
|
||||
|
||||
read := strings.NewReader(test.read)
|
||||
err := inip.Parse(read)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %s", err)
|
||||
}
|
||||
|
||||
var write bytes.Buffer
|
||||
inip.Write(&write, test.options)
|
||||
|
||||
got := write.String()
|
||||
|
||||
msg := fmt.Sprintf("with ini options %b", test.options)
|
||||
assertDiff(t, got, test.write, msg)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadIniWrongQuoting(t *testing.T) {
|
||||
var tests = []struct {
|
||||
iniFile string
|
||||
lineNumber uint
|
||||
}{
|
||||
{
|
||||
iniFile: `Default = "New\nvalue`,
|
||||
lineNumber: 1,
|
||||
},
|
||||
{
|
||||
iniFile: `StringSlice = "New\nvalue`,
|
||||
lineNumber: 1,
|
||||
},
|
||||
{
|
||||
iniFile: `StringSlice = "New\nvalue"
|
||||
StringSlice = "Second\nvalue`,
|
||||
lineNumber: 2,
|
||||
},
|
||||
{
|
||||
iniFile: `DefaultMap = some:"value`,
|
||||
lineNumber: 1,
|
||||
},
|
||||
{
|
||||
iniFile: `DefaultMap = some:value
|
||||
DefaultMap = another:"value`,
|
||||
lineNumber: 2,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts helpOptions
|
||||
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
inip := NewIniParser(p)
|
||||
|
||||
inic := test.iniFile
|
||||
|
||||
b := strings.NewReader(inic)
|
||||
err := inip.Parse(b)
|
||||
|
||||
if err == nil {
|
||||
t.Fatalf("Expect error")
|
||||
}
|
||||
|
||||
iniError := err.(*IniError)
|
||||
|
||||
if iniError.LineNumber != test.lineNumber {
|
||||
t.Fatalf("Expect error on line %d", test.lineNumber)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIniCommands(t *testing.T) {
|
||||
var opts struct {
|
||||
Value string `short:"v" long:"value"`
|
||||
|
||||
Add struct {
|
||||
Name int `short:"n" long:"name" ini-name:"AliasName"`
|
||||
|
||||
Other struct {
|
||||
O string `short:"o" long:"other"`
|
||||
} `group:"Other Options"`
|
||||
} `command:"add"`
|
||||
}
|
||||
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
inip := NewIniParser(p)
|
||||
|
||||
inic := `[Application Options]
|
||||
value = some value
|
||||
|
||||
[add]
|
||||
AliasName = 5
|
||||
|
||||
[add.Other Options]
|
||||
other = subgroup
|
||||
|
||||
`
|
||||
|
||||
b := strings.NewReader(inic)
|
||||
err := inip.Parse(b)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %s", err)
|
||||
}
|
||||
|
||||
assertString(t, opts.Value, "some value")
|
||||
|
||||
if opts.Add.Name != 5 {
|
||||
t.Errorf("Expected opts.Add.Name to be 5, but got %v", opts.Add.Name)
|
||||
}
|
||||
|
||||
assertString(t, opts.Add.Other.O, "subgroup")
|
||||
|
||||
// Test writing it back
|
||||
buf := &bytes.Buffer{}
|
||||
|
||||
inip.Write(buf, IniDefault)
|
||||
|
||||
assertDiff(t, buf.String(), inic, "ini contents")
|
||||
}
|
||||
|
||||
func TestIniNoIni(t *testing.T) {
|
||||
var opts struct {
|
||||
NoValue string `short:"n" long:"novalue" no-ini:"yes"`
|
||||
Value string `short:"v" long:"value"`
|
||||
}
|
||||
|
||||
p := NewNamedParser("TestIni", Default)
|
||||
p.AddGroup("Application Options", "The application options", &opts)
|
||||
|
||||
inip := NewIniParser(p)
|
||||
|
||||
// read INI
|
||||
inic := `[Application Options]
|
||||
novalue = some value
|
||||
value = some other value
|
||||
`
|
||||
|
||||
b := strings.NewReader(inic)
|
||||
err := inip.Parse(b)
|
||||
|
||||
if err == nil {
|
||||
t.Fatalf("Expected error")
|
||||
}
|
||||
|
||||
iniError := err.(*IniError)
|
||||
|
||||
if v := uint(2); iniError.LineNumber != v {
|
||||
t.Errorf("Expected opts.Add.Name to be %d, but got %d", v, iniError.LineNumber)
|
||||
}
|
||||
|
||||
if v := "unknown option: novalue"; iniError.Message != v {
|
||||
t.Errorf("Expected opts.Add.Name to be %s, but got %s", v, iniError.Message)
|
||||
}
|
||||
|
||||
// write INI
|
||||
opts.NoValue = "some value"
|
||||
opts.Value = "some other value"
|
||||
|
||||
file, err := ioutil.TempFile("", "")
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot create temporary file: %s", err)
|
||||
}
|
||||
defer os.Remove(file.Name())
|
||||
|
||||
err = inip.WriteFile(file.Name(), IniIncludeDefaults)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not write ini file: %s", err)
|
||||
}
|
||||
|
||||
found, err := ioutil.ReadFile(file.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("Could not read written ini file: %s", err)
|
||||
}
|
||||
|
||||
expected := "[Application Options]\nValue = some other value\n\n"
|
||||
|
||||
assertDiff(t, string(found), expected, "ini content")
|
||||
}
|
||||
|
||||
func TestIniParse(t *testing.T) {
|
||||
file, err := ioutil.TempFile("", "")
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot create temporary file: %s", err)
|
||||
}
|
||||
defer os.Remove(file.Name())
|
||||
|
||||
_, err = file.WriteString("value = 123")
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot write to temporary file: %s", err)
|
||||
}
|
||||
|
||||
file.Close()
|
||||
|
||||
var opts struct {
|
||||
Value int `long:"value"`
|
||||
}
|
||||
|
||||
err = IniParse(file.Name(), &opts)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not parse ini: %s", err)
|
||||
}
|
||||
|
||||
if opts.Value != 123 {
|
||||
t.Fatalf("Expected Value to be \"123\" but was \"%d\"", opts.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIniCliOverrides(t *testing.T) {
|
||||
file, err := ioutil.TempFile("", "")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot create temporary file: %s", err)
|
||||
}
|
||||
|
||||
defer os.Remove(file.Name())
|
||||
|
||||
_, err = file.WriteString("values = 123\n")
|
||||
_, err = file.WriteString("values = 456\n")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot write to temporary file: %s", err)
|
||||
}
|
||||
|
||||
file.Close()
|
||||
|
||||
var opts struct {
|
||||
Values []int `long:"values"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
err = NewIniParser(p).ParseFile(file.Name())
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Could not parse ini: %s", err)
|
||||
}
|
||||
|
||||
_, err = p.ParseArgs([]string{"--values", "111", "--values", "222"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse arguments: %s", err)
|
||||
}
|
||||
|
||||
if len(opts.Values) != 2 {
|
||||
t.Fatalf("Expected Values to contain two elements, but got %d", len(opts.Values))
|
||||
}
|
||||
|
||||
if opts.Values[0] != 111 {
|
||||
t.Fatalf("Expected Values[0] to be 111, but got '%d'", opts.Values[0])
|
||||
}
|
||||
|
||||
if opts.Values[1] != 222 {
|
||||
t.Fatalf("Expected Values[0] to be 222, but got '%d'", opts.Values[1])
|
||||
}
|
||||
}
|
||||
|
||||
func TestIniOverrides(t *testing.T) {
|
||||
file, err := ioutil.TempFile("", "")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot create temporary file: %s", err)
|
||||
}
|
||||
|
||||
defer os.Remove(file.Name())
|
||||
|
||||
_, err = file.WriteString("value-with-default = \"ini-value\"\n")
|
||||
_, err = file.WriteString("value-with-default-override-cli = \"ini-value\"\n")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot write to temporary file: %s", err)
|
||||
}
|
||||
|
||||
file.Close()
|
||||
|
||||
var opts struct {
|
||||
ValueWithDefault string `long:"value-with-default" default:"value"`
|
||||
ValueWithDefaultOverrideCli string `long:"value-with-default-override-cli" default:"value"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
err = NewIniParser(p).ParseFile(file.Name())
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Could not parse ini: %s", err)
|
||||
}
|
||||
|
||||
_, err = p.ParseArgs([]string{"--value-with-default-override-cli", "cli-value"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse arguments: %s", err)
|
||||
}
|
||||
|
||||
assertString(t, opts.ValueWithDefault, "ini-value")
|
||||
assertString(t, opts.ValueWithDefaultOverrideCli, "cli-value")
|
||||
}
|
||||
|
||||
func TestWriteFile(t *testing.T) {
|
||||
file, err := ioutil.TempFile("", "")
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot create temporary file: %s", err)
|
||||
}
|
||||
defer os.Remove(file.Name())
|
||||
|
||||
var opts struct {
|
||||
Value int `long:"value"`
|
||||
}
|
||||
|
||||
opts.Value = 123
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
ini := NewIniParser(p)
|
||||
|
||||
err = ini.WriteFile(file.Name(), IniIncludeDefaults)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not write ini file: %s", err)
|
||||
}
|
||||
|
||||
found, err := ioutil.ReadFile(file.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("Could not read written ini file: %s", err)
|
||||
}
|
||||
|
||||
expected := "[Application Options]\nValue = 123\n\n"
|
||||
|
||||
assertDiff(t, string(found), expected, "ini content")
|
||||
}
|
||||
|
||||
func TestOverwriteRequiredOptions(t *testing.T) {
|
||||
var tests = []struct {
|
||||
args []string
|
||||
expected []string
|
||||
}{
|
||||
{
|
||||
args: []string{"--value", "from CLI"},
|
||||
expected: []string{
|
||||
"from CLI",
|
||||
"from default",
|
||||
},
|
||||
},
|
||||
{
|
||||
args: []string{"--value", "from CLI", "--default", "from CLI"},
|
||||
expected: []string{
|
||||
"from CLI",
|
||||
"from CLI",
|
||||
},
|
||||
},
|
||||
{
|
||||
args: []string{"--config", "no file name"},
|
||||
expected: []string{
|
||||
"from INI",
|
||||
"from INI",
|
||||
},
|
||||
},
|
||||
{
|
||||
args: []string{"--value", "from CLI before", "--default", "from CLI before", "--config", "no file name"},
|
||||
expected: []string{
|
||||
"from INI",
|
||||
"from INI",
|
||||
},
|
||||
},
|
||||
{
|
||||
args: []string{"--value", "from CLI before", "--default", "from CLI before", "--config", "no file name", "--value", "from CLI after", "--default", "from CLI after"},
|
||||
expected: []string{
|
||||
"from CLI after",
|
||||
"from CLI after",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts struct {
|
||||
Config func(s string) error `long:"config" no-ini:"true"`
|
||||
Value string `long:"value" required:"true"`
|
||||
Default string `long:"default" required:"true" default:"from default"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
|
||||
opts.Config = func(s string) error {
|
||||
ini := NewIniParser(p)
|
||||
|
||||
return ini.Parse(bytes.NewBufferString("value = from INI\ndefault = from INI"))
|
||||
}
|
||||
|
||||
_, err := p.ParseArgs(test.args)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error %s with args %+v", err, test.args)
|
||||
}
|
||||
|
||||
if opts.Value != test.expected[0] {
|
||||
t.Fatalf("Expected Value to be \"%s\" but was \"%s\" with args %+v", test.expected[0], opts.Value, test.args)
|
||||
}
|
||||
|
||||
if opts.Default != test.expected[1] {
|
||||
t.Fatalf("Expected Default to be \"%s\" but was \"%s\" with args %+v", test.expected[1], opts.Default, test.args)
|
||||
}
|
||||
}
|
||||
}
|
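A minimal sketch (not from the vendored source) of the pattern the INI tests above exercise: settings are loaded from an INI file first, and command line arguments parsed afterwards override them. The file name app.ini is illustrative.

package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Value string `long:"value" default:"from default"`
	}

	p := flags.NewParser(&opts, flags.Default)

	// Values read from the INI file override the struct tag default.
	if err := flags.NewIniParser(p).ParseFile("app.ini"); err != nil {
		fmt.Fprintln(os.Stderr, "ini:", err)
	}

	// Command line arguments parsed afterwards override the INI values.
	if _, err := p.ParseArgs(os.Args[1:]); err != nil {
		os.Exit(1)
	}

	fmt.Println(opts.Value)
}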
85 vendor/github.com/jessevdk/go-flags/long_test.go generated vendored Normal file
@@ -0,0 +1,85 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLong(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `long:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "--value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLongArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `long:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "--value", "value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestLongArgEqual(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `long:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "--value=value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestLongDefault(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `long:"value" default:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts)
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestLongOptional(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `long:"value" optional:"yes" optional-value:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "--value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestLongOptionalArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `long:"value" optional:"yes" optional-value:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "--value", "no")
|
||||
|
||||
assertStringArray(t, ret, []string{"no"})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestLongOptionalArgEqual(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `long:"value" optional:"yes" optional-value:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "--value=value", "no")
|
||||
|
||||
assertStringArray(t, ret, []string{"no"})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
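A short sketch (not from the vendored source) of the optional-value behavior that TestLongOptional and TestLongOptionalArg above demonstrate: a bare --value takes the optional-value, and a following token is treated as a leftover positional argument rather than the option's argument.

package main

import (
	"fmt"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Value string `long:"value" optional:"yes" optional-value:"value"`
	}

	// Mirrors TestLongOptionalArg: opts.Value becomes "value" and "no" is
	// returned as a remaining argument.
	rest, err := flags.ParseArgs(&opts, []string{"--value", "no"})
	if err != nil {
		panic(err)
	}

	fmt.Println(opts.Value, rest)
}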
194 vendor/github.com/jessevdk/go-flags/man.go generated vendored Normal file
@@ -0,0 +1,194 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"runtime"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func manQuote(s string) string {
|
||||
return strings.Replace(s, "\\", "\\\\", -1)
|
||||
}
|
||||
|
||||
func formatForMan(wr io.Writer, s string) {
|
||||
for {
|
||||
idx := strings.IndexRune(s, '`')
|
||||
|
||||
if idx < 0 {
|
||||
fmt.Fprintf(wr, "%s", manQuote(s))
|
||||
break
|
||||
}
|
||||
|
||||
fmt.Fprintf(wr, "%s", manQuote(s[:idx]))
|
||||
|
||||
s = s[idx+1:]
|
||||
idx = strings.IndexRune(s, '\'')
|
||||
|
||||
if idx < 0 {
|
||||
fmt.Fprintf(wr, "%s", manQuote(s))
|
||||
break
|
||||
}
|
||||
|
||||
fmt.Fprintf(wr, "\\fB%s\\fP", manQuote(s[:idx]))
|
||||
s = s[idx+1:]
|
||||
}
|
||||
}
|
||||
|
||||
func writeManPageOptions(wr io.Writer, grp *Group) {
|
||||
grp.eachGroup(func(group *Group) {
|
||||
if group.Hidden {
|
||||
return
|
||||
}
|
||||
|
||||
for _, opt := range group.options {
|
||||
if !opt.canCli() || opt.Hidden {
|
||||
continue
|
||||
}
|
||||
|
||||
fmt.Fprintln(wr, ".TP")
|
||||
fmt.Fprintf(wr, "\\fB")
|
||||
|
||||
if opt.ShortName != 0 {
|
||||
fmt.Fprintf(wr, "\\fB\\-%c\\fR", opt.ShortName)
|
||||
}
|
||||
|
||||
if len(opt.LongName) != 0 {
|
||||
if opt.ShortName != 0 {
|
||||
fmt.Fprintf(wr, ", ")
|
||||
}
|
||||
|
||||
fmt.Fprintf(wr, "\\fB\\-\\-%s\\fR", manQuote(opt.LongNameWithNamespace()))
|
||||
}
|
||||
|
||||
if len(opt.ValueName) != 0 || opt.OptionalArgument {
|
||||
if opt.OptionalArgument {
|
||||
fmt.Fprintf(wr, " [\\fI%s=%s\\fR]", manQuote(opt.ValueName), manQuote(strings.Join(quoteV(opt.OptionalValue), ", ")))
|
||||
} else {
|
||||
fmt.Fprintf(wr, " \\fI%s\\fR", manQuote(opt.ValueName))
|
||||
}
|
||||
}
|
||||
|
||||
if len(opt.Default) != 0 {
|
||||
fmt.Fprintf(wr, " <default: \\fI%s\\fR>", manQuote(strings.Join(quoteV(opt.Default), ", ")))
|
||||
} else if len(opt.EnvDefaultKey) != 0 {
|
||||
if runtime.GOOS == "windows" {
|
||||
fmt.Fprintf(wr, " <default: \\fI%%%s%%\\fR>", manQuote(opt.EnvDefaultKey))
|
||||
} else {
|
||||
fmt.Fprintf(wr, " <default: \\fI$%s\\fR>", manQuote(opt.EnvDefaultKey))
|
||||
}
|
||||
}
|
||||
|
||||
if opt.Required {
|
||||
fmt.Fprintf(wr, " (\\fIrequired\\fR)")
|
||||
}
|
||||
|
||||
fmt.Fprintln(wr, "\\fP")
|
||||
|
||||
if len(opt.Description) != 0 {
|
||||
formatForMan(wr, opt.Description)
|
||||
fmt.Fprintln(wr, "")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func writeManPageSubcommands(wr io.Writer, name string, root *Command) {
|
||||
commands := root.sortedVisibleCommands()
|
||||
|
||||
for _, c := range commands {
|
||||
var nn string
|
||||
|
||||
if c.Hidden {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(name) != 0 {
|
||||
nn = name + " " + c.Name
|
||||
} else {
|
||||
nn = c.Name
|
||||
}
|
||||
|
||||
writeManPageCommand(wr, nn, root, c)
|
||||
}
|
||||
}
|
||||
|
||||
func writeManPageCommand(wr io.Writer, name string, root *Command, command *Command) {
|
||||
fmt.Fprintf(wr, ".SS %s\n", name)
|
||||
fmt.Fprintln(wr, command.ShortDescription)
|
||||
|
||||
if len(command.LongDescription) > 0 {
|
||||
fmt.Fprintln(wr, "")
|
||||
|
||||
cmdstart := fmt.Sprintf("The %s command", manQuote(command.Name))
|
||||
|
||||
if strings.HasPrefix(command.LongDescription, cmdstart) {
|
||||
fmt.Fprintf(wr, "The \\fI%s\\fP command", manQuote(command.Name))
|
||||
|
||||
formatForMan(wr, command.LongDescription[len(cmdstart):])
|
||||
fmt.Fprintln(wr, "")
|
||||
} else {
|
||||
formatForMan(wr, command.LongDescription)
|
||||
fmt.Fprintln(wr, "")
|
||||
}
|
||||
}
|
||||
|
||||
var usage string
|
||||
if us, ok := command.data.(Usage); ok {
|
||||
usage = us.Usage()
|
||||
} else if command.hasCliOptions() {
|
||||
usage = fmt.Sprintf("[%s-OPTIONS]", command.Name)
|
||||
}
|
||||
|
||||
var pre string
|
||||
if root.hasCliOptions() {
|
||||
pre = fmt.Sprintf("%s [OPTIONS] %s", root.Name, command.Name)
|
||||
} else {
|
||||
pre = fmt.Sprintf("%s %s", root.Name, command.Name)
|
||||
}
|
||||
|
||||
if len(usage) > 0 {
|
||||
fmt.Fprintf(wr, "\n\\fBUsage\\fP: %s %s\n.TP\n", manQuote(pre), manQuote(usage))
|
||||
}
|
||||
|
||||
if len(command.Aliases) > 0 {
|
||||
fmt.Fprintf(wr, "\n\\fBAliases\\fP: %s\n\n", manQuote(strings.Join(command.Aliases, ", ")))
|
||||
}
|
||||
|
||||
writeManPageOptions(wr, command.Group)
|
||||
writeManPageSubcommands(wr, name, command)
|
||||
}
|
||||
|
||||
// WriteManPage writes a basic man page in groff format to the specified
|
||||
// writer.
|
||||
func (p *Parser) WriteManPage(wr io.Writer) {
|
||||
t := time.Now()
|
||||
|
||||
fmt.Fprintf(wr, ".TH %s 1 \"%s\"\n", manQuote(p.Name), t.Format("2 January 2006"))
|
||||
fmt.Fprintln(wr, ".SH NAME")
|
||||
fmt.Fprintf(wr, "%s \\- %s\n", manQuote(p.Name), manQuote(p.ShortDescription))
|
||||
fmt.Fprintln(wr, ".SH SYNOPSIS")
|
||||
|
||||
usage := p.Usage
|
||||
|
||||
if len(usage) == 0 {
|
||||
usage = "[OPTIONS]"
|
||||
}
|
||||
|
||||
fmt.Fprintf(wr, "\\fB%s\\fP %s\n", manQuote(p.Name), manQuote(usage))
|
||||
fmt.Fprintln(wr, ".SH DESCRIPTION")
|
||||
|
||||
formatForMan(wr, p.LongDescription)
|
||||
fmt.Fprintln(wr, "")
|
||||
|
||||
fmt.Fprintln(wr, ".SH OPTIONS")
|
||||
|
||||
writeManPageOptions(wr, p.Command.Group)
|
||||
|
||||
if len(p.visibleCommands()) > 0 {
|
||||
fmt.Fprintln(wr, ".SH COMMANDS")
|
||||
|
||||
writeManPageSubcommands(wr, "", p.Command)
|
||||
}
|
||||
}
|
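A brief sketch (not from the vendored source) of how WriteManPage might be driven; the parser name, group and option are illustrative.

package main

import (
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose bool `short:"v" long:"verbose" description:"Show verbose output"`
	}

	p := flags.NewNamedParser("example", flags.Default)
	p.AddGroup("Application Options", "The application options", &opts)

	// Emits a basic groff man page describing the registered options.
	p.WriteManPage(os.Stdout)
}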
97 vendor/github.com/jessevdk/go-flags/marshal_test.go generated vendored Normal file
@@ -0,0 +1,97 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type marshalled string
|
||||
|
||||
func (m *marshalled) UnmarshalFlag(value string) error {
|
||||
if value == "yes" {
|
||||
*m = "true"
|
||||
} else if value == "no" {
|
||||
*m = "false"
|
||||
} else {
|
||||
return fmt.Errorf("`%s' is not a valid value, please specify `yes' or `no'", value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m marshalled) MarshalFlag() (string, error) {
|
||||
if m == "true" {
|
||||
return "yes", nil
|
||||
}
|
||||
|
||||
return "no", nil
|
||||
}
|
||||
|
||||
type marshalledError bool
|
||||
|
||||
func (m marshalledError) MarshalFlag() (string, error) {
|
||||
return "", newErrorf(ErrMarshal, "Failed to marshal")
|
||||
}
|
||||
|
||||
func TestUnmarshal(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value marshalled `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v=yes")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if opts.Value != "true" {
|
||||
t.Errorf("Expected Value to be \"true\"")
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshalDefault(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value marshalled `short:"v" default:"yes"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts)
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if opts.Value != "true" {
|
||||
t.Errorf("Expected Value to be \"true\"")
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshalOptional(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value marshalled `short:"v" optional:"yes" optional-value:"yes"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if opts.Value != "true" {
|
||||
t.Errorf("Expected Value to be \"true\"")
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshalError(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value marshalled `short:"v"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrMarshal, fmt.Sprintf("invalid argument for flag `%cv' (expected flags.marshalled): `invalid' is not a valid value, please specify `yes' or `no'", defaultShortOptDelimiter), &opts, "-vinvalid")
|
||||
}
|
||||
|
||||
func TestMarshalError(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value marshalledError `short:"v"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, Default)
|
||||
o := p.Command.Groups()[0].Options()[0]
|
||||
|
||||
_, err := convertToString(o.value, o.tag)
|
||||
|
||||
assertError(t, err, ErrMarshal, "Failed to marshal")
|
||||
}
|
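A sketch (not from the vendored source) of using the Unmarshaler hook exercised above from application code; the yesno type simply mirrors the marshalled test type.

package main

import (
	"fmt"

	flags "github.com/jessevdk/go-flags"
)

// yesno converts "yes"/"no" flag values to "true"/"false", like the
// marshalled type in the test above.
type yesno string

func (m *yesno) UnmarshalFlag(value string) error {
	switch value {
	case "yes":
		*m = "true"
	case "no":
		*m = "false"
	default:
		return fmt.Errorf("`%s' is not a valid value, please specify `yes' or `no'", value)
	}
	return nil
}

func main() {
	var opts struct {
		Value yesno `short:"v"`
	}

	if _, err := flags.ParseArgs(&opts, []string{"-v=yes"}); err != nil {
		panic(err)
	}

	fmt.Println(opts.Value)
}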
140 vendor/github.com/jessevdk/go-flags/multitag.go generated vendored Normal file
@@ -0,0 +1,140 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type multiTag struct {
|
||||
value string
|
||||
cache map[string][]string
|
||||
}
|
||||
|
||||
func newMultiTag(v string) multiTag {
|
||||
return multiTag{
|
||||
value: v,
|
||||
}
|
||||
}
|
||||
|
||||
func (x *multiTag) scan() (map[string][]string, error) {
|
||||
v := x.value
|
||||
|
||||
ret := make(map[string][]string)
|
||||
|
||||
// This is mostly copied from reflect.StructTag.Get
|
||||
for v != "" {
|
||||
i := 0
|
||||
|
||||
// Skip whitespace
|
||||
for i < len(v) && v[i] == ' ' {
|
||||
i++
|
||||
}
|
||||
|
||||
v = v[i:]
|
||||
|
||||
if v == "" {
|
||||
break
|
||||
}
|
||||
|
||||
// Scan to colon to find key
|
||||
i = 0
|
||||
|
||||
for i < len(v) && v[i] != ' ' && v[i] != ':' && v[i] != '"' {
|
||||
i++
|
||||
}
|
||||
|
||||
if i >= len(v) {
|
||||
return nil, newErrorf(ErrTag, "expected `:' after key name, but got end of tag (in `%v`)", x.value)
|
||||
}
|
||||
|
||||
if v[i] != ':' {
|
||||
return nil, newErrorf(ErrTag, "expected `:' after key name, but got `%v' (in `%v`)", v[i], x.value)
|
||||
}
|
||||
|
||||
if i+1 >= len(v) {
|
||||
return nil, newErrorf(ErrTag, "expected `\"' to start tag value at end of tag (in `%v`)", x.value)
|
||||
}
|
||||
|
||||
if v[i+1] != '"' {
|
||||
return nil, newErrorf(ErrTag, "expected `\"' to start tag value, but got `%v' (in `%v`)", v[i+1], x.value)
|
||||
}
|
||||
|
||||
name := v[:i]
|
||||
v = v[i+1:]
|
||||
|
||||
// Scan quoted string to find value
|
||||
i = 1
|
||||
|
||||
for i < len(v) && v[i] != '"' {
|
||||
if v[i] == '\n' {
|
||||
return nil, newErrorf(ErrTag, "unexpected newline in tag value `%v' (in `%v`)", name, x.value)
|
||||
}
|
||||
|
||||
if v[i] == '\\' {
|
||||
i++
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
if i >= len(v) {
|
||||
return nil, newErrorf(ErrTag, "expected end of tag value `\"' at end of tag (in `%v`)", x.value)
|
||||
}
|
||||
|
||||
val, err := strconv.Unquote(v[:i+1])
|
||||
|
||||
if err != nil {
|
||||
return nil, newErrorf(ErrTag, "Malformed value of tag `%v:%v` => %v (in `%v`)", name, v[:i+1], err, x.value)
|
||||
}
|
||||
|
||||
v = v[i+1:]
|
||||
|
||||
ret[name] = append(ret[name], val)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (x *multiTag) Parse() error {
|
||||
vals, err := x.scan()
|
||||
x.cache = vals
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (x *multiTag) cached() map[string][]string {
|
||||
if x.cache == nil {
|
||||
cache, _ := x.scan()
|
||||
|
||||
if cache == nil {
|
||||
cache = make(map[string][]string)
|
||||
}
|
||||
|
||||
x.cache = cache
|
||||
}
|
||||
|
||||
return x.cache
|
||||
}
|
||||
|
||||
func (x *multiTag) Get(key string) string {
|
||||
c := x.cached()
|
||||
|
||||
if v, ok := c[key]; ok {
|
||||
return v[len(v)-1]
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *multiTag) GetMany(key string) []string {
|
||||
c := x.cached()
|
||||
return c[key]
|
||||
}
|
||||
|
||||
func (x *multiTag) Set(key string, value string) {
|
||||
c := x.cached()
|
||||
c[key] = []string{value}
|
||||
}
|
||||
|
||||
func (x *multiTag) SetMany(key string, value []string) {
|
||||
c := x.cached()
|
||||
c[key] = value
|
||||
}
|
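A package-internal sketch (not from the vendored source) of what the tag scanner above accepts: the same key may appear several times, Get returns the last value and GetMany returns all of them. It only compiles inside package flags because multiTag is unexported, and the tag literal is an assumption for illustration.

package flags

import "fmt"

func exampleMultiTag() {
	tag := newMultiTag(`short:"v" default:"1" default:"2"`)

	if err := tag.Parse(); err != nil {
		panic(err)
	}

	fmt.Println(tag.Get("short"))       // v
	fmt.Println(tag.GetMany("default")) // [1 2]
}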
434 vendor/github.com/jessevdk/go-flags/option.go generated vendored Normal file
@@ -0,0 +1,434 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"syscall"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Option flag information. Contains a description of the option, short and
|
||||
// long name as well as a default value and whether an argument for this
|
||||
// flag is optional.
|
||||
type Option struct {
|
||||
// The description of the option flag. This description is shown
|
||||
// automatically in the built-in help.
|
||||
Description string
|
||||
|
||||
// The short name of the option (a single character). If not 0, the
|
||||
// option flag can be 'activated' using -<ShortName>. Either ShortName
|
||||
// or LongName needs to be non-empty.
|
||||
ShortName rune
|
||||
|
||||
// The long name of the option. If not "", the option flag can be
|
||||
// activated using --<LongName>. Either ShortName or LongName needs
|
||||
// to be non-empty.
|
||||
LongName string
|
||||
|
||||
// The default value of the option.
|
||||
Default []string
|
||||
|
||||
// The optional environment default value key name.
|
||||
EnvDefaultKey string
|
||||
|
||||
// The optional delimiter string for EnvDefaultKey values.
|
||||
EnvDefaultDelim string
|
||||
|
||||
// If true, specifies that the argument to an option flag is optional.
|
||||
// When no argument to the flag is specified on the command line, the
|
||||
// value of OptionalValue will be set in the field this option represents.
|
||||
// This is only valid for non-boolean options.
|
||||
OptionalArgument bool
|
||||
|
||||
// The optional value of the option. The optional value is used when
|
||||
// the option flag is marked as having an OptionalArgument. This means
|
||||
// that when the flag is specified, but no option argument is given,
|
||||
// the value of the field this option represents will be set to
|
||||
// OptionalValue. This is only valid for non-boolean options.
|
||||
OptionalValue []string
|
||||
|
||||
// If true, the option _must_ be specified on the command line. If the
|
||||
// option is not specified, the parser will generate an ErrRequired type
|
||||
// error.
|
||||
Required bool
|
||||
|
||||
// A name for the value of an option shown in the Help as --flag [ValueName]
|
||||
ValueName string
|
||||
|
||||
// A mask value to show in the help instead of the default value. This
|
||||
// is useful for hiding sensitive information in the help, such as
|
||||
// passwords.
|
||||
DefaultMask string
|
||||
|
||||
// If non empty, only a certain set of values is allowed for an option.
|
||||
Choices []string
|
||||
|
||||
// If true, the option is not displayed in the help or man page
|
||||
Hidden bool
|
||||
|
||||
// The group which the option belongs to
|
||||
group *Group
|
||||
|
||||
// The struct field which the option represents.
|
||||
field reflect.StructField
|
||||
|
||||
// The struct field value which the option represents.
|
||||
value reflect.Value
|
||||
|
||||
// Determines if the option will be always quoted in the INI output
|
||||
iniQuote bool
|
||||
|
||||
tag multiTag
|
||||
isSet bool
|
||||
preventDefault bool
|
||||
|
||||
defaultLiteral string
|
||||
}
|
||||
|
||||
// LongNameWithNamespace returns the option's long name with the group namespaces
|
||||
// prepended by walking up the option's group tree. Namespaces and the long name
|
||||
// itself are separated by the parser's namespace delimiter. If the long name is
|
||||
// empty an empty string is returned.
|
||||
func (option *Option) LongNameWithNamespace() string {
|
||||
if len(option.LongName) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
// fetch the namespace delimiter from the parser which is always at the
|
||||
// end of the group hierarchy
|
||||
namespaceDelimiter := ""
|
||||
g := option.group
|
||||
|
||||
for {
|
||||
if p, ok := g.parent.(*Parser); ok {
|
||||
namespaceDelimiter = p.NamespaceDelimiter
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
switch i := g.parent.(type) {
|
||||
case *Command:
|
||||
g = i.Group
|
||||
case *Group:
|
||||
g = i
|
||||
}
|
||||
}
|
||||
|
||||
// concatenate long name with namespace
|
||||
longName := option.LongName
|
||||
g = option.group
|
||||
|
||||
for g != nil {
|
||||
if g.Namespace != "" {
|
||||
longName = g.Namespace + namespaceDelimiter + longName
|
||||
}
|
||||
|
||||
switch i := g.parent.(type) {
|
||||
case *Command:
|
||||
g = i.Group
|
||||
case *Group:
|
||||
g = i
|
||||
case *Parser:
|
||||
g = nil
|
||||
}
|
||||
}
|
||||
|
||||
return longName
|
||||
}
|
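A small sketch (not from the vendored source) of the namespace behavior this method implements, matching the ns1.opt3 style names used by the INI tests earlier in this commit; with the default NamespaceDelimiter of "." the nested option below is addressed as --ns.opt.

package main

import (
	"fmt"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Group struct {
			Opt bool `long:"opt"`
		} `group:"group" namespace:"ns"`
	}

	// The group namespace is prepended to the long name: --ns.opt.
	if _, err := flags.ParseArgs(&opts, []string{"--ns.opt"}); err != nil {
		panic(err)
	}

	fmt.Println(opts.Group.Opt)
}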
||||
|
||||
// String converts an option to a human friendly readable string describing the
|
||||
// option.
|
||||
func (option *Option) String() string {
|
||||
var s string
|
||||
var short string
|
||||
|
||||
if option.ShortName != 0 {
|
||||
data := make([]byte, utf8.RuneLen(option.ShortName))
|
||||
utf8.EncodeRune(data, option.ShortName)
|
||||
short = string(data)
|
||||
|
||||
if len(option.LongName) != 0 {
|
||||
s = fmt.Sprintf("%s%s, %s%s",
|
||||
string(defaultShortOptDelimiter), short,
|
||||
defaultLongOptDelimiter, option.LongNameWithNamespace())
|
||||
} else {
|
||||
s = fmt.Sprintf("%s%s", string(defaultShortOptDelimiter), short)
|
||||
}
|
||||
} else if len(option.LongName) != 0 {
|
||||
s = fmt.Sprintf("%s%s", defaultLongOptDelimiter, option.LongNameWithNamespace())
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// Value returns the option value as an interface{}.
|
||||
func (option *Option) Value() interface{} {
|
||||
return option.value.Interface()
|
||||
}
|
||||
|
||||
// IsSet returns true if option has been set
|
||||
func (option *Option) IsSet() bool {
|
||||
return option.isSet
|
||||
}
|
||||
|
||||
// Set the value of an option to the specified value. An error will be returned
|
||||
// if the specified value could not be converted to the corresponding option
|
||||
// value type.
|
||||
func (option *Option) set(value *string) error {
|
||||
kind := option.value.Type().Kind()
|
||||
|
||||
if (kind == reflect.Map || kind == reflect.Slice) && !option.isSet {
|
||||
option.empty()
|
||||
}
|
||||
|
||||
option.isSet = true
|
||||
option.preventDefault = true
|
||||
|
||||
if len(option.Choices) != 0 {
|
||||
found := false
|
||||
|
||||
for _, choice := range option.Choices {
|
||||
if choice == *value {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
allowed := strings.Join(option.Choices[0:len(option.Choices)-1], ", ")
|
||||
|
||||
if len(option.Choices) > 1 {
|
||||
allowed += " or " + option.Choices[len(option.Choices)-1]
|
||||
}
|
||||
|
||||
return newErrorf(ErrInvalidChoice,
|
||||
"Invalid value `%s' for option `%s'. Allowed values are: %s",
|
||||
*value, option, allowed)
|
||||
}
|
||||
}
|
||||
|
||||
if option.isFunc() {
|
||||
return option.call(value)
|
||||
} else if value != nil {
|
||||
return convert(*value, option.value, option.tag)
|
||||
}
|
||||
|
||||
return convert("", option.value, option.tag)
|
||||
}
|
||||
|
||||
func (option *Option) canCli() bool {
|
||||
return option.ShortName != 0 || len(option.LongName) != 0
|
||||
}
|
||||
|
||||
func (option *Option) canArgument() bool {
|
||||
if u := option.isUnmarshaler(); u != nil {
|
||||
return true
|
||||
}
|
||||
|
||||
return !option.isBool()
|
||||
}
|
||||
|
||||
func (option *Option) emptyValue() reflect.Value {
|
||||
tp := option.value.Type()
|
||||
|
||||
if tp.Kind() == reflect.Map {
|
||||
return reflect.MakeMap(tp)
|
||||
}
|
||||
|
||||
return reflect.Zero(tp)
|
||||
}
|
||||
|
||||
func (option *Option) empty() {
|
||||
if !option.isFunc() {
|
||||
option.value.Set(option.emptyValue())
|
||||
}
|
||||
}
|
||||
|
||||
func (option *Option) clearDefault() {
|
||||
usedDefault := option.Default
|
||||
|
||||
if envKey := option.EnvDefaultKey; envKey != "" {
|
||||
// os.Getenv() makes no distinction between undefined and
|
||||
// empty values, so we use syscall.Getenv()
|
||||
if value, ok := syscall.Getenv(envKey); ok {
|
||||
if option.EnvDefaultDelim != "" {
|
||||
usedDefault = strings.Split(value,
|
||||
option.EnvDefaultDelim)
|
||||
} else {
|
||||
usedDefault = []string{value}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(usedDefault) > 0 {
|
||||
option.empty()
|
||||
|
||||
for _, d := range usedDefault {
|
||||
option.set(&d)
|
||||
}
|
||||
} else {
|
||||
tp := option.value.Type()
|
||||
|
||||
switch tp.Kind() {
|
||||
case reflect.Map:
|
||||
if option.value.IsNil() {
|
||||
option.empty()
|
||||
}
|
||||
case reflect.Slice:
|
||||
if option.value.IsNil() {
|
||||
option.empty()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (option *Option) valueIsDefault() bool {
|
||||
// Check if the value of the option corresponds to its
|
||||
// default value
|
||||
emptyval := option.emptyValue()
|
||||
|
||||
checkvalptr := reflect.New(emptyval.Type())
|
||||
checkval := reflect.Indirect(checkvalptr)
|
||||
|
||||
checkval.Set(emptyval)
|
||||
|
||||
if len(option.Default) != 0 {
|
||||
for _, v := range option.Default {
|
||||
convert(v, checkval, option.tag)
|
||||
}
|
||||
}
|
||||
|
||||
return reflect.DeepEqual(option.value.Interface(), checkval.Interface())
|
||||
}
|
||||
|
||||
func (option *Option) isUnmarshaler() Unmarshaler {
|
||||
v := option.value
|
||||
|
||||
for {
|
||||
if !v.CanInterface() {
|
||||
break
|
||||
}
|
||||
|
||||
i := v.Interface()
|
||||
|
||||
if u, ok := i.(Unmarshaler); ok {
|
||||
return u
|
||||
}
|
||||
|
||||
if !v.CanAddr() {
|
||||
break
|
||||
}
|
||||
|
||||
v = v.Addr()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (option *Option) isBool() bool {
|
||||
tp := option.value.Type()
|
||||
|
||||
for {
|
||||
switch tp.Kind() {
|
||||
case reflect.Bool:
|
||||
return true
|
||||
case reflect.Slice:
|
||||
return (tp.Elem().Kind() == reflect.Bool)
|
||||
case reflect.Func:
|
||||
return tp.NumIn() == 0
|
||||
case reflect.Ptr:
|
||||
tp = tp.Elem()
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (option *Option) isFunc() bool {
|
||||
return option.value.Type().Kind() == reflect.Func
|
||||
}
|
||||
|
||||
func (option *Option) call(value *string) error {
|
||||
var retval []reflect.Value
|
||||
|
||||
if value == nil {
|
||||
retval = option.value.Call(nil)
|
||||
} else {
|
||||
tp := option.value.Type().In(0)
|
||||
|
||||
val := reflect.New(tp)
|
||||
val = reflect.Indirect(val)
|
||||
|
||||
if err := convert(*value, val, option.tag); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retval = option.value.Call([]reflect.Value{val})
|
||||
}
|
||||
|
||||
if len(retval) == 1 && retval[0].Type() == reflect.TypeOf((*error)(nil)).Elem() {
|
||||
if retval[0].Interface() == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return retval[0].Interface().(error)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (option *Option) updateDefaultLiteral() {
|
||||
defs := option.Default
|
||||
def := ""
|
||||
|
||||
if len(defs) == 0 && option.canArgument() {
|
||||
var showdef bool
|
||||
|
||||
switch option.field.Type.Kind() {
|
||||
case reflect.Func, reflect.Ptr:
|
||||
showdef = !option.value.IsNil()
|
||||
case reflect.Slice, reflect.String, reflect.Array:
|
||||
showdef = option.value.Len() > 0
|
||||
case reflect.Map:
|
||||
showdef = !option.value.IsNil() && option.value.Len() > 0
|
||||
default:
|
||||
zeroval := reflect.Zero(option.field.Type)
|
||||
showdef = !reflect.DeepEqual(zeroval.Interface(), option.value.Interface())
|
||||
}
|
||||
|
||||
if showdef {
|
||||
def, _ = convertToString(option.value, option.tag)
|
||||
}
|
||||
} else if len(defs) != 0 {
|
||||
l := len(defs) - 1
|
||||
|
||||
for i := 0; i < l; i++ {
|
||||
def += quoteIfNeeded(defs[i]) + ", "
|
||||
}
|
||||
|
||||
def += quoteIfNeeded(defs[l])
|
||||
}
|
||||
|
||||
option.defaultLiteral = def
|
||||
}
|
||||
|
||||
func (option *Option) shortAndLongName() string {
|
||||
ret := &bytes.Buffer{}
|
||||
|
||||
if option.ShortName != 0 {
|
||||
ret.WriteRune(defaultShortOptDelimiter)
|
||||
ret.WriteRune(option.ShortName)
|
||||
}
|
||||
|
||||
if len(option.LongName) != 0 {
|
||||
if option.ShortName != 0 {
|
||||
ret.WriteRune('/')
|
||||
}
|
||||
|
||||
ret.WriteString(option.LongName)
|
||||
}
|
||||
|
||||
return ret.String()
|
||||
}
|
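A sketch (not from the vendored source) of how the struct tags used throughout these tests map onto the Option fields above; only tags that appear elsewhere in this commit are used, and the option names are illustrative.

package main

import (
	"fmt"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		// These tags populate Option.LongName, Option.ShortName,
		// Option.Default and Option.Description when the parser
		// reflects over the struct.
		Name  string `short:"n" long:"name" default:"anonymous" description:"Name to use"`
		Force bool   `short:"f" long:"force" description:"Do it anyway"`
	}

	p := flags.NewParser(&opts, flags.Default)

	if _, err := p.ParseArgs([]string{"--name", "arc"}); err != nil {
		panic(err)
	}

	fmt.Println(opts.Name, opts.Force)
}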
45 vendor/github.com/jessevdk/go-flags/options_test.go generated vendored Normal file
@@ -0,0 +1,45 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPassDoubleDash(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, PassDoubleDash)
|
||||
ret, err := p.ParseArgs([]string{"-v", "--", "-v", "-g"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
assertStringArray(t, ret, []string{"-v", "-g"})
|
||||
}
|
||||
|
||||
func TestPassAfterNonOption(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
}{}
|
||||
|
||||
p := NewParser(&opts, PassAfterNonOption)
|
||||
ret, err := p.ParseArgs([]string{"-v", "arg", "-v", "-g"})
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
|
||||
assertStringArray(t, ret, []string{"arg", "-v", "-g"})
|
||||
}
|
67 vendor/github.com/jessevdk/go-flags/optstyle_other.go generated vendored Normal file
@@ -0,0 +1,67 @@
|
||||
// +build !windows
|
||||
|
||||
package flags
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultShortOptDelimiter = '-'
|
||||
defaultLongOptDelimiter = "--"
|
||||
defaultNameArgDelimiter = '='
|
||||
)
|
||||
|
||||
func argumentStartsOption(arg string) bool {
|
||||
return len(arg) > 0 && arg[0] == '-'
|
||||
}
|
||||
|
||||
func argumentIsOption(arg string) bool {
|
||||
if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' {
|
||||
return true
|
||||
}
|
||||
|
||||
if len(arg) > 2 && arg[0] == '-' && arg[1] == '-' && arg[2] != '-' {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// stripOptionPrefix returns the option without the prefix and whether or
|
||||
// not the option is a long option.
|
||||
func stripOptionPrefix(optname string) (prefix string, name string, islong bool) {
|
||||
if strings.HasPrefix(optname, "--") {
|
||||
return "--", optname[2:], true
|
||||
} else if strings.HasPrefix(optname, "-") {
|
||||
return "-", optname[1:], false
|
||||
}
|
||||
|
||||
return "", optname, false
|
||||
}
|
||||
|
||||
// splitOption attempts to split the passed option into a name and an argument.
|
||||
// When there is no argument specified, nil will be returned for it.
|
||||
func splitOption(prefix string, option string, islong bool) (string, string, *string) {
|
||||
pos := strings.Index(option, "=")
|
||||
|
||||
if (islong && pos >= 0) || (!islong && pos == 1) {
|
||||
rest := option[pos+1:]
|
||||
return option[:pos], "=", &rest
|
||||
}
|
||||
|
||||
return option, "", nil
|
||||
}
|
||||
|
||||
// addHelpGroup adds a new group that contains default help parameters.
|
||||
func (c *Command) addHelpGroup(showHelp func() error) *Group {
|
||||
var help struct {
|
||||
ShowHelp func() error `short:"h" long:"help" description:"Show this help message"`
|
||||
}
|
||||
|
||||
help.ShowHelp = showHelp
|
||||
ret, _ := c.AddGroup("Help Options", "", &help)
|
||||
ret.isBuiltinHelp = true
|
||||
|
||||
return ret
|
||||
}
|
106 vendor/github.com/jessevdk/go-flags/optstyle_windows.go generated vendored Normal file
@@ -0,0 +1,106 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Windows uses a front slash for both short and long options. Also it uses
|
||||
// a colon for the name/argument delimiter.
|
||||
const (
|
||||
defaultShortOptDelimiter = '/'
|
||||
defaultLongOptDelimiter = "/"
|
||||
defaultNameArgDelimiter = ':'
|
||||
)
|
||||
|
||||
func argumentStartsOption(arg string) bool {
|
||||
return len(arg) > 0 && (arg[0] == '-' || arg[0] == '/')
|
||||
}
|
||||
|
||||
func argumentIsOption(arg string) bool {
|
||||
// Windows-style options allow front slash for the option
|
||||
// delimiter.
|
||||
if len(arg) > 1 && arg[0] == '/' {
|
||||
return true
|
||||
}
|
||||
|
||||
if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' {
|
||||
return true
|
||||
}
|
||||
|
||||
if len(arg) > 2 && arg[0] == '-' && arg[1] == '-' && arg[2] != '-' {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// stripOptionPrefix returns the option without the prefix and whether or
|
||||
// not the option is a long option.
|
||||
func stripOptionPrefix(optname string) (prefix string, name string, islong bool) {
|
||||
// Determine if the argument is a long option or not. Windows
|
||||
// typically supports both long and short options with a single
|
||||
// front slash as the option delimiter, so handle this situation
|
||||
// nicely.
|
||||
possplit := 0
|
||||
|
||||
if strings.HasPrefix(optname, "--") {
|
||||
possplit = 2
|
||||
islong = true
|
||||
} else if strings.HasPrefix(optname, "-") {
|
||||
possplit = 1
|
||||
islong = false
|
||||
} else if strings.HasPrefix(optname, "/") {
|
||||
possplit = 1
|
||||
islong = len(optname) > 2
|
||||
}
|
||||
|
||||
return optname[:possplit], optname[possplit:], islong
|
||||
}
|
||||
|
||||
// splitOption attempts to split the passed option into a name and an argument.
|
||||
// When there is no argument specified, nil will be returned for it.
|
||||
func splitOption(prefix string, option string, islong bool) (string, string, *string) {
|
||||
if len(option) == 0 {
|
||||
return option, "", nil
|
||||
}
|
||||
|
||||
// Windows typically uses a colon for the option name and argument
|
||||
// delimiter while POSIX typically uses an equals. Support both styles,
|
||||
// but don't allow the two to be mixed. That is to say /foo:bar and
|
||||
// --foo=bar are acceptable, but /foo=bar and --foo:bar are not.
|
||||
var pos int
|
||||
var sp string
|
||||
|
||||
if prefix == "/" {
|
||||
sp = ":"
|
||||
pos = strings.Index(option, sp)
|
||||
} else if len(prefix) > 0 {
|
||||
sp = "="
|
||||
pos = strings.Index(option, sp)
|
||||
}
|
||||
|
||||
if (islong && pos >= 0) || (!islong && pos == 1) {
|
||||
rest := option[pos+1:]
|
||||
return option[:pos], sp, &rest
|
||||
}
|
||||
|
||||
return option, "", nil
|
||||
}
|
||||
|
||||
// addHelpGroup adds a new group that contains default help parameters.
|
||||
func (c *Command) addHelpGroup(showHelp func() error) *Group {
|
||||
// Windows CLI applications typically use /? for help, so make that
|
||||
// available as well as the POSIX style -h and --help.
|
||||
var help struct {
|
||||
ShowHelpWindows func() error `short:"?" description:"Show this help message"`
|
||||
ShowHelpPosix func() error `short:"h" long:"help" description:"Show this help message"`
|
||||
}
|
||||
|
||||
help.ShowHelpWindows = showHelp
|
||||
help.ShowHelpPosix = showHelp
|
||||
|
||||
ret, _ := c.AddGroup("Help Options", "", &help)
|
||||
ret.isBuiltinHelp = true
|
||||
|
||||
return ret
|
||||
}
|
652 vendor/github.com/jessevdk/go-flags/parser.go generated vendored Normal file
@@ -0,0 +1,652 @@
|
||||
// Copyright 2012 Jesse van den Kieboom. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flags
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A Parser provides command line option parsing. It can contain several
|
||||
// option groups each with their own set of options.
|
||||
type Parser struct {
|
||||
// Embedded, see Command for more information
|
||||
*Command
|
||||
|
||||
// A usage string to be displayed in the help message.
|
||||
Usage string
|
||||
|
||||
// Option flags changing the behavior of the parser.
|
||||
Options Options
|
||||
|
||||
// NamespaceDelimiter separates group namespaces and option long names
|
||||
NamespaceDelimiter string
|
||||
|
||||
// UnknownOptionsHandler is a function which gets called when the parser
|
||||
// encounters an unknown option. The function receives the unknown option
|
||||
// name, a SplitArgument which specifies its value if set with an argument
|
||||
// separator, and the remaining command line arguments.
|
||||
// It should return a new list of remaining arguments to continue parsing,
|
||||
// or an error to indicate a parse failure.
|
||||
UnknownOptionHandler func(option string, arg SplitArgument, args []string) ([]string, error)
|
||||
|
||||
// CompletionHandler is a function gets called to handle the completion of
|
||||
// items. By default, the items are printed and the application is exited.
|
||||
// You can override this default behavior by specifying a custom CompletionHandler.
|
||||
CompletionHandler func(items []Completion)
|
||||
|
||||
internalError error
|
||||
}
|
||||
|
||||
// SplitArgument represents the argument value of an option that was passed using
|
||||
// an argument separator.
|
||||
type SplitArgument interface {
|
||||
// Value returns the option's value as a string, and a boolean indicating
|
||||
// if the option was present.
|
||||
Value() (string, bool)
|
||||
}
|
||||
|
||||
type strArgument struct {
|
||||
value *string
|
||||
}
|
||||
|
||||
func (s strArgument) Value() (string, bool) {
|
||||
if s.value == nil {
|
||||
return "", false
|
||||
}
|
||||
|
||||
return *s.value, true
|
||||
}
|
||||
|
||||
// Options provides parser options that change the behavior of the option
|
||||
// parser.
|
||||
type Options uint
|
||||
|
||||
const (
|
||||
// None indicates no options.
|
||||
None Options = 0
|
||||
|
||||
// HelpFlag adds a default Help Options group to the parser containing
|
||||
// -h and --help options. When either -h or --help is specified on the
|
||||
// command line, the parser will return the special error of type
|
||||
// ErrHelp. When PrintErrors is also specified, then the help message
|
||||
// will also be automatically printed to os.Stderr.
|
||||
HelpFlag = 1 << iota
|
||||
|
||||
// PassDoubleDash passes all arguments after a double dash, --, as
|
||||
// remaining command line arguments (i.e. they will not be parsed for
|
||||
// flags).
|
||||
PassDoubleDash
|
||||
|
||||
// IgnoreUnknown ignores any unknown options and passes them as
|
||||
// remaining command line arguments instead of generating an error.
|
||||
IgnoreUnknown
|
||||
|
||||
// PrintErrors prints any errors which occurred during parsing to
|
||||
// os.Stderr.
|
||||
PrintErrors
|
||||
|
||||
// PassAfterNonOption passes all arguments after the first non option
|
||||
// as remaining command line arguments. This is equivalent to strict
|
||||
// POSIX processing.
|
||||
PassAfterNonOption
|
||||
|
||||
// Default is a convenient default set of options which should cover
|
||||
// most of the uses of the flags package.
|
||||
Default = HelpFlag | PrintErrors | PassDoubleDash
|
||||
)
|
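Options is a bit mask, so the constants above can be combined; a sketch (not from the vendored source):

package main

import (
	"fmt"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose bool `short:"v"`
	}

	// Keep the Default behavior but also pass unknown options through as
	// remaining arguments instead of failing.
	p := flags.NewParser(&opts, flags.Default|flags.IgnoreUnknown)

	rest, err := p.ParseArgs([]string{"-v", "--unknown"})
	if err != nil {
		panic(err)
	}

	fmt.Println(opts.Verbose, rest)
}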
||||
|
||||
type parseState struct {
|
||||
arg string
|
||||
args []string
|
||||
retargs []string
|
||||
positional []*Arg
|
||||
err error
|
||||
|
||||
command *Command
|
||||
lookup lookup
|
||||
}
|
||||
|
||||
// Parse is a convenience function to parse command line options with default
|
||||
// settings. The provided data is a pointer to a struct representing the
|
||||
// default option group (named "Application Options"). For more control, use
|
||||
// flags.NewParser.
|
||||
func Parse(data interface{}) ([]string, error) {
|
||||
return NewParser(data, Default).Parse()
|
||||
}
|
||||
|
||||
// ParseArgs is a convenience function to parse command line options with default
|
||||
// settings. The provided data is a pointer to a struct representing the
|
||||
// default option group (named "Application Options"). The args argument is
|
||||
// the list of command line arguments to parse. If you just want to parse the
|
||||
// default program command line arguments (i.e. os.Args), then use flags.Parse
|
||||
// instead. For more control, use flags.NewParser.
|
||||
func ParseArgs(data interface{}, args []string) ([]string, error) {
|
||||
return NewParser(data, Default).ParseArgs(args)
|
||||
}
|
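A minimal sketch (not from the vendored source) of the convenience entry point described above; the option is illustrative.

package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
	}

	// Parse reads os.Args using the Default options (help flag, error
	// printing and double dash handling) and returns any leftover
	// positional arguments.
	args, err := flags.Parse(&opts)
	if err != nil {
		os.Exit(1)
	}

	fmt.Println(len(opts.Verbose), args)
}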
||||
|
||||
// NewParser creates a new parser. It uses os.Args[0] as the application
|
||||
// name and then calls Parser.NewNamedParser (see Parser.NewNamedParser for
|
||||
// more details). The provided data is a pointer to a struct representing the
|
||||
// default option group (named "Application Options"), or nil if the default
|
||||
// group should not be added. The options parameter specifies a set of options
|
||||
// for the parser.
|
||||
func NewParser(data interface{}, options Options) *Parser {
|
||||
p := NewNamedParser(path.Base(os.Args[0]), options)
|
||||
|
||||
if data != nil {
|
||||
g, err := p.AddGroup("Application Options", "", data)
|
||||
|
||||
if err == nil {
|
||||
g.parent = p
|
||||
}
|
||||
|
||||
p.internalError = err
|
||||
}
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
// NewNamedParser creates a new parser. The appname is used to display the
|
||||
// executable name in the built-in help message. Option groups and commands can
|
||||
// be added to this parser by using AddGroup and AddCommand.
|
||||
func NewNamedParser(appname string, options Options) *Parser {
|
||||
p := &Parser{
|
||||
Command: newCommand(appname, "", "", nil),
|
||||
Options: options,
|
||||
NamespaceDelimiter: ".",
|
||||
}
|
||||
|
||||
p.Command.parent = p
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
// Parse parses the command line arguments from os.Args using Parser.ParseArgs.
|
||||
// For more detailed information see ParseArgs.
|
||||
func (p *Parser) Parse() ([]string, error) {
|
||||
return p.ParseArgs(os.Args[1:])
|
||||
}
|
||||
|
||||
// ParseArgs parses the command line arguments according to the option groups that
|
||||
// were added to the parser. On successful parsing of the arguments, the
|
||||
// remaining, non-option, arguments (if any) are returned. The returned error
|
||||
// indicates a parsing error and can be used with PrintError to display
|
||||
// contextual information on where the error occurred exactly.
|
||||
//
|
||||
// When the common help group has been added (AddHelp) and either -h or --help
|
||||
// was specified in the command line arguments, a help message will be
|
||||
// automatically printed if the PrintErrors option is enabled.
|
||||
// Furthermore, the special error type ErrHelp is returned.
|
||||
// It is up to the caller to exit the program if so desired.
|
||||
func (p *Parser) ParseArgs(args []string) ([]string, error) {
|
||||
if p.internalError != nil {
|
||||
return nil, p.internalError
|
||||
}
|
||||
|
||||
p.eachOption(func(c *Command, g *Group, option *Option) {
|
||||
option.isSet = false
|
||||
option.updateDefaultLiteral()
|
||||
})
|
||||
|
||||
// Add built-in help group to all commands if necessary
|
||||
if (p.Options & HelpFlag) != None {
|
||||
p.addHelpGroups(p.showBuiltinHelp)
|
||||
}
|
||||
|
||||
compval := os.Getenv("GO_FLAGS_COMPLETION")
|
||||
|
||||
if len(compval) != 0 {
|
||||
comp := &completion{parser: p}
|
||||
items := comp.complete(args)
|
||||
|
||||
if p.CompletionHandler != nil {
|
||||
p.CompletionHandler(items)
|
||||
} else {
|
||||
comp.print(items, compval == "verbose")
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
s := &parseState{
|
||||
args: args,
|
||||
retargs: make([]string, 0, len(args)),
|
||||
}
|
||||
|
||||
p.fillParseState(s)
|
||||
|
||||
for !s.eof() {
|
||||
arg := s.pop()
|
||||
|
||||
// When PassDoubleDash is set and we encounter a --, then
|
||||
// simply append all the rest as arguments and break out
|
||||
if (p.Options&PassDoubleDash) != None && arg == "--" {
|
||||
s.addArgs(s.args...)
|
||||
break
|
||||
}
|
||||
|
||||
if !argumentIsOption(arg) {
|
||||
// Note: this also sets s.err, so we can just check for
|
||||
// nil here and use s.err later
|
||||
if p.parseNonOption(s) != nil {
|
||||
break
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
prefix, optname, islong := stripOptionPrefix(arg)
|
||||
optname, _, argument := splitOption(prefix, optname, islong)
|
||||
|
||||
if islong {
|
||||
err = p.parseLong(s, optname, argument)
|
||||
} else {
|
||||
err = p.parseShort(s, optname, argument)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
ignoreUnknown := (p.Options & IgnoreUnknown) != None
|
||||
parseErr := wrapError(err)
|
||||
|
||||
if parseErr.Type != ErrUnknownFlag || (!ignoreUnknown && p.UnknownOptionHandler == nil) {
|
||||
s.err = parseErr
|
||||
break
|
||||
}
|
||||
|
||||
if ignoreUnknown {
|
||||
s.addArgs(arg)
|
||||
} else if p.UnknownOptionHandler != nil {
|
||||
modifiedArgs, err := p.UnknownOptionHandler(optname, strArgument{argument}, s.args)
|
||||
|
||||
if err != nil {
|
||||
s.err = err
|
||||
break
|
||||
}
|
||||
|
||||
s.args = modifiedArgs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.err == nil {
|
||||
p.eachOption(func(c *Command, g *Group, option *Option) {
|
||||
if option.preventDefault {
|
||||
return
|
||||
}
|
||||
|
||||
option.clearDefault()
|
||||
})
|
||||
|
||||
s.checkRequired(p)
|
||||
}
|
||||
|
||||
var reterr error
|
||||
|
||||
if s.err != nil {
|
||||
reterr = s.err
|
||||
} else if len(s.command.commands) != 0 && !s.command.SubcommandsOptional {
|
||||
reterr = s.estimateCommand()
|
||||
} else if cmd, ok := s.command.data.(Commander); ok {
|
||||
reterr = cmd.Execute(s.retargs)
|
||||
}
|
||||
|
||||
if reterr != nil {
|
||||
var retargs []string
|
||||
|
||||
if ourErr, ok := reterr.(*Error); !ok || ourErr.Type != ErrHelp {
|
||||
retargs = append([]string{s.arg}, s.args...)
|
||||
} else {
|
||||
retargs = s.args
|
||||
}
|
||||
|
||||
return retargs, p.printError(reterr)
|
||||
}
|
||||
|
||||
return s.retargs, nil
|
||||
}
|
||||
|
||||
func (p *parseState) eof() bool {
|
||||
return len(p.args) == 0
|
||||
}
|
||||
|
||||
func (p *parseState) pop() string {
|
||||
if p.eof() {
|
||||
return ""
|
||||
}
|
||||
|
||||
p.arg = p.args[0]
|
||||
p.args = p.args[1:]
|
||||
|
||||
return p.arg
|
||||
}
|
||||
|
||||
func (p *parseState) peek() string {
|
||||
if p.eof() {
|
||||
return ""
|
||||
}
|
||||
|
||||
return p.args[0]
|
||||
}
|
||||
|
||||
func (p *parseState) checkRequired(parser *Parser) error {
|
||||
c := parser.Command
|
||||
|
||||
var required []*Option
|
||||
|
||||
for c != nil {
|
||||
c.eachGroup(func(g *Group) {
|
||||
for _, option := range g.options {
|
||||
if !option.isSet && option.Required {
|
||||
required = append(required, option)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
c = c.Active
|
||||
}
|
||||
|
||||
if len(required) == 0 {
|
||||
if len(p.positional) > 0 {
|
||||
var reqnames []string
|
||||
|
||||
for _, arg := range p.positional {
|
||||
argRequired := (!arg.isRemaining() && p.command.ArgsRequired) || arg.Required != 0
|
||||
|
||||
if !argRequired {
|
||||
continue
|
||||
}
|
||||
|
||||
if arg.isRemaining() {
|
||||
if arg.value.Len() < arg.Required {
|
||||
var arguments string
|
||||
|
||||
if arg.Required > 1 {
|
||||
arguments = "arguments, but got only " + fmt.Sprintf("%d", arg.value.Len())
|
||||
} else {
|
||||
arguments = "argument"
|
||||
}
|
||||
|
||||
reqnames = append(reqnames, "`"+arg.Name+" (at least "+fmt.Sprintf("%d", arg.Required)+" "+arguments+")`")
|
||||
}
|
||||
} else {
|
||||
reqnames = append(reqnames, "`"+arg.Name+"`")
|
||||
}
|
||||
}
|
||||
|
||||
if len(reqnames) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var msg string
|
||||
|
||||
if len(reqnames) == 1 {
|
||||
msg = fmt.Sprintf("the required argument %s was not provided", reqnames[0])
|
||||
} else {
|
||||
msg = fmt.Sprintf("the required arguments %s and %s were not provided",
|
||||
strings.Join(reqnames[:len(reqnames)-1], ", "), reqnames[len(reqnames)-1])
|
||||
}
|
||||
|
||||
p.err = newError(ErrRequired, msg)
|
||||
return p.err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
names := make([]string, 0, len(required))
|
||||
|
||||
for _, k := range required {
|
||||
names = append(names, "`"+k.String()+"'")
|
||||
}
|
||||
|
||||
sort.Strings(names)
|
||||
|
||||
var msg string
|
||||
|
||||
if len(names) == 1 {
|
||||
msg = fmt.Sprintf("the required flag %s was not specified", names[0])
|
||||
} else {
|
||||
msg = fmt.Sprintf("the required flags %s and %s were not specified",
|
||||
strings.Join(names[:len(names)-1], ", "), names[len(names)-1])
|
||||
}
|
||||
|
||||
p.err = newError(ErrRequired, msg)
|
||||
return p.err
|
||||
}
|
||||
|
||||
func (p *parseState) estimateCommand() error {
|
||||
commands := p.command.sortedVisibleCommands()
|
||||
cmdnames := make([]string, len(commands))
|
||||
|
||||
for i, v := range commands {
|
||||
cmdnames[i] = v.Name
|
||||
}
|
||||
|
||||
var msg string
|
||||
var errtype ErrorType
|
||||
|
||||
if len(p.retargs) != 0 {
|
||||
c, l := closestChoice(p.retargs[0], cmdnames)
|
||||
msg = fmt.Sprintf("Unknown command `%s'", p.retargs[0])
|
||||
errtype = ErrUnknownCommand
|
||||
|
||||
if float32(l)/float32(len(c)) < 0.5 {
|
||||
msg = fmt.Sprintf("%s, did you mean `%s'?", msg, c)
|
||||
} else if len(cmdnames) == 1 {
|
||||
msg = fmt.Sprintf("%s. You should use the %s command",
|
||||
msg,
|
||||
cmdnames[0])
|
||||
} else {
|
||||
msg = fmt.Sprintf("%s. Please specify one command of: %s or %s",
|
||||
msg,
|
||||
strings.Join(cmdnames[:len(cmdnames)-1], ", "),
|
||||
cmdnames[len(cmdnames)-1])
|
||||
}
|
||||
} else {
|
||||
errtype = ErrCommandRequired
|
||||
|
||||
if len(cmdnames) == 1 {
|
||||
msg = fmt.Sprintf("Please specify the %s command", cmdnames[0])
|
||||
} else {
|
||||
msg = fmt.Sprintf("Please specify one command of: %s or %s",
|
||||
strings.Join(cmdnames[:len(cmdnames)-1], ", "),
|
||||
cmdnames[len(cmdnames)-1])
|
||||
}
|
||||
}
|
||||
|
||||
return newError(errtype, msg)
|
||||
}
|
||||
|
||||
func (p *Parser) parseOption(s *parseState, name string, option *Option, canarg bool, argument *string) (err error) {
|
||||
if !option.canArgument() {
|
||||
if argument != nil {
|
||||
return newErrorf(ErrNoArgumentForBool, "bool flag `%s' cannot have an argument", option)
|
||||
}
|
||||
|
||||
err = option.set(nil)
|
||||
} else if argument != nil || (canarg && !s.eof()) {
|
||||
var arg string
|
||||
|
||||
if argument != nil {
|
||||
arg = *argument
|
||||
} else {
|
||||
arg = s.pop()
|
||||
|
||||
if argumentIsOption(arg) {
|
||||
return newErrorf(ErrExpectedArgument, "expected argument for flag `%s', but got option `%s'", option, arg)
|
||||
} else if p.Options&PassDoubleDash != 0 && arg == "--" {
|
||||
return newErrorf(ErrExpectedArgument, "expected argument for flag `%s', but got double dash `--'", option)
|
||||
}
|
||||
}
|
||||
|
||||
if option.tag.Get("unquote") != "false" {
|
||||
arg, err = unquoteIfPossible(arg)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
err = option.set(&arg)
|
||||
}
|
||||
} else if option.OptionalArgument {
|
||||
option.empty()
|
||||
|
||||
for _, v := range option.OptionalValue {
|
||||
err = option.set(&v)
|
||||
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
err = newErrorf(ErrExpectedArgument, "expected argument for flag `%s'", option)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if _, ok := err.(*Error); !ok {
|
||||
err = newErrorf(ErrMarshal, "invalid argument for flag `%s' (expected %s): %s",
|
||||
option,
|
||||
option.value.Type(),
|
||||
err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (p *Parser) parseLong(s *parseState, name string, argument *string) error {
|
||||
if option := s.lookup.longNames[name]; option != nil {
|
||||
// Only long options whose argument is not optional can consume
// an argument from the argument list
|
||||
canarg := !option.OptionalArgument
|
||||
|
||||
return p.parseOption(s, name, option, canarg, argument)
|
||||
}
|
||||
|
||||
return newErrorf(ErrUnknownFlag, "unknown flag `%s'", name)
|
||||
}
|
||||
|
||||
func (p *Parser) splitShortConcatArg(s *parseState, optname string) (string, *string) {
|
||||
c, n := utf8.DecodeRuneInString(optname)
|
||||
|
||||
if n == len(optname) {
|
||||
return optname, nil
|
||||
}
|
||||
|
||||
first := string(c)
|
||||
|
||||
if option := s.lookup.shortNames[first]; option != nil && option.canArgument() {
|
||||
arg := optname[n:]
|
||||
return first, &arg
|
||||
}
|
||||
|
||||
return optname, nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseShort(s *parseState, optname string, argument *string) error {
|
||||
if argument == nil {
|
||||
optname, argument = p.splitShortConcatArg(s, optname)
|
||||
}
|
||||
|
||||
for i, c := range optname {
|
||||
shortname := string(c)
|
||||
|
||||
if option := s.lookup.shortNames[shortname]; option != nil {
|
||||
// Only the last short argument can consume an argument from
|
||||
// the arguments list, and only if it's non optional
|
||||
canarg := (i+utf8.RuneLen(c) == len(optname)) && !option.OptionalArgument
|
||||
|
||||
if err := p.parseOption(s, shortname, option, canarg, argument); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return newErrorf(ErrUnknownFlag, "unknown flag `%s'", shortname)
|
||||
}
|
||||
|
||||
// Only the first option can have a concatted argument, so just
|
||||
// clear argument here
|
||||
argument = nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *parseState) addArgs(args ...string) error {
|
||||
for len(p.positional) > 0 && len(args) > 0 {
|
||||
arg := p.positional[0]
|
||||
|
||||
if err := convert(args[0], arg.value, arg.tag); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !arg.isRemaining() {
|
||||
p.positional = p.positional[1:]
|
||||
}
|
||||
|
||||
args = args[1:]
|
||||
}
|
||||
|
||||
p.retargs = append(p.retargs, args...)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseNonOption(s *parseState) error {
|
||||
if len(s.positional) > 0 {
|
||||
return s.addArgs(s.arg)
|
||||
}
|
||||
|
||||
if cmd := s.lookup.commands[s.arg]; cmd != nil {
|
||||
s.command.Active = cmd
|
||||
cmd.fillParseState(s)
|
||||
} else if (p.Options & PassAfterNonOption) != None {
|
||||
// If PassAfterNonOption is set then all remaining arguments
|
||||
// are considered positional
|
||||
if err := s.addArgs(s.arg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := s.addArgs(s.args...); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
s.args = []string{}
|
||||
} else {
|
||||
return s.addArgs(s.arg)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) showBuiltinHelp() error {
|
||||
var b bytes.Buffer
|
||||
|
||||
p.WriteHelp(&b)
|
||||
return newError(ErrHelp, b.String())
|
||||
}
|
||||
|
||||
func (p *Parser) printError(err error) error {
|
||||
if err != nil && (p.Options&PrintErrors) != None {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (p *Parser) clearIsSet() {
|
||||
p.eachCommand(func(c *Command) {
|
||||
c.eachGroup(func(g *Group) {
|
||||
for _, option := range g.options {
|
||||
option.isSet = false
|
||||
}
|
||||
})
|
||||
}, true)
|
||||
}
|
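The doc comments above describe how Parse, ParseArgs, NewParser, and NewNamedParser fit together. As a hedged sketch (not part of the vendored file), a typical application using this API might look like the following; the option struct, flag names, and default values are purely illustrative:

```go
package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

// Illustrative option group; the field tags follow the conventions used in
// the tests below (short, long, description, default).
type options struct {
	Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
	Output  string `short:"o" long:"output" description:"Output file" default:"out.txt"`
}

func main() {
	var opts options

	// NewParser registers the struct as the "Application Options" group and
	// enables the Default behaviour (built-in --help, error printing).
	parser := flags.NewParser(&opts, flags.Default)

	rest, err := parser.ParseArgs(os.Args[1:])
	if err != nil {
		// ErrHelp signals that -h/--help was handled; exit cleanly in that case.
		if flagsErr, ok := err.(*flags.Error); ok && flagsErr.Type == flags.ErrHelp {
			os.Exit(0)
		}
		os.Exit(1)
	}

	fmt.Println("verbose level:", len(opts.Verbose))
	fmt.Println("remaining arguments:", rest)
}
```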
500
vendor/github.com/jessevdk/go-flags/parser_test.go
generated
vendored
Normal file
@ -0,0 +1,500 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type defaultOptions struct {
|
||||
Int int `long:"i"`
|
||||
IntDefault int `long:"id" default:"1"`
|
||||
|
||||
Float64 float64 `long:"f"`
|
||||
Float64Default float64 `long:"fd" default:"-3.14"`
|
||||
|
||||
NumericFlag bool `short:"3"`
|
||||
|
||||
String string `long:"str"`
|
||||
StringDefault string `long:"strd" default:"abc"`
|
||||
StringNotUnquoted string `long:"strnot" unquote:"false"`
|
||||
|
||||
Time time.Duration `long:"t"`
|
||||
TimeDefault time.Duration `long:"td" default:"1m"`
|
||||
|
||||
Map map[string]int `long:"m"`
|
||||
MapDefault map[string]int `long:"md" default:"a:1"`
|
||||
|
||||
Slice []int `long:"s"`
|
||||
SliceDefault []int `long:"sd" default:"1" default:"2"`
|
||||
}
|
||||
|
||||
func TestDefaults(t *testing.T) {
|
||||
var tests = []struct {
|
||||
msg string
|
||||
args []string
|
||||
expected defaultOptions
|
||||
}{
|
||||
{
|
||||
msg: "no arguments, expecting default values",
|
||||
args: []string{},
|
||||
expected: defaultOptions{
|
||||
Int: 0,
|
||||
IntDefault: 1,
|
||||
|
||||
Float64: 0.0,
|
||||
Float64Default: -3.14,
|
||||
|
||||
NumericFlag: false,
|
||||
|
||||
String: "",
|
||||
StringDefault: "abc",
|
||||
|
||||
Time: 0,
|
||||
TimeDefault: time.Minute,
|
||||
|
||||
Map: map[string]int{},
|
||||
MapDefault: map[string]int{"a": 1},
|
||||
|
||||
Slice: []int{},
|
||||
SliceDefault: []int{1, 2},
|
||||
},
|
||||
},
|
||||
{
|
||||
msg: "non-zero value arguments, expecting overwritten arguments",
|
||||
args: []string{"--i=3", "--id=3", "--f=-2.71", "--fd=2.71", "-3", "--str=def", "--strd=def", "--t=3ms", "--td=3ms", "--m=c:3", "--md=c:3", "--s=3", "--sd=3"},
|
||||
expected: defaultOptions{
|
||||
Int: 3,
|
||||
IntDefault: 3,
|
||||
|
||||
Float64: -2.71,
|
||||
Float64Default: 2.71,
|
||||
|
||||
NumericFlag: true,
|
||||
|
||||
String: "def",
|
||||
StringDefault: "def",
|
||||
|
||||
Time: 3 * time.Millisecond,
|
||||
TimeDefault: 3 * time.Millisecond,
|
||||
|
||||
Map: map[string]int{"c": 3},
|
||||
MapDefault: map[string]int{"c": 3},
|
||||
|
||||
Slice: []int{3},
|
||||
SliceDefault: []int{3},
|
||||
},
|
||||
},
|
||||
{
|
||||
msg: "zero value arguments, expecting overwritten arguments",
|
||||
args: []string{"--i=0", "--id=0", "--f=0", "--fd=0", "--str", "", "--strd=\"\"", "--t=0ms", "--td=0s", "--m=:0", "--md=:0", "--s=0", "--sd=0"},
|
||||
expected: defaultOptions{
|
||||
Int: 0,
|
||||
IntDefault: 0,
|
||||
|
||||
Float64: 0,
|
||||
Float64Default: 0,
|
||||
|
||||
String: "",
|
||||
StringDefault: "",
|
||||
|
||||
Time: 0,
|
||||
TimeDefault: 0,
|
||||
|
||||
Map: map[string]int{"": 0},
|
||||
MapDefault: map[string]int{"": 0},
|
||||
|
||||
Slice: []int{0},
|
||||
SliceDefault: []int{0},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts defaultOptions
|
||||
|
||||
_, err := ParseArgs(&opts, test.args)
|
||||
if err != nil {
|
||||
t.Fatalf("%s:\nUnexpected error: %v", test.msg, err)
|
||||
}
|
||||
|
||||
if opts.Slice == nil {
|
||||
opts.Slice = []int{}
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(opts, test.expected) {
|
||||
t.Errorf("%s:\nUnexpected options with arguments %+v\nexpected\n%+v\nbut got\n%+v\n", test.msg, test.args, test.expected, opts)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNoDefaultsForBools(t *testing.T) {
|
||||
var opts struct {
|
||||
DefaultBool bool `short:"d" default:"true"`
|
||||
}
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
assertParseFail(t, ErrInvalidTag, "boolean flag `/d' may not have default values, they always default to `false' and can only be turned on", &opts)
|
||||
} else {
|
||||
assertParseFail(t, ErrInvalidTag, "boolean flag `-d' may not have default values, they always default to `false' and can only be turned on", &opts)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnquoting(t *testing.T) {
|
||||
var tests = []struct {
|
||||
arg string
|
||||
err error
|
||||
value string
|
||||
}{
|
||||
{
|
||||
arg: "\"abc",
|
||||
err: strconv.ErrSyntax,
|
||||
value: "",
|
||||
},
|
||||
{
|
||||
arg: "\"\"abc\"",
|
||||
err: strconv.ErrSyntax,
|
||||
value: "",
|
||||
},
|
||||
{
|
||||
arg: "\"abc\"",
|
||||
err: nil,
|
||||
value: "abc",
|
||||
},
|
||||
{
|
||||
arg: "\"\\\"abc\\\"\"",
|
||||
err: nil,
|
||||
value: "\"abc\"",
|
||||
},
|
||||
{
|
||||
arg: "\"\\\"abc\"",
|
||||
err: nil,
|
||||
value: "\"abc",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var opts defaultOptions
|
||||
|
||||
for _, delimiter := range []bool{false, true} {
|
||||
p := NewParser(&opts, None)
|
||||
|
||||
var err error
|
||||
if delimiter {
|
||||
_, err = p.ParseArgs([]string{"--str=" + test.arg, "--strnot=" + test.arg})
|
||||
} else {
|
||||
_, err = p.ParseArgs([]string{"--str", test.arg, "--strnot", test.arg})
|
||||
}
|
||||
|
||||
if test.err == nil {
|
||||
if err != nil {
|
||||
t.Fatalf("Expected no error but got: %v", err)
|
||||
}
|
||||
|
||||
if test.value != opts.String {
|
||||
t.Fatalf("Expected String to be %q but got %q", test.value, opts.String)
|
||||
}
|
||||
if q := strconv.Quote(test.value); q != opts.StringNotUnquoted {
|
||||
t.Fatalf("Expected StringDefault to be %q but got %q", q, opts.StringNotUnquoted)
|
||||
}
|
||||
} else {
|
||||
if err == nil {
|
||||
t.Fatalf("Expected error")
|
||||
} else if e, ok := err.(*Error); ok {
|
||||
if !strings.HasSuffix(e.Message, test.err.Error()) {
t.Fatalf("Expected error message to end with %q but got %v", test.err.Error(), e.Message)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// envRestorer keeps a copy of a set of env variables and can restore the env from them
|
||||
type envRestorer struct {
|
||||
env map[string]string
|
||||
}
|
||||
|
||||
func (r *envRestorer) Restore() {
|
||||
os.Clearenv()
|
||||
for k, v := range r.env {
|
||||
os.Setenv(k, v)
|
||||
}
|
||||
}
|
||||
|
||||
// EnvSnapshot returns a snapshot of the currently set env variables
|
||||
func EnvSnapshot() *envRestorer {
|
||||
r := envRestorer{make(map[string]string)}
|
||||
for _, kv := range os.Environ() {
|
||||
parts := strings.SplitN(kv, "=", 2)
|
||||
if len(parts) != 2 {
|
||||
panic("got a weird env variable: " + kv)
|
||||
}
|
||||
r.env[parts[0]] = parts[1]
|
||||
}
|
||||
return &r
|
||||
}
|
||||
|
||||
type envDefaultOptions struct {
|
||||
Int int `long:"i" default:"1" env:"TEST_I"`
|
||||
Time time.Duration `long:"t" default:"1m" env:"TEST_T"`
|
||||
Map map[string]int `long:"m" default:"a:1" env:"TEST_M" env-delim:";"`
|
||||
Slice []int `long:"s" default:"1" default:"2" env:"TEST_S" env-delim:","`
|
||||
}
|
||||
|
||||
func TestEnvDefaults(t *testing.T) {
|
||||
var tests = []struct {
|
||||
msg string
|
||||
args []string
|
||||
expected envDefaultOptions
|
||||
env map[string]string
|
||||
}{
|
||||
{
|
||||
msg: "no arguments, no env, expecting default values",
|
||||
args: []string{},
|
||||
expected: envDefaultOptions{
|
||||
Int: 1,
|
||||
Time: time.Minute,
|
||||
Map: map[string]int{"a": 1},
|
||||
Slice: []int{1, 2},
|
||||
},
|
||||
},
|
||||
{
|
||||
msg: "no arguments, env defaults, expecting env default values",
|
||||
args: []string{},
|
||||
expected: envDefaultOptions{
|
||||
Int: 2,
|
||||
Time: 2 * time.Minute,
|
||||
Map: map[string]int{"a": 2, "b": 3},
|
||||
Slice: []int{4, 5, 6},
|
||||
},
|
||||
env: map[string]string{
|
||||
"TEST_I": "2",
|
||||
"TEST_T": "2m",
|
||||
"TEST_M": "a:2;b:3",
|
||||
"TEST_S": "4,5,6",
|
||||
},
|
||||
},
|
||||
{
|
||||
msg: "non-zero value arguments, expecting overwritten arguments",
|
||||
args: []string{"--i=3", "--t=3ms", "--m=c:3", "--s=3"},
|
||||
expected: envDefaultOptions{
|
||||
Int: 3,
|
||||
Time: 3 * time.Millisecond,
|
||||
Map: map[string]int{"c": 3},
|
||||
Slice: []int{3},
|
||||
},
|
||||
env: map[string]string{
|
||||
"TEST_I": "2",
|
||||
"TEST_T": "2m",
|
||||
"TEST_M": "a:2;b:3",
|
||||
"TEST_S": "4,5,6",
|
||||
},
|
||||
},
|
||||
{
|
||||
msg: "zero value arguments, expecting overwritten arguments",
|
||||
args: []string{"--i=0", "--t=0ms", "--m=:0", "--s=0"},
|
||||
expected: envDefaultOptions{
|
||||
Int: 0,
|
||||
Time: 0,
|
||||
Map: map[string]int{"": 0},
|
||||
Slice: []int{0},
|
||||
},
|
||||
env: map[string]string{
|
||||
"TEST_I": "2",
|
||||
"TEST_T": "2m",
|
||||
"TEST_M": "a:2;b:3",
|
||||
"TEST_S": "4,5,6",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
oldEnv := EnvSnapshot()
|
||||
defer oldEnv.Restore()
|
||||
|
||||
for _, test := range tests {
|
||||
var opts envDefaultOptions
|
||||
oldEnv.Restore()
|
||||
for envKey, envValue := range test.env {
|
||||
os.Setenv(envKey, envValue)
|
||||
}
|
||||
_, err := ParseArgs(&opts, test.args)
|
||||
if err != nil {
|
||||
t.Fatalf("%s:\nUnexpected error: %v", test.msg, err)
|
||||
}
|
||||
|
||||
if opts.Slice == nil {
|
||||
opts.Slice = []int{}
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(opts, test.expected) {
|
||||
t.Errorf("%s:\nUnexpected options with arguments %+v\nexpected\n%+v\nbut got\n%+v\n", test.msg, test.args, test.expected, opts)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestOptionAsArgument(t *testing.T) {
|
||||
var tests = []struct {
|
||||
args []string
|
||||
expectError bool
|
||||
errType ErrorType
|
||||
errMsg string
|
||||
rest []string
|
||||
}{
|
||||
{
|
||||
// short option must not be accepted as argument
|
||||
args: []string{"--string-slice", "foobar", "--string-slice", "-o"},
|
||||
expectError: true,
|
||||
errType: ErrExpectedArgument,
|
||||
errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `-o'",
|
||||
},
|
||||
{
|
||||
// long option must not be accepted as argument
|
||||
args: []string{"--string-slice", "foobar", "--string-slice", "--other-option"},
|
||||
expectError: true,
|
||||
errType: ErrExpectedArgument,
|
||||
errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `--other-option'",
|
||||
},
|
||||
{
|
||||
// long option must not be accepted as argument
|
||||
args: []string{"--string-slice", "--"},
|
||||
expectError: true,
|
||||
errType: ErrExpectedArgument,
|
||||
errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got double dash `--'",
|
||||
},
|
||||
{
|
||||
// quoted and appended option should be accepted as argument (even if it looks like an option)
|
||||
args: []string{"--string-slice", "foobar", "--string-slice=\"--other-option\""},
|
||||
},
|
||||
{
|
||||
// Accept any single character arguments including '-'
|
||||
args: []string{"--string-slice", "-"},
|
||||
},
|
||||
{
|
||||
// Do not accept arguments which start with '-' even if the next character is a digit
|
||||
args: []string{"--string-slice", "-3.14"},
|
||||
expectError: true,
|
||||
errType: ErrExpectedArgument,
|
||||
errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `-3.14'",
|
||||
},
|
||||
{
|
||||
// Do not accept arguments which start with '-' if the next character is not a digit
|
||||
args: []string{"--string-slice", "-character"},
|
||||
expectError: true,
|
||||
errType: ErrExpectedArgument,
|
||||
errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `-character'",
|
||||
},
|
||||
{
|
||||
args: []string{"-o", "-", "-"},
|
||||
rest: []string{"-", "-"},
|
||||
},
|
||||
}
|
||||
var opts struct {
|
||||
StringSlice []string `long:"string-slice"`
|
||||
OtherOption bool `long:"other-option" short:"o"`
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
if test.expectError {
|
||||
assertParseFail(t, test.errType, test.errMsg, &opts, test.args...)
|
||||
} else {
|
||||
args := assertParseSuccess(t, &opts, test.args...)
|
||||
|
||||
assertStringArray(t, args, test.rest)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnknownFlagHandler(t *testing.T) {
|
||||
|
||||
var opts struct {
|
||||
Flag1 string `long:"flag1"`
|
||||
Flag2 string `long:"flag2"`
|
||||
}
|
||||
|
||||
p := NewParser(&opts, None)
|
||||
|
||||
var unknownFlag1 string
|
||||
var unknownFlag2 bool
|
||||
var unknownFlag3 string
|
||||
|
||||
// Set up a callback to intercept unknown options during parsing
|
||||
p.UnknownOptionHandler = func(option string, arg SplitArgument, args []string) ([]string, error) {
|
||||
if option == "unknownFlag1" {
|
||||
if argValue, ok := arg.Value(); ok {
|
||||
unknownFlag1 = argValue
|
||||
return args, nil
|
||||
}
|
||||
// consume a value from remaining args list
|
||||
unknownFlag1 = args[0]
|
||||
return args[1:], nil
|
||||
} else if option == "unknownFlag2" {
|
||||
// treat this one as a bool switch, don't consume any args
|
||||
unknownFlag2 = true
|
||||
return args, nil
|
||||
} else if option == "unknownFlag3" {
|
||||
if argValue, ok := arg.Value(); ok {
|
||||
unknownFlag3 = argValue
|
||||
return args, nil
|
||||
}
|
||||
// consume a value from remaining args list
|
||||
unknownFlag3 = args[0]
|
||||
return args[1:], nil
|
||||
}
|
||||
|
||||
return args, fmt.Errorf("Unknown flag: %v", option)
|
||||
}
|
||||
|
||||
// Parse args containing some unknown flags, verify that
|
||||
// our callback can handle all of them
|
||||
_, err := p.ParseArgs([]string{"--flag1=stuff", "--unknownFlag1", "blah", "--unknownFlag2", "--unknownFlag3=baz", "--flag2=foo"})
|
||||
|
||||
if err != nil {
|
||||
assertErrorf(t, "Parser returned unexpected error %v", err)
|
||||
}
|
||||
|
||||
assertString(t, opts.Flag1, "stuff")
|
||||
assertString(t, opts.Flag2, "foo")
|
||||
assertString(t, unknownFlag1, "blah")
|
||||
assertString(t, unknownFlag3, "baz")
|
||||
|
||||
if !unknownFlag2 {
|
||||
assertErrorf(t, "Flag should have been set by unknown handler, but had value: %v", unknownFlag2)
|
||||
}
|
||||
|
||||
// Parse args with unknown flags that callback doesn't handle, verify it returns error
|
||||
_, err = p.ParseArgs([]string{"--flag1=stuff", "--unknownFlagX", "blah", "--flag2=foo"})
|
||||
|
||||
if err == nil {
|
||||
assertErrorf(t, "Parser should have returned error, but returned nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestChoices(t *testing.T) {
|
||||
var opts struct {
|
||||
Choice string `long:"choose" choice:"v1" choice:"v2"`
|
||||
}
|
||||
|
||||
assertParseFail(t, ErrInvalidChoice, "Invalid value `invalid' for option `"+defaultLongOptDelimiter+"choose'. Allowed values are: v1 or v2", &opts, "--choose", "invalid")
|
||||
assertParseSuccess(t, &opts, "--choose", "v2")
|
||||
assertString(t, opts.Choice, "v2")
|
||||
}
|
||||
|
||||
func TestEmbedded(t *testing.T) {
|
||||
type embedded struct {
|
||||
V bool `short:"v"`
|
||||
}
|
||||
var opts struct {
|
||||
embedded
|
||||
}
|
||||
|
||||
assertParseSuccess(t, &opts, "-v")
|
||||
if !opts.V {
|
||||
t.Errorf("Expected V to be true")
|
||||
}
|
||||
}
|
81
vendor/github.com/jessevdk/go-flags/pointer_test.go
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPointerBool(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value *bool `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !*opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPointerString(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value *string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v", "value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, *opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestPointerSlice(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value *[]string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v", "value1", "-v", "value2")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertStringArray(t, *opts.Value, []string{"value1", "value2"})
|
||||
}
|
||||
|
||||
func TestPointerMap(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value *map[string]int `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v", "k1:2", "-v", "k2:-5")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if v, ok := (*opts.Value)["k1"]; !ok {
|
||||
t.Errorf("Expected key \"k1\" to exist")
|
||||
} else if v != 2 {
|
||||
t.Errorf("Expected \"k1\" to be 2, but got %#v", v)
|
||||
}
|
||||
|
||||
if v, ok := (*opts.Value)["k2"]; !ok {
|
||||
t.Errorf("Expected key \"k2\" to exist")
|
||||
} else if v != -5 {
|
||||
t.Errorf("Expected \"k2\" to be -5, but got %#v", v)
|
||||
}
|
||||
}
|
||||
|
||||
type PointerGroup struct {
|
||||
Value bool `short:"v"`
|
||||
}
|
||||
|
||||
func TestPointerGroup(t *testing.T) {
|
||||
var opts = struct {
|
||||
Group *PointerGroup `group:"Group Options"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !opts.Group.Value {
|
||||
t.Errorf("Expected Group.Value to be true")
|
||||
}
|
||||
}
|
194
vendor/github.com/jessevdk/go-flags/short_test.go
generated
vendored
Normal file
@ -0,0 +1,194 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestShort(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !opts.Value {
|
||||
t.Errorf("Expected Value to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestShortTooLong(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"vv"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrShortNameTooLong, "short names can only be 1 character long, not `vv'", &opts)
|
||||
}
|
||||
|
||||
func TestShortRequired(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v" required:"true"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrRequired, fmt.Sprintf("the required flag `%cv' was not specified", defaultShortOptDelimiter), &opts)
|
||||
}
|
||||
|
||||
func TestShortMultiConcat(t *testing.T) {
|
||||
var opts = struct {
|
||||
V bool `short:"v"`
|
||||
O bool `short:"o"`
|
||||
F bool `short:"f"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-vo", "-f")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !opts.V {
|
||||
t.Errorf("Expected V to be true")
|
||||
}
|
||||
|
||||
if !opts.O {
|
||||
t.Errorf("Expected O to be true")
|
||||
}
|
||||
|
||||
if !opts.F {
|
||||
t.Errorf("Expected F to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestShortMultiRequiredConcat(t *testing.T) {
|
||||
var opts = struct {
|
||||
V bool `short:"v" required:"true"`
|
||||
O bool `short:"o" required:"true"`
|
||||
F bool `short:"f" required:"true"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-vo", "-f")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
|
||||
if !opts.V {
|
||||
t.Errorf("Expected V to be true")
|
||||
}
|
||||
|
||||
if !opts.O {
|
||||
t.Errorf("Expected O to be true")
|
||||
}
|
||||
|
||||
if !opts.F {
|
||||
t.Errorf("Expected F to be true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestShortMultiSlice(t *testing.T) {
|
||||
var opts = struct {
|
||||
Values []bool `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v", "-v")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertBoolArray(t, opts.Values, []bool{true, true})
|
||||
}
|
||||
|
||||
func TestShortMultiSliceConcat(t *testing.T) {
|
||||
var opts = struct {
|
||||
Values []bool `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-vvv")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertBoolArray(t, opts.Values, []bool{true, true, true})
|
||||
}
|
||||
|
||||
func TestShortWithEqualArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v=value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestShortWithArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-vvalue")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestShortArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-v", "value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestShortMultiWithEqualArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
F []bool `short:"f"`
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrExpectedArgument, fmt.Sprintf("expected argument for flag `%cv'", defaultShortOptDelimiter), &opts, "-ffv=value")
|
||||
}
|
||||
|
||||
func TestShortMultiArg(t *testing.T) {
|
||||
var opts = struct {
|
||||
F []bool `short:"f"`
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-ffv", "value")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertBoolArray(t, opts.F, []bool{true, true})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
||||
|
||||
func TestShortMultiArgConcatFail(t *testing.T) {
|
||||
var opts = struct {
|
||||
F []bool `short:"f"`
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrExpectedArgument, fmt.Sprintf("expected argument for flag `%cv'", defaultShortOptDelimiter), &opts, "-ffvvalue")
|
||||
}
|
||||
|
||||
func TestShortMultiArgConcat(t *testing.T) {
|
||||
var opts = struct {
|
||||
F []bool `short:"f"`
|
||||
Value string `short:"v"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-vff")
|
||||
|
||||
assertStringArray(t, ret, []string{})
|
||||
assertString(t, opts.Value, "ff")
|
||||
}
|
||||
|
||||
func TestShortOptional(t *testing.T) {
|
||||
var opts = struct {
|
||||
F []bool `short:"f"`
|
||||
Value string `short:"v" optional:"yes" optional-value:"value"`
|
||||
}{}
|
||||
|
||||
ret := assertParseSuccess(t, &opts, "-fv", "f")
|
||||
|
||||
assertStringArray(t, ret, []string{"f"})
|
||||
assertString(t, opts.Value, "value")
|
||||
}
|
38
vendor/github.com/jessevdk/go-flags/tag_test.go
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestTagMissingColon(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrTag, "expected `:' after key name, but got end of tag (in `short`)", &opts, "")
|
||||
}
|
||||
|
||||
func TestTagMissingValue(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrTag, "expected `\"' to start tag value at end of tag (in `short:`)", &opts, "")
|
||||
}
|
||||
|
||||
func TestTagMissingQuote(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `short:"v`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrTag, "expected end of tag value `\"' at end of tag (in `short:\"v`)", &opts, "")
|
||||
}
|
||||
|
||||
func TestTagNewline(t *testing.T) {
|
||||
var opts = struct {
|
||||
Value bool `long:"verbose" description:"verbose
|
||||
something"`
|
||||
}{}
|
||||
|
||||
assertParseFail(t, ErrTag, "unexpected newline in tag value `description' (in `long:\"verbose\" description:\"verbose\nsomething\"`)", &opts, "")
|
||||
}
|
28
vendor/github.com/jessevdk/go-flags/termsize.go
generated
vendored
Normal file
@ -0,0 +1,28 @@
// +build !windows,!plan9,!solaris

package flags

import (
	"syscall"
	"unsafe"
)

type winsize struct {
	row, col       uint16
	xpixel, ypixel uint16
}

func getTerminalColumns() int {
	ws := winsize{}

	if tIOCGWINSZ != 0 {
		syscall.Syscall(syscall.SYS_IOCTL,
			uintptr(0),
			uintptr(tIOCGWINSZ),
			uintptr(unsafe.Pointer(&ws)))

		return int(ws.col)
	}

	return 80
}
7
vendor/github.com/jessevdk/go-flags/termsize_linux.go
generated
vendored
Normal file
@ -0,0 +1,7 @@
// +build linux

package flags

const (
	tIOCGWINSZ = 0x5413
)
7
vendor/github.com/jessevdk/go-flags/termsize_nosysioctl.go
generated
vendored
Normal file
@ -0,0 +1,7 @@
// +build windows plan9 solaris

package flags

func getTerminalColumns() int {
	return 80
}
7
vendor/github.com/jessevdk/go-flags/termsize_other.go
generated
vendored
Normal file
@ -0,0 +1,7 @@
// +build !darwin,!freebsd,!netbsd,!openbsd,!linux

package flags

const (
	tIOCGWINSZ = 0
)
7
vendor/github.com/jessevdk/go-flags/termsize_unix.go
generated
vendored
Normal file
@ -0,0 +1,7 @@
// +build darwin freebsd netbsd openbsd

package flags

const (
	tIOCGWINSZ = 0x40087468
)
66
vendor/github.com/jessevdk/go-flags/unknown_test.go
generated
vendored
Normal file
@ -0,0 +1,66 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestUnknownFlags(t *testing.T) {
|
||||
var opts = struct {
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
|
||||
}{}
|
||||
|
||||
args := []string{
|
||||
"-f",
|
||||
}
|
||||
|
||||
p := NewParser(&opts, 0)
|
||||
args, err := p.ParseArgs(args)
|
||||
|
||||
if err == nil {
|
||||
t.Fatal("Expected error for unknown argument")
|
||||
}
|
||||
}
|
||||
|
||||
func TestIgnoreUnknownFlags(t *testing.T) {
|
||||
var opts = struct {
|
||||
Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
|
||||
}{}
|
||||
|
||||
args := []string{
|
||||
"hello",
|
||||
"world",
|
||||
"-v",
|
||||
"--foo=bar",
|
||||
"--verbose",
|
||||
"-f",
|
||||
}
|
||||
|
||||
p := NewParser(&opts, IgnoreUnknown)
|
||||
args, err := p.ParseArgs(args)
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
exargs := []string{
|
||||
"hello",
|
||||
"world",
|
||||
"--foo=bar",
|
||||
"-f",
|
||||
}
|
||||
|
||||
issame := (len(args) == len(exargs))
|
||||
|
||||
if issame {
|
||||
for i := 0; i < len(args); i++ {
|
||||
if args[i] != exargs[i] {
|
||||
issame = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !issame {
|
||||
t.Fatalf("Expected %v but got %v", exargs, args)
|
||||
}
|
||||
}
|
27
vendor/github.com/klauspost/compress/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
330
vendor/github.com/klauspost/compress/README.md
generated
vendored
Normal file
@ -0,0 +1,330 @@
|
||||
# compress
|
||||
|
||||
This package is based on an optimized Deflate function, which is used by the gzip/zip/zlib packages.
|
||||
|
||||
It offers slightly better compression at lower compression settings, and up to 3x faster encoding at highest compression level.
|
||||
|
||||
* [High Throughput Benchmark](http://blog.klauspost.com/go-gzipdeflate-benchmarks/).
|
||||
* [Small Payload/Webserver Benchmarks](http://blog.klauspost.com/gzip-performance-for-go-webservers/).
|
||||
* [Linear Time Compression](http://blog.klauspost.com/constant-time-gzipzip-compression/).
|
||||
* [Re-balancing Deflate Compression Levels](https://blog.klauspost.com/rebalancing-deflate-compression-levels/)
|
||||
|
||||
[](https://travis-ci.org/klauspost/compress)
|
||||
|
||||
# changelog
|
||||
* Mar 24, 2016: Always attempt Huffman encoding on level 4-7. This improves base 64 encoded data compression.
|
||||
* Mar 24, 2016: Small speedup for level 1-3.
|
||||
* Feb 19, 2016: Faster bit writer, level -2 is 15% faster, level 1 is 4% faster.
|
||||
* Feb 19, 2016: Handle small payloads faster in level 1-3.
|
||||
* Feb 19, 2016: Added faster level 2 + 3 compression modes.
|
||||
* Feb 19, 2016: [Rebalanced compression levels](https://blog.klauspost.com/rebalancing-deflate-compression-levels/), so there is a more even progression in terms of compression. New default level is 5.
|
||||
* Feb 14, 2016: Snappy: Merge upstream changes.
|
||||
* Feb 14, 2016: Snappy: Fix aggressive skipping.
|
||||
* Feb 14, 2016: Snappy: Update benchmark.
|
||||
* Feb 13, 2016: Deflate: Fixed assembler problem that could lead to sub-optimal compression.
|
||||
* Feb 12, 2016: Snappy: Added AMD64 SSE 4.2 optimizations to matching, which makes easy to compress material run faster. Typical speedup is around 25%.
|
||||
* Feb 9, 2016: Added Snappy package fork. This version is 5-7% faster, much more on hard to compress content.
|
||||
* Jan 30, 2016: Optimize level 1 to 3 by not considering static dictionary or storing uncompressed. ~4-5% speedup.
|
||||
* Jan 16, 2016: Optimization on deflate level 1,2,3 compression.
|
||||
* Jan 8 2016: Merge [CL 18317](https://go-review.googlesource.com/#/c/18317): fix reading, writing of zip64 archives.
|
||||
* Dec 8 2015: Make level 1 and -2 deterministic even if write size differs.
|
||||
* Dec 8 2015: Split encoding functions, so hashing and matching can potentially be inlined. 1-3% faster on AMD64. 5% faster on other platforms.
|
||||
* Dec 8 2015: Fixed rare [one byte out-of bounds read](https://github.com/klauspost/compress/issues/20). Please update!
|
||||
* Nov 23 2015: Optimization on token writer. ~2-4% faster. Contributed by [@dsnet](https://github.com/dsnet).
|
||||
* Nov 20 2015: Small optimization to bit writer on 64 bit systems.
|
||||
* Nov 17 2015: Fixed out-of-bound errors if the underlying Writer returned an error. See [#15](https://github.com/klauspost/compress/issues/15).
|
||||
* Nov 12 2015: Added [io.WriterTo](https://golang.org/pkg/io/#WriterTo) support to gzip/inflate.
|
||||
* Nov 11 2015: Merged [CL 16669](https://go-review.googlesource.com/#/c/16669/4): archive/zip: enable overriding (de)compressors per file
|
||||
* Oct 15 2015: Added skipping on uncompressible data. Random data speed up >5x.
|
||||
|
||||
# usage
|
||||
|
||||
The packages are drop-in replacements for standard libraries. Simply replace the import path to use them:
|
||||
|
||||
| old import | new import |
|
||||
|--------------------|-----------------------------------------|
|
||||
| `compress/gzip` | `github.com/klauspost/compress/gzip` |
|
||||
| `compress/zlib` | `github.com/klauspost/compress/zlib` |
|
||||
| `archive/zip` | `github.com/klauspost/compress/zip` |
|
||||
| `compress/deflate` | `github.com/klauspost/compress/deflate` |
|
||||
| `github.com/golang/snappy` | `github.com/klauspost/compress/snappy` |
|
||||
|
||||
You may also be interested in [pgzip](https://github.com/klauspost/pgzip), which is a drop-in replacement for gzip that supports multithreaded compression on big files, and the optimized [crc32](https://github.com/klauspost/crc32) package used by these packages.
|
||||
|
||||
The packages contain the same API as the standard library, so you can use its godoc: [gzip](http://golang.org/pkg/compress/gzip/), [zip](http://golang.org/pkg/archive/zip/), [zlib](http://golang.org/pkg/compress/zlib/), [flate](http://golang.org/pkg/compress/flate/), [snappy](http://golang.org/pkg/compress/snappy/).
|
||||
|
||||
Currently there is only minor speedup on decompression (mostly CRC32 calculation).
|
||||
|
||||
# deflate optimizations
|
||||
|
||||
* Minimum matches are 4 bytes, this leads to fewer searches and better compression.
|
||||
* Stronger hash (iSCSI CRC32) for matches on x64 with SSE 4.2 support. This leads to fewer hash collisions.
|
||||
* Literal byte matching using SSE 4.2 for faster match comparisons.
|
||||
* Bulk hashing on matches.
|
||||
* Much faster dictionary indexing with `NewWriterDict()`/`Reset()`.
|
||||
* Make Bit Coder faster by assuming we are on a 64 bit CPU.
|
||||
* Level 1 compression replaced by converted "Snappy" algorithm.
|
||||
* Uncompressible content is detected and skipped faster.
|
||||
* A lot of branching eliminated by having two encoders for levels 2+3 and 4+.
|
||||
* All heap memory allocations eliminated.
|
||||
|
||||
```
|
||||
benchmark old ns/op new ns/op delta
|
||||
BenchmarkEncodeDigitsSpeed1e4-4 554029 265175 -52.14%
|
||||
BenchmarkEncodeDigitsSpeed1e5-4 3908558 2416595 -38.17%
|
||||
BenchmarkEncodeDigitsSpeed1e6-4 37546692 24875330 -33.75%
|
||||
BenchmarkEncodeDigitsDefault1e4-4 781510 486322 -37.77%
|
||||
BenchmarkEncodeDigitsDefault1e5-4 15530248 6740175 -56.60%
|
||||
BenchmarkEncodeDigitsDefault1e6-4 174915710 76498625 -56.27%
|
||||
BenchmarkEncodeDigitsCompress1e4-4 769995 485652 -36.93%
|
||||
BenchmarkEncodeDigitsCompress1e5-4 15450113 6929589 -55.15%
|
||||
BenchmarkEncodeDigitsCompress1e6-4 175114660 73348495 -58.11%
|
||||
BenchmarkEncodeTwainSpeed1e4-4 560122 275977 -50.73%
|
||||
BenchmarkEncodeTwainSpeed1e5-4 3740978 2506095 -33.01%
|
||||
BenchmarkEncodeTwainSpeed1e6-4 35542802 21904440 -38.37%
|
||||
BenchmarkEncodeTwainDefault1e4-4 828534 549026 -33.74%
|
||||
BenchmarkEncodeTwainDefault1e5-4 13667153 7528455 -44.92%
|
||||
BenchmarkEncodeTwainDefault1e6-4 141191770 79952170 -43.37%
|
||||
BenchmarkEncodeTwainCompress1e4-4 830050 545694 -34.26%
|
||||
BenchmarkEncodeTwainCompress1e5-4 16620852 8460600 -49.10%
|
||||
BenchmarkEncodeTwainCompress1e6-4 193326820 90808750 -53.03%
|
||||
|
||||
benchmark old MB/s new MB/s speedup
|
||||
BenchmarkEncodeDigitsSpeed1e4-4 18.05 37.71 2.09x
|
||||
BenchmarkEncodeDigitsSpeed1e5-4 25.58 41.38 1.62x
|
||||
BenchmarkEncodeDigitsSpeed1e6-4 26.63 40.20 1.51x
|
||||
BenchmarkEncodeDigitsDefault1e4-4 12.80 20.56 1.61x
|
||||
BenchmarkEncodeDigitsDefault1e5-4 6.44 14.84 2.30x
|
||||
BenchmarkEncodeDigitsDefault1e6-4 5.72 13.07 2.28x
|
||||
BenchmarkEncodeDigitsCompress1e4-4 12.99 20.59 1.59x
|
||||
BenchmarkEncodeDigitsCompress1e5-4 6.47 14.43 2.23x
|
||||
BenchmarkEncodeDigitsCompress1e6-4 5.71 13.63 2.39x
|
||||
BenchmarkEncodeTwainSpeed1e4-4 17.85 36.23 2.03x
|
||||
BenchmarkEncodeTwainSpeed1e5-4 26.73 39.90 1.49x
|
||||
BenchmarkEncodeTwainSpeed1e6-4 28.14 45.65 1.62x
|
||||
BenchmarkEncodeTwainDefault1e4-4 12.07 18.21 1.51x
|
||||
BenchmarkEncodeTwainDefault1e5-4 7.32 13.28 1.81x
|
||||
BenchmarkEncodeTwainDefault1e6-4 7.08 12.51 1.77x
|
||||
BenchmarkEncodeTwainCompress1e4-4 12.05 18.33 1.52x
|
||||
BenchmarkEncodeTwainCompress1e5-4 6.02 11.82 1.96x
|
||||
BenchmarkEncodeTwainCompress1e6-4 5.17 11.01 2.13x
|
||||
```
|
||||
* "Speed" is compression level 1
|
||||
* "Default" is compression level 6
|
||||
* "Compress" is compression level 9
|
||||
* Test files are [Digits](https://github.com/klauspost/compress/blob/master/testdata/e.txt) (no matches) and [Twain](https://github.com/klauspost/compress/blob/master/testdata/Mark.Twain-Tom.Sawyer.txt) (plain text) .
|
||||
|
||||
As can be seen, there is a very good speedup across the board.
|
||||
|
||||
`Twain` is a much more realistic benchmark, and will be closer to JSON/HTML performance. Here speed is equivalent or faster, up to 2 times.
|
||||
|
||||
**Without assembly**. This is what you can expect on systems that do not have amd64 and SSE 4:
|
||||
```
|
||||
benchmark old ns/op new ns/op delta
|
||||
BenchmarkEncodeDigitsSpeed1e4-4 554029 249558 -54.96%
|
||||
BenchmarkEncodeDigitsSpeed1e5-4 3908558 2295216 -41.28%
|
||||
BenchmarkEncodeDigitsSpeed1e6-4 37546692 22594905 -39.82%
|
||||
BenchmarkEncodeDigitsDefault1e4-4 781510 579850 -25.80%
|
||||
BenchmarkEncodeDigitsDefault1e5-4 15530248 10096561 -34.99%
|
||||
BenchmarkEncodeDigitsDefault1e6-4 174915710 111470780 -36.27%
|
||||
BenchmarkEncodeDigitsCompress1e4-4 769995 579708 -24.71%
|
||||
BenchmarkEncodeDigitsCompress1e5-4 15450113 10266373 -33.55%
|
||||
BenchmarkEncodeDigitsCompress1e6-4 175114660 110170120 -37.09%
|
||||
BenchmarkEncodeTwainSpeed1e4-4 560122 260679 -53.46%
|
||||
BenchmarkEncodeTwainSpeed1e5-4 3740978 2097372 -43.94%
|
||||
BenchmarkEncodeTwainSpeed1e6-4 35542802 20353449 -42.74%
|
||||
BenchmarkEncodeTwainDefault1e4-4 828534 646016 -22.03%
|
||||
BenchmarkEncodeTwainDefault1e5-4 13667153 10056369 -26.42%
|
||||
BenchmarkEncodeTwainDefault1e6-4 141191770 105268770 -25.44%
|
||||
BenchmarkEncodeTwainCompress1e4-4 830050 642401 -22.61%
|
||||
BenchmarkEncodeTwainCompress1e5-4 16620852 11157081 -32.87%
|
||||
BenchmarkEncodeTwainCompress1e6-4 193326820 121780770 -37.01%
|
||||
|
||||
benchmark old MB/s new MB/s speedup
|
||||
BenchmarkEncodeDigitsSpeed1e4-4 18.05 40.07 2.22x
|
||||
BenchmarkEncodeDigitsSpeed1e5-4 25.58 43.57 1.70x
|
||||
BenchmarkEncodeDigitsSpeed1e6-4 26.63 44.26 1.66x
|
||||
BenchmarkEncodeDigitsDefault1e4-4 12.80 17.25 1.35x
|
||||
BenchmarkEncodeDigitsDefault1e5-4 6.44 9.90 1.54x
|
||||
BenchmarkEncodeDigitsDefault1e6-4 5.72 8.97 1.57x
|
||||
BenchmarkEncodeDigitsCompress1e4-4 12.99 17.25 1.33x
|
||||
BenchmarkEncodeDigitsCompress1e5-4 6.47 9.74 1.51x
|
||||
BenchmarkEncodeDigitsCompress1e6-4 5.71 9.08 1.59x
|
||||
BenchmarkEncodeTwainSpeed1e4-4 17.85 38.36 2.15x
|
||||
BenchmarkEncodeTwainSpeed1e5-4 26.73 47.68 1.78x
|
||||
BenchmarkEncodeTwainSpeed1e6-4 28.14 49.13 1.75x
|
||||
BenchmarkEncodeTwainDefault1e4-4 12.07 15.48 1.28x
|
||||
BenchmarkEncodeTwainDefault1e5-4 7.32 9.94 1.36x
|
||||
BenchmarkEncodeTwainDefault1e6-4 7.08 9.50 1.34x
|
||||
BenchmarkEncodeTwainCompress1e4-4 12.05 15.57 1.29x
|
||||
BenchmarkEncodeTwainCompress1e5-4 6.02 8.96 1.49x
|
||||
BenchmarkEncodeTwainCompress1e6-4 5.17 8.21 1.59x
|
||||
```
|
||||
So even without the assembly optimizations there is a general speedup across the board.
|
||||
|
||||
## level 1-3 "snappy" compression
|
||||
|
||||
Level 1 "Best Speed" is completely replaced by a converted version of the algorithm found in Snappy, modified to be fully
|
||||
compatible with the deflate bitstream (and thus still compatible with all existing zlib/gzip libraries and tools).
|
||||
This version is considerably faster than the "old" deflate at level 1. It does however come at a compression loss, usually in the order of 3-4% compared to the old level 1. However, the speed is usually 1.75 times that of the fastest deflate mode.
|
||||
|
||||
In my previous experiments the most common case for "level 1" was that it provided no significant speedup, only lower compression compared to level 2 and sometimes even 3. However, the modified Snappy algorithm provides a very good sweet spot. Usually about 75% faster and with only little compression loss. Therefore I decided to *replace* level 1 with this mode entirely.
|
||||
|
||||
Input is split into blocks of 64kb, and each block is encoded independently (no backreferences across blocks) for the best speed. Contrary to Snappy the output is entropy-encoded, so you will almost always see better compression than Snappy. Plain Snappy is, however, still about twice as fast as this deflate-compatible mode.
|
||||
|
||||
Levels 2 and 3 have also been replaced. Level 2 is capable of matching between blocks, and level 3 checks up to two hashes for matches before choosing the longest for encoding the match.
|
||||
|
||||
## compression levels
|
||||
|
||||
This table shows the compression at each level, and the percentage of the output size compared to the output
at the same level with the standard library. Compression data is `Twain`, see above.
|
||||
|
||||
(Not up-to-date after rebalancing)
|
||||
|
||||
| Level | Bytes | % size |
|
||||
|-------|--------|--------|
|
||||
| 1 | 194622 | 103.7% |
|
||||
| 2 | 174684 | 96.85% |
|
||||
| 3 | 170301 | 98.45% |
|
||||
| 4 | 165253 | 97.69% |
|
||||
| 5 | 161274 | 98.65% |
|
||||
| 6 | 160464 | 99.71% |
|
||||
| 7 | 160304 | 99.87% |
|
||||
| 8 | 160279 | 99.99% |
|
||||
| 9 | 160279 | 99.99% |
|
||||
|
||||
To interpret an example: this version of deflate compresses an input of 407287 bytes to 161274 bytes at level 5, which is about 98.65% of the size of the output the standard library produces at the same level.
|
||||
|
||||
This means that from level 4 onward you can expect a compression improvement of a few percent over the standard library. Level 1 is about 3% worse, as described above.
|
||||
|
||||
# linear time compression
|
||||
|
||||
This compression library adds a special compression level, named `ConstantCompression`, which allows near linear time compression. This is done by completely disabling matching of previous data, and only reducing the number of bits used to represent each character.
|
||||
|
||||
This means that often used characters, like 'e' and ' ' (space) in text, use the fewest bits, and rare characters like '¤' take more bits to represent. For more information see [wikipedia](https://en.wikipedia.org/wiki/Huffman_coding) or this nice [video](https://youtu.be/ZdooBTdW5bM).
|
||||
|
||||
Since this type of compression has much less variance, the compression speed is mostly unaffected by the input data, and is usually more than *180MB/s* for a single core.
|
||||
|
||||
The downside is that the compression ratio is usually considerably worse than even the fastest conventional compression. The compression ratio can never be better than 8:1 (12.5%).
|
||||
|
||||
The linear time compression can be used as a "better than nothing" mode, where you cannot risk the encoder slowing down on some content. For comparison, the size of the "Twain" text is *233460 bytes* (+29% vs. level 1) and encode speed is 144MB/s (4.5x level 1). So in this case you trade a 30% size increase for a 4 times speedup.
|
||||
|
||||
For more information see my blog post on [Fast Linear Time Compression](http://blog.klauspost.com/constant-time-gzipzip-compression/).
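A hedged sketch of using this mode, assuming the level constant is exposed by the flate package under the `ConstantCompression` name described above:

```go
package main

import (
	"bytes"
	"log"

	"github.com/klauspost/compress/flate"
)

func main() {
	data := []byte("some text with repeated repeated repeated words")

	var buf bytes.Buffer

	// ConstantCompression only Huffman-encodes the input; matching against
	// previous data is disabled, which gives near linear time compression.
	w, err := flate.NewWriter(&buf, flate.ConstantCompression)
	if err != nil {
		log.Fatal(err)
	}
	if _, err := w.Write(data); err != nil {
		log.Fatal(err)
	}
	if err := w.Close(); err != nil {
		log.Fatal(err)
	}

	log.Printf("compressed %d bytes into %d bytes", len(data), buf.Len())
}
```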
|
||||
|
||||
# gzip/zip optimizations
|
||||
* Uses the faster deflate
|
||||
* Uses SSE 4.2 CRC32 calculations.
|
||||
|
||||
Speed increase is up to 3x of the standard library, but usually around 2x.
|
||||
|
||||
This is as close to a real world benchmark as you will get: a 2.3MB JSON file. (NOTE: not up-to-date)
|
||||
|
||||

```
benchmark           old ns/op     new ns/op     delta
BenchmarkGzipL1-4   95212470      59938275      -37.05%
BenchmarkGzipL2-4   102069730     76349195      -25.20%
BenchmarkGzipL3-4   115472770     82492215      -28.56%
BenchmarkGzipL4-4   153197780     107570890     -29.78%
BenchmarkGzipL5-4   203930260     134387930     -34.10%
BenchmarkGzipL6-4   233172100     145495400     -37.60%
BenchmarkGzipL7-4   297190260     197926950     -33.40%
BenchmarkGzipL8-4   512819750     376244733     -26.63%
BenchmarkGzipL9-4   563366800     403266833     -28.42%

benchmark           old MB/s     new MB/s     speedup
BenchmarkGzipL1-4   52.11        82.78        1.59x
BenchmarkGzipL2-4   48.61        64.99        1.34x
BenchmarkGzipL3-4   42.97        60.15        1.40x
BenchmarkGzipL4-4   32.39        46.13        1.42x
BenchmarkGzipL5-4   24.33        36.92        1.52x
BenchmarkGzipL6-4   21.28        34.10        1.60x
BenchmarkGzipL7-4   16.70        25.07        1.50x
BenchmarkGzipL8-4   9.68         13.19        1.36x
BenchmarkGzipL9-4   8.81         12.30        1.40x
```

Multithreaded compression comparison using [pgzip](https://github.com/klauspost/pgzip), quad core, CPU = 8 (a usage sketch follows after the numbers):

(Not updated, old numbers)

```
benchmark         old ns/op     new ns/op     delta
BenchmarkGzipL1   96155500      25981486      -72.98%
BenchmarkGzipL2   101905830     24601408      -75.86%
BenchmarkGzipL3   113506490     26321506      -76.81%
BenchmarkGzipL4   143708220     31761818      -77.90%
BenchmarkGzipL5   188210770     39602266      -78.96%
BenchmarkGzipL6   209812000     40402313      -80.74%
BenchmarkGzipL7   270015440     56103210      -79.22%
BenchmarkGzipL8   461359700     91255220      -80.22%
BenchmarkGzipL9   498361833     88755075      -82.19%

benchmark         old MB/s     new MB/s     speedup
BenchmarkGzipL1   51.60        190.97       3.70x
BenchmarkGzipL2   48.69        201.69       4.14x
BenchmarkGzipL3   43.71        188.51       4.31x
BenchmarkGzipL4   34.53        156.22       4.52x
BenchmarkGzipL5   26.36        125.29       4.75x
BenchmarkGzipL6   23.65        122.81       5.19x
BenchmarkGzipL7   18.38        88.44        4.81x
BenchmarkGzipL8   10.75        54.37        5.06x
BenchmarkGzipL9   9.96         55.90        5.61x
```
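
A minimal sketch of how such a multithreaded writer is typically wired up, assuming pgzip's gzip-compatible API and its `SetConcurrency` knob (both are assumptions about the external package, not part of this repository):

```go
package main

import (
	"io"
	"log"
	"os"

	"github.com/klauspost/pgzip"
)

func main() {
	w := pgzip.NewWriter(os.Stdout)
	// Compress 1MB blocks on up to 8 goroutines; SetConcurrency is the
	// pgzip-specific part, the rest mirrors compress/gzip. (Assumed API.)
	if err := w.SetConcurrency(1<<20, 8); err != nil {
		log.Fatal(err)
	}
	if _, err := io.Copy(w, os.Stdin); err != nil {
		log.Fatal(err)
	}
	if err := w.Close(); err != nil {
		log.Fatal(err)
	}
}
```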

# snappy package

### This is still in development, and should not be used for critical applications.

The Snappy package contains some optimizations over the standard package.

This mainly speeds up **hard** and **easy** to compress material, as shown in the sketch below.
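
A minimal round-trip sketch, assuming the package lives at `github.com/klauspost/compress/snappy` and mirrors the block-format `Encode`/`Decode` functions of the upstream Snappy package:

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/klauspost/compress/snappy"
)

func main() {
	src := bytes.Repeat([]byte("0123456789abcdef"), 4096) // easy-to-compress input
	// Block format: Encode appends to dst (nil allocates) and Decode reverses it.
	enc := snappy.Encode(nil, src)
	dec, err := snappy.Decode(nil, enc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("in: %d, snappy: %d, roundtrip ok: %v\n", len(src), len(enc), bytes.Equal(src, dec))
}
```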

Here are the "standard" benchmarks, compared to current Snappy master (13 Feb 2016).

## Speed
```
name              old speed       new speed        delta
WordsDecode1e3-8   405MB/s ± 5%    444MB/s ± 1%      +9.60%  (p=0.045 n=3+3)
WordsEncode1e1-8  4.55MB/s ± 1%   98.93MB/s ± 2%  +2075.95%  (p=0.000 n=3+3)
WordsEncode1e2-8  36.4MB/s ± 0%   166.1MB/s ± 3%   +356.03%  (p=0.000 n=3+3)
WordsEncode1e3-8   129MB/s ± 0%    185MB/s ± 1%     +43.82%  (p=0.000 n=3+3)
WordsEncode1e5-8   125MB/s ± 1%    140MB/s ± 2%     +11.77%  (p=0.005 n=3+3)
WordsEncode1e6-8   121MB/s ± 3%    134MB/s ± 0%     +11.15%  (p=0.026 n=3+3)
RandomEncode-8    2.80GB/s ± 2%   2.68GB/s ± 1%      -4.32%  (p=0.019 n=3+3)
_UFlat3-8          746MB/s ± 2%    812MB/s ± 1%      +8.90%  (p=0.004 n=3+3)
_UFlat4-8         2.50GB/s ± 1%   3.06GB/s ± 1%     +22.68%  (p=0.000 n=3+3)
_ZFlat0-8          284MB/s ± 1%    362MB/s ± 1%     +27.45%  (p=0.000 n=3+3)
_ZFlat2-8         2.85GB/s ± 0%   3.71GB/s ± 1%     +30.21%  (p=0.000 n=3+3)
_ZFlat3-8         64.5MB/s ± 1%   216.9MB/s ± 2%   +236.02%  (p=0.000 n=3+3)
_ZFlat4-8          415MB/s ± 1%   2000MB/s ± 1%    +382.43%  (p=0.000 n=3+3)
_ZFlat5-8          282MB/s ± 1%    354MB/s ± 2%     +25.67%  (p=0.003 n=3+3)
_ZFlat6-8          124MB/s ± 1%    136MB/s ± 2%      +9.84%  (p=0.013 n=3+3)
_ZFlat7-8          116MB/s ± 2%    127MB/s ± 1%     +10.12%  (p=0.002 n=3+3)
_ZFlat8-8          128MB/s ± 1%    142MB/s ± 1%     +11.38%  (p=0.000 n=3+3)
_ZFlat9-8          111MB/s ± 2%    120MB/s ± 1%      +8.45%  (p=0.009 n=3+3)
_ZFlat10-8         318MB/s ± 1%    439MB/s ± 1%     +38.16%  (p=0.000 n=3+3)
_ZFlat11-8         183MB/s ± 0%    226MB/s ± 3%     +23.53%  (p=0.004 n=3+3)
```

Only significant differences are included.

## Size Comparison:
```
name     data     insize  outsize  ref     red.    ref-red  r-delta
Flat0:   html     102400  23317    23330   77.23%  77.23%    0.01%
Flat1:   urls     712086  337290   335282  52.63%  52.63%   -0.28%
Flat2:   jpg      123093  123035   123032   0.05%   0.05%   -0.00%
Flat3:   jpg_200  123093  123035   123032   0.05%   0.05%   -0.00%
Flat4:   pdf      102400  84897    83754   17.09%  17.09%   -1.12%
Flat5:   html4    409600  92689    92366   77.37%  77.37%   -0.08%
Flat6:   txt1     152089  89544    89495   41.12%  41.12%   -0.03%
Flat7:   txt2     129301  80531    80518   37.72%  37.72%   -0.01%
Flat8:   txt3     426754  238857   238849  44.03%  44.03%   -0.00%
Flat9:   txt4     481861  324755   325047  32.60%  32.60%    0.06%
Flat10:  pb       118588  24723    23392   79.15%  79.15%   -1.12%
Flat11:  gaviota  184320  73963    73962   59.87%  59.87%   -0.00%
```

r-delta is the difference in compression. Negative means this package performs worse.

# license

This code is licensed under the same conditions as the original Go code. See the LICENSE file.
|
193
vendor/github.com/klauspost/compress/flate/asm_test.go
generated
vendored
Normal file
@ -0,0 +1,193 @@
|
||||
// Copyright 2015, Klaus Post, see LICENSE for details.
|
||||
|
||||
//+build amd64
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCRC(t *testing.T) {
|
||||
if !useSSE42 {
|
||||
t.Skip("Skipping CRC test, no SSE 4.2 available")
|
||||
}
|
||||
for _, x := range deflateTests {
|
||||
y := x.out
|
||||
if len(y) >= minMatchLength {
|
||||
t.Logf("In: %v, Out:0x%08x", y[0:minMatchLength], crc32sse(y[0:minMatchLength]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCRCBulk(t *testing.T) {
|
||||
if !useSSE42 {
|
||||
t.Skip("Skipping CRC test, no SSE 4.2 available")
|
||||
}
|
||||
for _, x := range deflateTests {
|
||||
y := x.out
|
||||
y = append(y, y...)
|
||||
y = append(y, y...)
|
||||
y = append(y, y...)
|
||||
y = append(y, y...)
|
||||
y = append(y, y...)
|
||||
y = append(y, y...)
|
||||
if !testing.Short() {
|
||||
y = append(y, y...)
|
||||
y = append(y, y...)
|
||||
}
|
||||
y = append(y, 1)
|
||||
if len(y) >= minMatchLength {
|
||||
for j := len(y) - 1; j >= 4; j-- {
|
||||
|
||||
// Create copies, so we can more easily detect out-of-bound reads
|
||||
test := make([]byte, j)
|
||||
test2 := make([]byte, j)
|
||||
copy(test, y[:j])
|
||||
copy(test2, y[:j])
|
||||
|
||||
// We allocate one more than we need to test for unintentional overwrites
|
||||
dst := make([]hash, j-3+1)
|
||||
ref := make([]hash, j-3+1)
|
||||
for i := range dst {
|
||||
dst[i] = hash(i + 100)
|
||||
ref[i] = hash(i + 101)
|
||||
}
|
||||
// Last entry must NOT be overwritten.
|
||||
dst[j-3] = 0x1234
|
||||
ref[j-3] = 0x1234
|
||||
|
||||
// Do two encodes we can compare
|
||||
crc32sseAll(test, dst)
|
||||
crc32sseAll(test2, ref)
|
||||
|
||||
// Check all values
|
||||
for i, got := range dst {
|
||||
if i == j-3 {
|
||||
if dst[i] != 0x1234 {
|
||||
t.Fatalf("end of expected dst overwritten, was %08x", uint32(dst[i]))
|
||||
}
|
||||
continue
|
||||
}
|
||||
expect := crc32sse(y[i : i+4])
|
||||
if got != expect && got == hash(i)+100 {
|
||||
t.Errorf("Len:%d Index:%d, expected 0x%08x but not modified", len(y), i, uint32(expect))
|
||||
} else if got != expect {
|
||||
t.Errorf("Len:%d Index:%d, got 0x%08x expected:0x%08x", len(y), i, uint32(got), uint32(expect))
|
||||
}
|
||||
expect = ref[i]
|
||||
if got != expect {
|
||||
t.Errorf("Len:%d Index:%d, got 0x%08x expected:0x%08x", len(y), i, got, expect)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMatchLen(t *testing.T) {
|
||||
if !useSSE42 {
|
||||
t.Skip("Skipping Matchlen test, no SSE 4.2 available")
|
||||
}
|
||||
// Maximum length tested
|
||||
var maxLen = 512
|
||||
|
||||
// Skips per iteration
|
||||
is, js, ks := 3, 2, 1
|
||||
if testing.Short() {
|
||||
is, js, ks = 7, 5, 3
|
||||
}
|
||||
|
||||
a := make([]byte, maxLen)
|
||||
b := make([]byte, maxLen)
|
||||
bb := make([]byte, maxLen)
|
||||
rand.Seed(1)
|
||||
for i := range a {
|
||||
a[i] = byte(rand.Int63())
|
||||
b[i] = byte(rand.Int63())
|
||||
}
|
||||
|
||||
// Test different lengths
|
||||
for i := 0; i < maxLen; i += is {
|
||||
// Test different dst offsets.
|
||||
for j := 0; j < maxLen-1; j += js {
|
||||
copy(bb, b)
|
||||
// Test different src offsets
|
||||
for k := i - 1; k >= 0; k -= ks {
|
||||
copy(bb[j:], a[k:i])
|
||||
maxTest := maxLen - j
|
||||
if maxTest > maxLen-k {
|
||||
maxTest = maxLen - k
|
||||
}
|
||||
got := matchLenSSE4(a[k:], bb[j:], maxTest)
|
||||
expect := matchLenReference(a[k:], bb[j:], maxTest)
|
||||
if got > maxTest || got < 0 {
|
||||
t.Fatalf("unexpected result %d (len:%d, src offset: %d, dst offset:%d)", got, maxTest, k, j)
|
||||
}
|
||||
if got != expect {
|
||||
t.Fatalf("Mismatch, expected %d, got %d", expect, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// matchLenReference is a reference matcher.
|
||||
func matchLenReference(a, b []byte, max int) int {
|
||||
for i := 0; i < max; i++ {
|
||||
if a[i] != b[i] {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
func TestHistogram(t *testing.T) {
|
||||
if !useSSE42 {
|
||||
t.Skip("Skipping Matchlen test, no SSE 4.2 available")
|
||||
}
|
||||
// Maximum length tested
|
||||
const maxLen = 65536
|
||||
var maxOff = 8
|
||||
|
||||
// Skips per iteration
|
||||
is, js := 5, 3
|
||||
if testing.Short() {
|
||||
is, js = 9, 1
|
||||
maxOff = 1
|
||||
}
|
||||
|
||||
a := make([]byte, maxLen+maxOff)
|
||||
rand.Seed(1)
|
||||
for i := range a {
|
||||
a[i] = byte(rand.Int63())
|
||||
}
|
||||
|
||||
// Test different lengths
|
||||
for i := 0; i <= maxLen; i += is {
|
||||
// Test different offsets
|
||||
for j := 0; j < maxOff; j += js {
|
||||
var got [256]int32
|
||||
var reference [256]int32
|
||||
|
||||
histogram(a[j:i+j], got[:])
|
||||
histogramReference(a[j:i+j], reference[:])
|
||||
for k := range got {
|
||||
if got[k] != reference[k] {
|
||||
t.Fatalf("mismatch at len:%d, offset:%d, value %d: (got) %d != %d (expected)", i, j, k, got[k], reference[k])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// histogramReference is a reference histogram implementation.
|
||||
func histogramReference(b []byte, h []int32) {
|
||||
if len(h) < 256 {
|
||||
panic("Histogram too small")
|
||||
}
|
||||
for _, t := range b {
|
||||
h[t]++
|
||||
}
|
||||
}
|
32
vendor/github.com/klauspost/compress/flate/copy.go
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
// forwardCopy is like the built-in copy function except that it always goes
|
||||
// forward from the start, even if the dst and src overlap.
|
||||
// It is equivalent to:
|
||||
// for i := 0; i < n; i++ {
|
||||
// mem[dst+i] = mem[src+i]
|
||||
// }
|
||||
func forwardCopy(mem []byte, dst, src, n int) {
|
||||
if dst <= src {
|
||||
copy(mem[dst:dst+n], mem[src:src+n])
|
||||
return
|
||||
}
|
||||
for {
|
||||
if dst >= src+n {
|
||||
copy(mem[dst:dst+n], mem[src:src+n])
|
||||
return
|
||||
}
|
||||
// There is some forward overlap. The destination
|
||||
// will be filled with a repeated pattern of mem[src:src+k].
|
||||
// We copy one instance of the pattern here, then repeat.
|
||||
// Each time around this loop k will double.
|
||||
k := dst - src
|
||||
copy(mem[dst:dst+k], mem[src:src+k])
|
||||
n -= k
|
||||
dst += k
|
||||
}
|
||||
}
|
54
vendor/github.com/klauspost/compress/flate/copy_test.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestForwardCopy(t *testing.T) {
|
||||
testCases := []struct {
|
||||
dst0, dst1 int
|
||||
src0, src1 int
|
||||
want string
|
||||
}{
|
||||
{0, 9, 0, 9, "012345678"},
|
||||
{0, 5, 4, 9, "45678"},
|
||||
{4, 9, 0, 5, "01230"},
|
||||
{1, 6, 3, 8, "34567"},
|
||||
{3, 8, 1, 6, "12121"},
|
||||
{0, 9, 3, 6, "345"},
|
||||
{3, 6, 0, 9, "012"},
|
||||
{1, 6, 0, 9, "00000"},
|
||||
{0, 4, 7, 8, "7"},
|
||||
{0, 1, 6, 8, "6"},
|
||||
{4, 4, 6, 9, ""},
|
||||
{2, 8, 6, 6, ""},
|
||||
{0, 0, 0, 0, ""},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
b := []byte("0123456789")
|
||||
n := tc.dst1 - tc.dst0
|
||||
if tc.src1-tc.src0 < n {
|
||||
n = tc.src1 - tc.src0
|
||||
}
|
||||
forwardCopy(b, tc.dst0, tc.src0, n)
|
||||
got := string(b[tc.dst0 : tc.dst0+n])
|
||||
if got != tc.want {
|
||||
t.Errorf("dst=b[%d:%d], src=b[%d:%d]: got %q, want %q",
|
||||
tc.dst0, tc.dst1, tc.src0, tc.src1, got, tc.want)
|
||||
}
|
||||
// Check that the bytes outside of dst[:n] were not modified.
|
||||
for i, x := range b {
|
||||
if i >= tc.dst0 && i < tc.dst0+n {
|
||||
continue
|
||||
}
|
||||
if int(x) != '0'+i {
|
||||
t.Errorf("dst=b[%d:%d], src=b[%d:%d]: copy overrun at b[%d]: got '%c', want '%c'",
|
||||
tc.dst0, tc.dst1, tc.src0, tc.src1, i, x, '0'+i)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
39
vendor/github.com/klauspost/compress/flate/crc32_amd64.go
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
//+build !noasm
|
||||
//+build !appengine
|
||||
|
||||
// Copyright 2015, Klaus Post, see LICENSE for details.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"github.com/klauspost/cpuid"
|
||||
)
|
||||
|
||||
// crc32sse returns a hash for the first 4 bytes of the slice
|
||||
// len(a) must be >= 4.
|
||||
//go:noescape
|
||||
func crc32sse(a []byte) hash
|
||||
|
||||
// crc32sseAll calculates hashes for each 4-byte set in a.
|
||||
// dst must be at least len(a) - 4 in size.
|
||||
// The size is not checked by the assembly.
|
||||
//go:noescape
|
||||
func crc32sseAll(a []byte, dst []hash)
|
||||
|
||||
// matchLenSSE4 returns the number of matching bytes in a and b
|
||||
// up to length 'max'. Both slices must be at least 'max'
|
||||
// bytes in size.
|
||||
// It uses the PCMPESTRI SSE 4.2 instruction.
|
||||
//go:noescape
|
||||
func matchLenSSE4(a, b []byte, max int) int
|
||||
|
||||
// histogram accumulates a histogram of b in h.
|
||||
// h must be at least 256 entries in length,
|
||||
// and must be cleared before calling this function.
|
||||
//go:noescape
|
||||
func histogram(b []byte, h []int32)
|
||||
|
||||
// Detect SSE 4.2 feature.
|
||||
func init() {
|
||||
useSSE42 = cpuid.CPU.SSE42()
|
||||
}
|
219
vendor/github.com/klauspost/compress/flate/crc32_amd64.s
generated
vendored
Normal file
@ -0,0 +1,219 @@
|
||||
//+build !noasm
|
||||
//+build !appengine
|
||||
|
||||
// Copyright 2015, Klaus Post, see LICENSE for details.
|
||||
|
||||
// func crc32sse(a []byte) hash
|
||||
TEXT ·crc32sse(SB), 4, $0
|
||||
MOVQ a+0(FP), R10
|
||||
XORQ BX, BX
|
||||
|
||||
// CRC32 dword (R10), EBX
|
||||
BYTE $0xF2; BYTE $0x41; BYTE $0x0f
|
||||
BYTE $0x38; BYTE $0xf1; BYTE $0x1a
|
||||
|
||||
MOVL BX, ret+24(FP)
|
||||
RET
|
||||
|
||||
// func crc32sseAll(a []byte, dst []hash)
|
||||
TEXT ·crc32sseAll(SB), 4, $0
|
||||
MOVQ a+0(FP), R8 // R8: src
|
||||
MOVQ a_len+8(FP), R10 // input length
|
||||
MOVQ dst+24(FP), R9 // R9: dst
|
||||
SUBQ $4, R10
|
||||
JS end
|
||||
JZ one_crc
|
||||
MOVQ R10, R13
|
||||
SHRQ $2, R10 // len/4
|
||||
ANDQ $3, R13 // len&3
|
||||
XORQ BX, BX
|
||||
ADDQ $1, R13
|
||||
TESTQ R10, R10
|
||||
JZ rem_loop
|
||||
|
||||
crc_loop:
|
||||
MOVQ (R8), R11
|
||||
XORQ BX, BX
|
||||
XORQ DX, DX
|
||||
XORQ DI, DI
|
||||
MOVQ R11, R12
|
||||
SHRQ $8, R11
|
||||
MOVQ R12, AX
|
||||
MOVQ R11, CX
|
||||
SHRQ $16, R12
|
||||
SHRQ $16, R11
|
||||
MOVQ R12, SI
|
||||
|
||||
// CRC32 EAX, EBX
|
||||
BYTE $0xF2; BYTE $0x0f
|
||||
BYTE $0x38; BYTE $0xf1; BYTE $0xd8
|
||||
|
||||
// CRC32 ECX, EDX
|
||||
BYTE $0xF2; BYTE $0x0f
|
||||
BYTE $0x38; BYTE $0xf1; BYTE $0xd1
|
||||
|
||||
// CRC32 ESI, EDI
|
||||
BYTE $0xF2; BYTE $0x0f
|
||||
BYTE $0x38; BYTE $0xf1; BYTE $0xfe
|
||||
MOVL BX, (R9)
|
||||
MOVL DX, 4(R9)
|
||||
MOVL DI, 8(R9)
|
||||
|
||||
XORQ BX, BX
|
||||
MOVL R11, AX
|
||||
|
||||
// CRC32 EAX, EBX
|
||||
BYTE $0xF2; BYTE $0x0f
|
||||
BYTE $0x38; BYTE $0xf1; BYTE $0xd8
|
||||
MOVL BX, 12(R9)
|
||||
|
||||
ADDQ $16, R9
|
||||
ADDQ $4, R8
|
||||
XORQ BX, BX
|
||||
SUBQ $1, R10
|
||||
JNZ crc_loop
|
||||
|
||||
rem_loop:
|
||||
MOVL (R8), AX
|
||||
|
||||
// CRC32 EAX, EBX
|
||||
BYTE $0xF2; BYTE $0x0f
|
||||
BYTE $0x38; BYTE $0xf1; BYTE $0xd8
|
||||
|
||||
MOVL BX, (R9)
|
||||
ADDQ $4, R9
|
||||
ADDQ $1, R8
|
||||
XORQ BX, BX
|
||||
SUBQ $1, R13
|
||||
JNZ rem_loop
|
||||
|
||||
end:
|
||||
RET
|
||||
|
||||
one_crc:
|
||||
MOVQ $1, R13
|
||||
XORQ BX, BX
|
||||
JMP rem_loop
|
||||
|
||||
// func matchLenSSE4(a, b []byte, max int) int
|
||||
TEXT ·matchLenSSE4(SB), 4, $0
|
||||
MOVQ a+0(FP), SI // RSI: &a
|
||||
MOVQ b+24(FP), DI // RDI: &b
|
||||
MOVQ max+48(FP), R10 // R10: max
|
||||
XORQ R11, R11 // R11: match length
|
||||
MOVQ R10, R12 // R12: Remainder
|
||||
SHRQ $4, R10 // max / 16
|
||||
MOVQ $16, AX // Set length for PCMPESTRI
|
||||
MOVQ $16, DX // Set length for PCMPESTRI
|
||||
ANDQ $15, R12 // max & 15
|
||||
TESTQ R10, R10
|
||||
JZ matchlen_verysmall
|
||||
|
||||
loopback_matchlen:
|
||||
MOVOU (SI), X0 // a[x]
|
||||
MOVOU (DI), X1 // b[x]
|
||||
|
||||
// PCMPESTRI $0x18, X1, X0
|
||||
// 0x18 = _SIDD_UBYTE_OPS (0x0) | _SIDD_CMP_EQUAL_EACH (0x8) | _SIDD_NEGATIVE_POLARITY (0x10)
|
||||
BYTE $0x66; BYTE $0x0f; BYTE $0x3a
|
||||
BYTE $0x61; BYTE $0xc1; BYTE $0x18
|
||||
|
||||
JC match_ended
|
||||
|
||||
ADDQ $16, SI
|
||||
ADDQ $16, DI
|
||||
ADDQ $16, R11
|
||||
|
||||
SUBQ $1, R10
|
||||
JNZ loopback_matchlen
|
||||
|
||||
// Check the remainder using REP CMPSB
|
||||
matchlen_verysmall:
|
||||
TESTQ R12, R12
|
||||
JZ done_matchlen
|
||||
MOVQ R12, CX
|
||||
ADDQ R12, R11
|
||||
|
||||
// Compare CX bytes at [SI] [DI]
|
||||
// Subtract one from CX for every match.
|
||||
// Terminates when CX is zero (checked pre-compare)
|
||||
CLD
|
||||
REP; CMPSB
|
||||
|
||||
// Check if last was a match.
|
||||
JZ done_matchlen
|
||||
|
||||
// Subtract remaining bytes.
|
||||
SUBQ CX, R11
|
||||
SUBQ $1, R11
|
||||
MOVQ R11, ret+56(FP)
|
||||
RET
|
||||
|
||||
match_ended:
|
||||
ADDQ CX, R11
|
||||
|
||||
done_matchlen:
|
||||
MOVQ R11, ret+56(FP)
|
||||
RET
|
||||
|
||||
// func histogram(b []byte, h []int32)
|
||||
TEXT ·histogram(SB), 4, $0
|
||||
MOVQ b+0(FP), SI // SI: &b
|
||||
MOVQ b_len+8(FP), R9 // R9: len(b)
|
||||
MOVQ h+24(FP), DI // DI: Histogram
|
||||
MOVQ R9, R8
|
||||
SHRQ $3, R8
|
||||
JZ hist1
|
||||
XORQ R11, R11
|
||||
|
||||
loop_hist8:
|
||||
MOVQ (SI), R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
MOVB R10, R11
|
||||
INCL (DI)(R11*4)
|
||||
SHRQ $8, R10
|
||||
|
||||
INCL (DI)(R10*4)
|
||||
|
||||
ADDQ $8, SI
|
||||
DECQ R8
|
||||
JNZ loop_hist8
|
||||
|
||||
hist1:
|
||||
ANDQ $7, R9
|
||||
JZ end_hist
|
||||
XORQ R10, R10
|
||||
|
||||
loop_hist1:
|
||||
MOVB (SI), R10
|
||||
INCL (DI)(R10*4)
|
||||
INCQ SI
|
||||
DECQ R9
|
||||
JNZ loop_hist1
|
||||
|
||||
end_hist:
|
||||
RET
|
34
vendor/github.com/klauspost/compress/flate/crc32_noasm.go
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
//+build !amd64 noasm appengine
|
||||
|
||||
// Copyright 2015, Klaus Post, see LICENSE for details.
|
||||
|
||||
package flate
|
||||
|
||||
func init() {
|
||||
useSSE42 = false
|
||||
}
|
||||
|
||||
// crc32sse should never be called.
|
||||
func crc32sse(a []byte) hash {
|
||||
panic("no assembler")
|
||||
}
|
||||
|
||||
// crc32sseAll should never be called.
|
||||
func crc32sseAll(a []byte, dst []hash) {
|
||||
panic("no assembler")
|
||||
}
|
||||
|
||||
// matchLenSSE4 should never be called.
|
||||
func matchLenSSE4(a, b []byte, max int) int {
|
||||
panic("no assembler")
|
||||
return 0
|
||||
}
|
||||
|
||||
// histogram accumulates a histogram of b in h.
|
||||
// h must be at least 256 entries in length,
|
||||
// and must be cleared before calling this function.
|
||||
func histogram(b []byte, h []int32) {
|
||||
for _, t := range b {
|
||||
h[t]++
|
||||
}
|
||||
}
|
1357
vendor/github.com/klauspost/compress/flate/deflate.go
generated
vendored
Normal file
File diff suppressed because it is too large
632
vendor/github.com/klauspost/compress/flate/deflate_test.go
generated
vendored
Normal file
@ -0,0 +1,632 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Copyright (c) 2015 Klaus Post
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type deflateTest struct {
|
||||
in []byte
|
||||
level int
|
||||
out []byte
|
||||
}
|
||||
|
||||
type deflateInflateTest struct {
|
||||
in []byte
|
||||
}
|
||||
|
||||
type reverseBitsTest struct {
|
||||
in uint16
|
||||
bitCount uint8
|
||||
out uint16
|
||||
}
|
||||
|
||||
var deflateTests = []*deflateTest{
|
||||
{[]byte{}, 0, []byte{1, 0, 0, 255, 255}},
|
||||
{[]byte{0x11}, BestCompression, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||
{[]byte{0x11}, BestCompression, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||
{[]byte{0x11}, BestCompression, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||
|
||||
{[]byte{0x11}, 0, []byte{0, 1, 0, 254, 255, 17, 1, 0, 0, 255, 255}},
|
||||
{[]byte{0x11, 0x12}, 0, []byte{0, 2, 0, 253, 255, 17, 18, 1, 0, 0, 255, 255}},
|
||||
{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 0,
|
||||
[]byte{0, 8, 0, 247, 255, 17, 17, 17, 17, 17, 17, 17, 17, 1, 0, 0, 255, 255},
|
||||
},
|
||||
{[]byte{}, 1, []byte{1, 0, 0, 255, 255}},
|
||||
{[]byte{0x11}, BestCompression, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||
{[]byte{0x11, 0x12}, BestCompression, []byte{18, 20, 2, 4, 0, 0, 255, 255}},
|
||||
{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, BestCompression, []byte{18, 132, 2, 64, 0, 0, 0, 255, 255}},
|
||||
{[]byte{}, 9, []byte{1, 0, 0, 255, 255}},
|
||||
{[]byte{0x11}, 9, []byte{18, 4, 4, 0, 0, 255, 255}},
|
||||
{[]byte{0x11, 0x12}, 9, []byte{18, 20, 2, 4, 0, 0, 255, 255}},
|
||||
{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 9, []byte{18, 132, 2, 64, 0, 0, 0, 255, 255}},
|
||||
}
|
||||
|
||||
var deflateInflateTests = []*deflateInflateTest{
|
||||
{[]byte{}},
|
||||
{[]byte{0x11}},
|
||||
{[]byte{0x11, 0x12}},
|
||||
{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}},
|
||||
{[]byte{0x11, 0x10, 0x13, 0x41, 0x21, 0x21, 0x41, 0x13, 0x87, 0x78, 0x13}},
|
||||
{largeDataChunk()},
|
||||
}
|
||||
|
||||
var reverseBitsTests = []*reverseBitsTest{
|
||||
{1, 1, 1},
|
||||
{1, 2, 2},
|
||||
{1, 3, 4},
|
||||
{1, 4, 8},
|
||||
{1, 5, 16},
|
||||
{17, 5, 17},
|
||||
{257, 9, 257},
|
||||
{29, 5, 23},
|
||||
}
|
||||
|
||||
func largeDataChunk() []byte {
|
||||
result := make([]byte, 100000)
|
||||
for i := range result {
|
||||
result[i] = byte(i * i & 0xFF)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func TestCRCBulkOld(t *testing.T) {
|
||||
for _, x := range deflateTests {
|
||||
y := x.out
|
||||
if len(y) >= minMatchLength {
|
||||
y = append(y, y...)
|
||||
for j := 4; j < len(y); j++ {
|
||||
y := y[:j]
|
||||
dst := make([]hash, len(y)-minMatchLength+1)
|
||||
for i := range dst {
|
||||
dst[i] = hash(i + 100)
|
||||
}
|
||||
oldBulkHash(y, dst)
|
||||
for i, val := range dst {
|
||||
got := val & hashMask
|
||||
expect := oldHash(y[i:]) & hashMask
|
||||
if got != expect && got == hash(i)+100 {
|
||||
t.Errorf("Len:%d Index:%d, expected 0x%08x but not modified", len(y), i, expect)
|
||||
} else if got != expect {
|
||||
t.Errorf("Len:%d Index:%d, got 0x%08x expected:0x%08x", len(y), i, got, expect)
|
||||
} else {
|
||||
//t.Logf("Len:%d Index:%d OK (0x%08x)", len(y), i, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDeflate(t *testing.T) {
|
||||
for _, h := range deflateTests {
|
||||
var buf bytes.Buffer
|
||||
w, err := NewWriter(&buf, h.level)
|
||||
if err != nil {
|
||||
t.Errorf("NewWriter: %v", err)
|
||||
continue
|
||||
}
|
||||
w.Write(h.in)
|
||||
w.Close()
|
||||
if !bytes.Equal(buf.Bytes(), h.out) {
|
||||
t.Errorf("Deflate(%d, %x) = \n%#v, want \n%#v", h.level, h.in, buf.Bytes(), h.out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// A sparseReader returns a stream consisting of 0s followed by 1<<16 1s.
|
||||
// This tests missing hash references in a very large input.
|
||||
type sparseReader struct {
|
||||
l int64
|
||||
cur int64
|
||||
}
|
||||
|
||||
func (r *sparseReader) Read(b []byte) (n int, err error) {
|
||||
if r.cur >= r.l {
|
||||
return 0, io.EOF
|
||||
}
|
||||
n = len(b)
|
||||
cur := r.cur + int64(n)
|
||||
if cur > r.l {
|
||||
n -= int(cur - r.l)
|
||||
cur = r.l
|
||||
}
|
||||
for i := range b[0:n] {
|
||||
if r.cur+int64(i) >= r.l-1<<16 {
|
||||
b[i] = 1
|
||||
} else {
|
||||
b[i] = 0
|
||||
}
|
||||
}
|
||||
r.cur = cur
|
||||
return
|
||||
}
|
||||
|
||||
func TestVeryLongSparseChunk(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping sparse chunk during short test")
|
||||
}
|
||||
w, err := NewWriter(ioutil.Discard, 1)
|
||||
if err != nil {
|
||||
t.Errorf("NewWriter: %v", err)
|
||||
return
|
||||
}
|
||||
if _, err = io.Copy(w, &sparseReader{l: 23E8}); err != nil {
|
||||
t.Errorf("Compress failed: %v", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
type syncBuffer struct {
|
||||
buf bytes.Buffer
|
||||
mu sync.RWMutex
|
||||
closed bool
|
||||
ready chan bool
|
||||
}
|
||||
|
||||
func newSyncBuffer() *syncBuffer {
|
||||
return &syncBuffer{ready: make(chan bool, 1)}
|
||||
}
|
||||
|
||||
func (b *syncBuffer) Read(p []byte) (n int, err error) {
|
||||
for {
|
||||
b.mu.RLock()
|
||||
n, err = b.buf.Read(p)
|
||||
b.mu.RUnlock()
|
||||
if n > 0 || b.closed {
|
||||
return
|
||||
}
|
||||
<-b.ready
|
||||
}
|
||||
}
|
||||
|
||||
func (b *syncBuffer) signal() {
|
||||
select {
|
||||
case b.ready <- true:
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
func (b *syncBuffer) Write(p []byte) (n int, err error) {
|
||||
n, err = b.buf.Write(p)
|
||||
b.signal()
|
||||
return
|
||||
}
|
||||
|
||||
func (b *syncBuffer) WriteMode() {
|
||||
b.mu.Lock()
|
||||
}
|
||||
|
||||
func (b *syncBuffer) ReadMode() {
|
||||
b.mu.Unlock()
|
||||
b.signal()
|
||||
}
|
||||
|
||||
func (b *syncBuffer) Close() error {
|
||||
b.closed = true
|
||||
b.signal()
|
||||
return nil
|
||||
}
|
||||
|
||||
func testSync(t *testing.T, level int, input []byte, name string) {
|
||||
if len(input) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
t.Logf("--testSync %d, %d, %s", level, len(input), name)
|
||||
buf := newSyncBuffer()
|
||||
buf1 := new(bytes.Buffer)
|
||||
buf.WriteMode()
|
||||
w, err := NewWriter(io.MultiWriter(buf, buf1), level)
|
||||
if err != nil {
|
||||
t.Errorf("NewWriter: %v", err)
|
||||
return
|
||||
}
|
||||
r := NewReader(buf)
|
||||
|
||||
// Write half the input and read back.
|
||||
for i := 0; i < 2; i++ {
|
||||
var lo, hi int
|
||||
if i == 0 {
|
||||
lo, hi = 0, (len(input)+1)/2
|
||||
} else {
|
||||
lo, hi = (len(input)+1)/2, len(input)
|
||||
}
|
||||
t.Logf("#%d: write %d-%d", i, lo, hi)
|
||||
if _, err := w.Write(input[lo:hi]); err != nil {
|
||||
t.Errorf("testSync: write: %v", err)
|
||||
return
|
||||
}
|
||||
if i == 0 {
|
||||
if err := w.Flush(); err != nil {
|
||||
t.Errorf("testSync: flush: %v", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
if err := w.Close(); err != nil {
|
||||
t.Errorf("testSync: close: %v", err)
|
||||
}
|
||||
}
|
||||
buf.ReadMode()
|
||||
out := make([]byte, hi-lo+1)
|
||||
m, err := io.ReadAtLeast(r, out, hi-lo)
|
||||
t.Logf("#%d: read %d", i, m)
|
||||
if m != hi-lo || err != nil {
|
||||
t.Errorf("testSync/%d (%d, %d, %s): read %d: %d, %v (%d left)", i, level, len(input), name, hi-lo, m, err, buf.buf.Len())
|
||||
return
|
||||
}
|
||||
if !bytes.Equal(input[lo:hi], out[:hi-lo]) {
|
||||
t.Errorf("testSync/%d: read wrong bytes: %x vs %x", i, input[lo:hi], out[:hi-lo])
|
||||
return
|
||||
}
|
||||
// This test originally checked that after reading
|
||||
// the first half of the input, there was nothing left
|
||||
// in the read buffer (buf.buf.Len() != 0) but that is
|
||||
// not necessarily the case: the write Flush may emit
|
||||
// some extra framing bits that are not necessary
|
||||
// to process to obtain the first half of the uncompressed
|
||||
// data. The test ran correctly most of the time, because
|
||||
// the background goroutine had usually read even
|
||||
// those extra bits by now, but it's not a useful thing to
|
||||
// check.
|
||||
buf.WriteMode()
|
||||
}
|
||||
buf.ReadMode()
|
||||
out := make([]byte, 10)
|
||||
if n, err := r.Read(out); n > 0 || err != io.EOF {
|
||||
t.Errorf("testSync (%d, %d, %s): final Read: %d, %v (hex: %x)", level, len(input), name, n, err, out[0:n])
|
||||
}
|
||||
if buf.buf.Len() != 0 {
|
||||
t.Errorf("testSync (%d, %d, %s): extra data at end", level, len(input), name)
|
||||
}
|
||||
r.Close()
|
||||
|
||||
// stream should work for ordinary reader too
|
||||
r = NewReader(buf1)
|
||||
out, err = ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Errorf("testSync: read: %s", err)
|
||||
return
|
||||
}
|
||||
r.Close()
|
||||
if !bytes.Equal(input, out) {
|
||||
t.Errorf("testSync: decompress(compress(data)) != data: level=%d input=%s", level, name)
|
||||
}
|
||||
}
|
||||
|
||||
func testToFromWithLevelAndLimit(t *testing.T, level int, input []byte, name string, limit int) {
|
||||
var buffer bytes.Buffer
|
||||
w, err := NewWriter(&buffer, level)
|
||||
if err != nil {
|
||||
t.Errorf("NewWriter: %v", err)
|
||||
return
|
||||
}
|
||||
w.Write(input)
|
||||
w.Close()
|
||||
if limit > 0 && buffer.Len() > limit {
|
||||
t.Errorf("level: %d, len(compress(data)) = %d > limit = %d", level, buffer.Len(), limit)
|
||||
return
|
||||
}
|
||||
if limit > 0 {
|
||||
t.Logf("level: %d - Size:%.2f%%, %d b\n", level, float64(buffer.Len()*100)/float64(limit), buffer.Len())
|
||||
}
|
||||
r := NewReader(&buffer)
|
||||
out, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Errorf("read: %s", err)
|
||||
return
|
||||
}
|
||||
r.Close()
|
||||
if !bytes.Equal(input, out) {
|
||||
t.Errorf("decompress(compress(data)) != data: level=%d input=%s", level, name)
|
||||
return
|
||||
}
|
||||
testSync(t, level, input, name)
|
||||
}
|
||||
|
||||
func testToFromWithLimit(t *testing.T, input []byte, name string, limit [11]int) {
|
||||
for i := 0; i < 10; i++ {
|
||||
testToFromWithLevelAndLimit(t, i, input, name, limit[i])
|
||||
}
|
||||
testToFromWithLevelAndLimit(t, -2, input, name, limit[10])
|
||||
}
|
||||
|
||||
func TestDeflateInflate(t *testing.T) {
|
||||
for i, h := range deflateInflateTests {
|
||||
testToFromWithLimit(t, h.in, fmt.Sprintf("#%d", i), [11]int{})
|
||||
}
|
||||
}
|
||||
|
||||
func TestReverseBits(t *testing.T) {
|
||||
for _, h := range reverseBitsTests {
|
||||
if v := reverseBits(h.in, h.bitCount); v != h.out {
|
||||
t.Errorf("reverseBits(%v,%v) = %v, want %v",
|
||||
h.in, h.bitCount, v, h.out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type deflateInflateStringTest struct {
|
||||
filename string
|
||||
label string
|
||||
limit [11]int // Number 11 is ConstantCompression
|
||||
}
|
||||
|
||||
var deflateInflateStringTests = []deflateInflateStringTest{
|
||||
{
|
||||
"../testdata/e.txt",
|
||||
"2.718281828...",
|
||||
[...]int{100018, 67900, 50960, 51150, 50930, 50790, 50790, 50790, 50790, 50790, 43683 + 100},
|
||||
},
|
||||
{
|
||||
"../testdata/Mark.Twain-Tom.Sawyer.txt",
|
||||
"Mark.Twain-Tom.Sawyer",
|
||||
[...]int{407330, 195000, 185361, 180974, 169160, 164476, 162936, 160506, 160295, 160295, 233460 + 100},
|
||||
},
|
||||
}
|
||||
|
||||
func TestDeflateInflateString(t *testing.T) {
|
||||
for _, test := range deflateInflateStringTests {
|
||||
gold, err := ioutil.ReadFile(test.filename)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
// Remove returns that may be present on Windows
|
||||
neutral := strings.Map(func(r rune) rune {
|
||||
if r != '\r' {
|
||||
return r
|
||||
}
|
||||
return -1
|
||||
}, string(gold))
|
||||
|
||||
testToFromWithLimit(t, []byte(neutral), test.label, test.limit)
|
||||
|
||||
if testing.Short() {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestReaderDict(t *testing.T) {
|
||||
const (
|
||||
dict = "hello world"
|
||||
text = "hello again world"
|
||||
)
|
||||
var b bytes.Buffer
|
||||
w, err := NewWriter(&b, 5)
|
||||
if err != nil {
|
||||
t.Fatalf("NewWriter: %v", err)
|
||||
}
|
||||
w.Write([]byte(dict))
|
||||
w.Flush()
|
||||
b.Reset()
|
||||
w.Write([]byte(text))
|
||||
w.Close()
|
||||
|
||||
r := NewReaderDict(&b, []byte(dict))
|
||||
data, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if string(data) != "hello again world" {
|
||||
t.Fatalf("read returned %q want %q", string(data), text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriterDict(t *testing.T) {
|
||||
const (
|
||||
dict = "hello world Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
|
||||
text = "hello world Lorem ipsum dolor sit amet"
|
||||
)
|
||||
// This test is sensitive to algorithm changes that skip
|
||||
// data in favour of speed. Higher levels are less prone to this
|
||||
// so we test level 4-9.
|
||||
for l := 4; l < 9; l++ {
|
||||
var b bytes.Buffer
|
||||
w, err := NewWriter(&b, l)
|
||||
if err != nil {
|
||||
t.Fatalf("level %d, NewWriter: %v", l, err)
|
||||
}
|
||||
w.Write([]byte(dict))
|
||||
w.Flush()
|
||||
b.Reset()
|
||||
w.Write([]byte(text))
|
||||
w.Close()
|
||||
|
||||
var b1 bytes.Buffer
|
||||
w, _ = NewWriterDict(&b1, l, []byte(dict))
|
||||
w.Write([]byte(text))
|
||||
w.Close()
|
||||
|
||||
if !bytes.Equal(b1.Bytes(), b.Bytes()) {
|
||||
t.Errorf("level %d, writer wrote\n%v\n want\n%v", l, b1.Bytes(), b.Bytes())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// See http://code.google.com/p/go/issues/detail?id=2508
|
||||
func TestRegression2508(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Logf("test disabled with -short")
|
||||
return
|
||||
}
|
||||
w, err := NewWriter(ioutil.Discard, 1)
|
||||
if err != nil {
|
||||
t.Fatalf("NewWriter: %v", err)
|
||||
}
|
||||
buf := make([]byte, 1024)
|
||||
for i := 0; i < 131072; i++ {
|
||||
if _, err := w.Write(buf); err != nil {
|
||||
t.Fatalf("writer failed: %v", err)
|
||||
}
|
||||
}
|
||||
w.Close()
|
||||
}
|
||||
|
||||
func TestWriterReset(t *testing.T) {
|
||||
for level := -2; level <= 9; level++ {
|
||||
if level == -1 {
|
||||
level++
|
||||
}
|
||||
if testing.Short() && level > 1 {
|
||||
break
|
||||
}
|
||||
w, err := NewWriter(ioutil.Discard, level)
|
||||
if err != nil {
|
||||
t.Fatalf("NewWriter: %v", err)
|
||||
}
|
||||
buf := []byte("hello world")
|
||||
for i := 0; i < 1024; i++ {
|
||||
w.Write(buf)
|
||||
}
|
||||
w.Reset(ioutil.Discard)
|
||||
|
||||
wref, err := NewWriter(ioutil.Discard, level)
|
||||
if err != nil {
|
||||
t.Fatalf("NewWriter: %v", err)
|
||||
}
|
||||
|
||||
// DeepEqual doesn't compare functions.
|
||||
w.d.fill, wref.d.fill = nil, nil
|
||||
w.d.step, wref.d.step = nil, nil
|
||||
w.d.bulkHasher, wref.d.bulkHasher = nil, nil
|
||||
w.d.snap, wref.d.snap = nil, nil
|
||||
|
||||
// hashMatch is always overwritten when used.
|
||||
copy(w.d.hashMatch[:], wref.d.hashMatch[:])
|
||||
if w.d.tokens.n != 0 {
|
||||
t.Errorf("level %d Writer not reset after Reset. %d tokens were present", level, w.d.tokens.n)
|
||||
}
|
||||
// As long as the length is 0, we don't care about the content.
|
||||
w.d.tokens = wref.d.tokens
|
||||
|
||||
// We don't care if there are values in the window, as long as d.index is 0
|
||||
w.d.window = wref.d.window
|
||||
if !reflect.DeepEqual(w, wref) {
|
||||
t.Errorf("level %d Writer not reset after Reset", level)
|
||||
}
|
||||
}
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriter(w, NoCompression) })
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriter(w, DefaultCompression) })
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriter(w, BestCompression) })
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriter(w, ConstantCompression) })
|
||||
dict := []byte("we are the world")
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriterDict(w, NoCompression, dict) })
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriterDict(w, DefaultCompression, dict) })
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriterDict(w, BestCompression, dict) })
|
||||
testResetOutput(t, func(w io.Writer) (*Writer, error) { return NewWriterDict(w, ConstantCompression, dict) })
|
||||
}
|
||||
|
||||
func testResetOutput(t *testing.T, newWriter func(w io.Writer) (*Writer, error)) {
|
||||
buf := new(bytes.Buffer)
|
||||
w, err := newWriter(buf)
|
||||
if err != nil {
|
||||
t.Fatalf("NewWriter: %v", err)
|
||||
}
|
||||
b := []byte("hello world")
|
||||
for i := 0; i < 1024; i++ {
|
||||
w.Write(b)
|
||||
}
|
||||
w.Close()
|
||||
out1 := buf.Bytes()
|
||||
|
||||
buf2 := new(bytes.Buffer)
|
||||
w.Reset(buf2)
|
||||
for i := 0; i < 1024; i++ {
|
||||
w.Write(b)
|
||||
}
|
||||
w.Close()
|
||||
out2 := buf2.Bytes()
|
||||
|
||||
if len(out1) != len(out2) {
|
||||
t.Errorf("got %d, expected %d bytes", len(out2), len(out1))
|
||||
}
|
||||
if bytes.Compare(out1, out2) != 0 {
|
||||
mm := 0
|
||||
for i, b := range out1[:len(out2)] {
|
||||
if b != out2[i] {
|
||||
t.Errorf("mismatch index %d: %02x, expected %02x", i, out2[i], b)
|
||||
}
|
||||
mm++
|
||||
if mm == 10 {
|
||||
t.Fatal("Stopping")
|
||||
}
|
||||
}
|
||||
}
|
||||
t.Logf("got %d bytes", len(out1))
|
||||
}
|
||||
|
||||
// A writer that fails after N writes.
|
||||
type errorWriter struct {
|
||||
N int
|
||||
}
|
||||
|
||||
func (e *errorWriter) Write(b []byte) (int, error) {
|
||||
if e.N <= 0 {
|
||||
return 0, io.ErrClosedPipe
|
||||
}
|
||||
e.N--
|
||||
return len(b), nil
|
||||
}
|
||||
|
||||
// Test that errors from the underlying writer are passed upwards.
|
||||
func TestWriteError(t *testing.T) {
|
||||
buf := new(bytes.Buffer)
|
||||
for i := 0; i < 1024*1024; i++ {
|
||||
buf.WriteString(fmt.Sprintf("asdasfasf%d%dfghfgujyut%dyutyu\n", i, i, i))
|
||||
}
|
||||
in := buf.Bytes()
|
||||
for l := -2; l < 10; l++ {
|
||||
for fail := 1; fail <= 512; fail *= 2 {
|
||||
// Fail after 'fail' writes
|
||||
ew := &errorWriter{N: fail}
|
||||
w, err := NewWriter(ew, l)
|
||||
if err != nil {
|
||||
t.Errorf("NewWriter: level %d: %v", l, err)
|
||||
}
|
||||
n, err := io.Copy(w, bytes.NewBuffer(in))
|
||||
if err == nil {
|
||||
t.Errorf("Level %d: Expected an error, writer was %#v", l, ew)
|
||||
}
|
||||
n2, err := w.Write([]byte{1, 2, 2, 3, 4, 5})
|
||||
if n2 != 0 {
|
||||
t.Error("Level", l, "Expected 0 length write, got", n)
|
||||
}
|
||||
if err == nil {
|
||||
t.Error("Level", l, "Expected an error")
|
||||
}
|
||||
err = w.Flush()
|
||||
if err == nil {
|
||||
t.Error("Level", l, "Expected an error on close")
|
||||
}
|
||||
err = w.Close()
|
||||
if err == nil {
|
||||
t.Error("Level", l, "Expected an error on close")
|
||||
}
|
||||
|
||||
w.Reset(ioutil.Discard)
|
||||
n2, err = w.Write([]byte{1, 2, 3, 4, 5, 6})
|
||||
if err != nil {
|
||||
t.Error("Level", l, "Got unexpected error after reset:", err)
|
||||
}
|
||||
if n2 == 0 {
|
||||
t.Error("Level", l, "Got 0 length write, expected > 0")
|
||||
}
|
||||
if testing.Short() {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
78
vendor/github.com/klauspost/compress/flate/fixedhuff.go
generated
vendored
Normal file
@ -0,0 +1,78 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
// autogenerated by go run gen.go -output fixedhuff.go, DO NOT EDIT
|
||||
|
||||
var fixedHuffmanDecoder = huffmanDecoder{
|
||||
7,
|
||||
[huffmanNumChunks]uint32{
|
||||
0x1007, 0x0508, 0x0108, 0x1188, 0x1107, 0x0708, 0x0308, 0x0c09,
|
||||
0x1087, 0x0608, 0x0208, 0x0a09, 0x0008, 0x0808, 0x0408, 0x0e09,
|
||||
0x1047, 0x0588, 0x0188, 0x0909, 0x1147, 0x0788, 0x0388, 0x0d09,
|
||||
0x10c7, 0x0688, 0x0288, 0x0b09, 0x0088, 0x0888, 0x0488, 0x0f09,
|
||||
0x1027, 0x0548, 0x0148, 0x11c8, 0x1127, 0x0748, 0x0348, 0x0c89,
|
||||
0x10a7, 0x0648, 0x0248, 0x0a89, 0x0048, 0x0848, 0x0448, 0x0e89,
|
||||
0x1067, 0x05c8, 0x01c8, 0x0989, 0x1167, 0x07c8, 0x03c8, 0x0d89,
|
||||
0x10e7, 0x06c8, 0x02c8, 0x0b89, 0x00c8, 0x08c8, 0x04c8, 0x0f89,
|
||||
0x1017, 0x0528, 0x0128, 0x11a8, 0x1117, 0x0728, 0x0328, 0x0c49,
|
||||
0x1097, 0x0628, 0x0228, 0x0a49, 0x0028, 0x0828, 0x0428, 0x0e49,
|
||||
0x1057, 0x05a8, 0x01a8, 0x0949, 0x1157, 0x07a8, 0x03a8, 0x0d49,
|
||||
0x10d7, 0x06a8, 0x02a8, 0x0b49, 0x00a8, 0x08a8, 0x04a8, 0x0f49,
|
||||
0x1037, 0x0568, 0x0168, 0x11e8, 0x1137, 0x0768, 0x0368, 0x0cc9,
|
||||
0x10b7, 0x0668, 0x0268, 0x0ac9, 0x0068, 0x0868, 0x0468, 0x0ec9,
|
||||
0x1077, 0x05e8, 0x01e8, 0x09c9, 0x1177, 0x07e8, 0x03e8, 0x0dc9,
|
||||
0x10f7, 0x06e8, 0x02e8, 0x0bc9, 0x00e8, 0x08e8, 0x04e8, 0x0fc9,
|
||||
0x1007, 0x0518, 0x0118, 0x1198, 0x1107, 0x0718, 0x0318, 0x0c29,
|
||||
0x1087, 0x0618, 0x0218, 0x0a29, 0x0018, 0x0818, 0x0418, 0x0e29,
|
||||
0x1047, 0x0598, 0x0198, 0x0929, 0x1147, 0x0798, 0x0398, 0x0d29,
|
||||
0x10c7, 0x0698, 0x0298, 0x0b29, 0x0098, 0x0898, 0x0498, 0x0f29,
|
||||
0x1027, 0x0558, 0x0158, 0x11d8, 0x1127, 0x0758, 0x0358, 0x0ca9,
|
||||
0x10a7, 0x0658, 0x0258, 0x0aa9, 0x0058, 0x0858, 0x0458, 0x0ea9,
|
||||
0x1067, 0x05d8, 0x01d8, 0x09a9, 0x1167, 0x07d8, 0x03d8, 0x0da9,
|
||||
0x10e7, 0x06d8, 0x02d8, 0x0ba9, 0x00d8, 0x08d8, 0x04d8, 0x0fa9,
|
||||
0x1017, 0x0538, 0x0138, 0x11b8, 0x1117, 0x0738, 0x0338, 0x0c69,
|
||||
0x1097, 0x0638, 0x0238, 0x0a69, 0x0038, 0x0838, 0x0438, 0x0e69,
|
||||
0x1057, 0x05b8, 0x01b8, 0x0969, 0x1157, 0x07b8, 0x03b8, 0x0d69,
|
||||
0x10d7, 0x06b8, 0x02b8, 0x0b69, 0x00b8, 0x08b8, 0x04b8, 0x0f69,
|
||||
0x1037, 0x0578, 0x0178, 0x11f8, 0x1137, 0x0778, 0x0378, 0x0ce9,
|
||||
0x10b7, 0x0678, 0x0278, 0x0ae9, 0x0078, 0x0878, 0x0478, 0x0ee9,
|
||||
0x1077, 0x05f8, 0x01f8, 0x09e9, 0x1177, 0x07f8, 0x03f8, 0x0de9,
|
||||
0x10f7, 0x06f8, 0x02f8, 0x0be9, 0x00f8, 0x08f8, 0x04f8, 0x0fe9,
|
||||
0x1007, 0x0508, 0x0108, 0x1188, 0x1107, 0x0708, 0x0308, 0x0c19,
|
||||
0x1087, 0x0608, 0x0208, 0x0a19, 0x0008, 0x0808, 0x0408, 0x0e19,
|
||||
0x1047, 0x0588, 0x0188, 0x0919, 0x1147, 0x0788, 0x0388, 0x0d19,
|
||||
0x10c7, 0x0688, 0x0288, 0x0b19, 0x0088, 0x0888, 0x0488, 0x0f19,
|
||||
0x1027, 0x0548, 0x0148, 0x11c8, 0x1127, 0x0748, 0x0348, 0x0c99,
|
||||
0x10a7, 0x0648, 0x0248, 0x0a99, 0x0048, 0x0848, 0x0448, 0x0e99,
|
||||
0x1067, 0x05c8, 0x01c8, 0x0999, 0x1167, 0x07c8, 0x03c8, 0x0d99,
|
||||
0x10e7, 0x06c8, 0x02c8, 0x0b99, 0x00c8, 0x08c8, 0x04c8, 0x0f99,
|
||||
0x1017, 0x0528, 0x0128, 0x11a8, 0x1117, 0x0728, 0x0328, 0x0c59,
|
||||
0x1097, 0x0628, 0x0228, 0x0a59, 0x0028, 0x0828, 0x0428, 0x0e59,
|
||||
0x1057, 0x05a8, 0x01a8, 0x0959, 0x1157, 0x07a8, 0x03a8, 0x0d59,
|
||||
0x10d7, 0x06a8, 0x02a8, 0x0b59, 0x00a8, 0x08a8, 0x04a8, 0x0f59,
|
||||
0x1037, 0x0568, 0x0168, 0x11e8, 0x1137, 0x0768, 0x0368, 0x0cd9,
|
||||
0x10b7, 0x0668, 0x0268, 0x0ad9, 0x0068, 0x0868, 0x0468, 0x0ed9,
|
||||
0x1077, 0x05e8, 0x01e8, 0x09d9, 0x1177, 0x07e8, 0x03e8, 0x0dd9,
|
||||
0x10f7, 0x06e8, 0x02e8, 0x0bd9, 0x00e8, 0x08e8, 0x04e8, 0x0fd9,
|
||||
0x1007, 0x0518, 0x0118, 0x1198, 0x1107, 0x0718, 0x0318, 0x0c39,
|
||||
0x1087, 0x0618, 0x0218, 0x0a39, 0x0018, 0x0818, 0x0418, 0x0e39,
|
||||
0x1047, 0x0598, 0x0198, 0x0939, 0x1147, 0x0798, 0x0398, 0x0d39,
|
||||
0x10c7, 0x0698, 0x0298, 0x0b39, 0x0098, 0x0898, 0x0498, 0x0f39,
|
||||
0x1027, 0x0558, 0x0158, 0x11d8, 0x1127, 0x0758, 0x0358, 0x0cb9,
|
||||
0x10a7, 0x0658, 0x0258, 0x0ab9, 0x0058, 0x0858, 0x0458, 0x0eb9,
|
||||
0x1067, 0x05d8, 0x01d8, 0x09b9, 0x1167, 0x07d8, 0x03d8, 0x0db9,
|
||||
0x10e7, 0x06d8, 0x02d8, 0x0bb9, 0x00d8, 0x08d8, 0x04d8, 0x0fb9,
|
||||
0x1017, 0x0538, 0x0138, 0x11b8, 0x1117, 0x0738, 0x0338, 0x0c79,
|
||||
0x1097, 0x0638, 0x0238, 0x0a79, 0x0038, 0x0838, 0x0438, 0x0e79,
|
||||
0x1057, 0x05b8, 0x01b8, 0x0979, 0x1157, 0x07b8, 0x03b8, 0x0d79,
|
||||
0x10d7, 0x06b8, 0x02b8, 0x0b79, 0x00b8, 0x08b8, 0x04b8, 0x0f79,
|
||||
0x1037, 0x0578, 0x0178, 0x11f8, 0x1137, 0x0778, 0x0378, 0x0cf9,
|
||||
0x10b7, 0x0678, 0x0278, 0x0af9, 0x0078, 0x0878, 0x0478, 0x0ef9,
|
||||
0x1077, 0x05f8, 0x01f8, 0x09f9, 0x1177, 0x07f8, 0x03f8, 0x0df9,
|
||||
0x10f7, 0x06f8, 0x02f8, 0x0bf9, 0x00f8, 0x08f8, 0x04f8, 0x0ff9,
|
||||
},
|
||||
nil, 0,
|
||||
}
|
260
vendor/github.com/klauspost/compress/flate/flate_test.go
generated
vendored
Normal file
@ -0,0 +1,260 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This test tests some internals of the flate package.
|
||||
// The tests in package compress/gzip serve as the
|
||||
// end-to-end test of the decompressor.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/hex"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// The following test should not panic.
|
||||
func TestIssue5915(t *testing.T) {
|
||||
bits := []int{4, 0, 0, 6, 4, 3, 2, 3, 3, 4, 4, 5, 0, 0, 0, 0, 5, 5, 6,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 8, 6, 0, 11, 0, 8, 0, 6, 6, 10, 8}
|
||||
var h huffmanDecoder
|
||||
if h.init(bits) {
|
||||
t.Fatalf("Given sequence of bits is bad, and should not succeed.")
|
||||
}
|
||||
}
|
||||
|
||||
// The following test should not panic.
|
||||
func TestIssue5962(t *testing.T) {
|
||||
bits := []int{4, 0, 0, 6, 4, 3, 2, 3, 3, 4, 4, 5, 0, 0, 0, 0,
|
||||
5, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11}
|
||||
var h huffmanDecoder
|
||||
if h.init(bits) {
|
||||
t.Fatalf("Given sequence of bits is bad, and should not succeed.")
|
||||
}
|
||||
}
|
||||
|
||||
// The following test should not panic.
|
||||
func TestIssue6255(t *testing.T) {
|
||||
bits1 := []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 11}
|
||||
bits2 := []int{11, 13}
|
||||
var h huffmanDecoder
|
||||
if !h.init(bits1) {
|
||||
t.Fatalf("Given sequence of bits is good and should succeed.")
|
||||
}
|
||||
if h.init(bits2) {
|
||||
t.Fatalf("Given sequence of bits is bad and should not succeed.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidEncoding(t *testing.T) {
|
||||
// Initialize Huffman decoder to recognize "0".
|
||||
var h huffmanDecoder
|
||||
if !h.init([]int{1}) {
|
||||
t.Fatal("Failed to initialize Huffman decoder")
|
||||
}
|
||||
|
||||
// Initialize decompressor with invalid Huffman coding.
|
||||
var f decompressor
|
||||
f.r = bytes.NewReader([]byte{0xff})
|
||||
|
||||
_, err := f.huffSym(&h)
|
||||
if err == nil {
|
||||
t.Fatal("Should have rejected invalid bit sequence")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidBits(t *testing.T) {
|
||||
oversubscribed := []int{1, 2, 3, 4, 4, 5}
|
||||
incomplete := []int{1, 2, 4, 4}
|
||||
var h huffmanDecoder
|
||||
if h.init(oversubscribed) {
|
||||
t.Fatal("Should reject oversubscribed bit-length set")
|
||||
}
|
||||
if h.init(incomplete) {
|
||||
t.Fatal("Should reject incomplete bit-length set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStreams(t *testing.T) {
|
||||
// To verify any of these hexstrings as valid or invalid flate streams
|
||||
// according to the C zlib library, you can use the Python wrapper library:
|
||||
// >>> hex_string = "010100feff11"
|
||||
// >>> import zlib
|
||||
// >>> zlib.decompress(hex_string.decode("hex"), -15) # Negative means raw DEFLATE
|
||||
// '\x11'
|
||||
|
||||
testCases := []struct {
|
||||
desc string // Description of the stream
|
||||
stream string // Hexstring of the input DEFLATE stream
|
||||
want string // Expected result. Use "fail" to expect failure
|
||||
}{{
|
||||
"degenerate HCLenTree",
|
||||
"05e0010000000000100000000000000000000000000000000000000000000000" +
|
||||
"00000000000000000004",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, empty HLitTree, empty HDistTree",
|
||||
"05e0010400000000000000000000000000000000000000000000000000000000" +
|
||||
"00000000000000000010",
|
||||
"fail",
|
||||
}, {
|
||||
"empty HCLenTree",
|
||||
"05e0010000000000000000000000000000000000000000000000000000000000" +
|
||||
"00000000000000000010",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, empty HDistTree, use missing HDist symbol",
|
||||
"000100feff000de0010400000000100000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000002c",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, degenerate HDistTree, use missing HDist symbol",
|
||||
"000100feff000de0010000000000000000000000000000000000000000000000" +
|
||||
"00000000000000000610000000004070",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, empty HLitTree, empty HDistTree",
|
||||
"05e0010400000000100400000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000008",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, empty HLitTree, degenerate HDistTree",
|
||||
"05e0010400000000100400000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000800000008",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, degenerate HLitTree, degenerate HDistTree, use missing HLit symbol",
|
||||
"05e0010400000000100000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000001c",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, too large HDistTree",
|
||||
"edff870500000000200400000000000000000000000000000000000000000000" +
|
||||
"000000000000000000080000000000000004",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, empty HDistTree, excessive repeater code",
|
||||
"edfd870500000000200400000000000000000000000000000000000000000000" +
|
||||
"000000000000000000e8b100",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, empty HDistTree of normal length 30",
|
||||
"05fd01240000000000f8ffffffffffffffffffffffffffffffffffffffffffff" +
|
||||
"ffffffffffffffffff07000000fe01",
|
||||
"",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, empty HDistTree of excessive length 31",
|
||||
"05fe01240000000000f8ffffffffffffffffffffffffffffffffffffffffffff" +
|
||||
"ffffffffffffffffff07000000fc03",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, over-subscribed HLitTree, empty HDistTree",
|
||||
"05e001240000000000fcffffffffffffffffffffffffffffffffffffffffffff" +
|
||||
"ffffffffffffffffff07f00f",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, under-subscribed HLitTree, empty HDistTree",
|
||||
"05e001240000000000fcffffffffffffffffffffffffffffffffffffffffffff" +
|
||||
"fffffffffcffffffff07f00f",
|
||||
"fail",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree with single code, empty HDistTree",
|
||||
"05e001240000000000f8ffffffffffffffffffffffffffffffffffffffffffff" +
|
||||
"ffffffffffffffffff07f00f",
|
||||
"01",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree with multiple codes, empty HDistTree",
|
||||
"05e301240000000000f8ffffffffffffffffffffffffffffffffffffffffffff" +
|
||||
"ffffffffffffffffff07807f",
|
||||
"01",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, degenerate HDistTree, use valid HDist symbol",
|
||||
"000100feff000de0010400000000100000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000003c",
|
||||
"00000000",
|
||||
}, {
|
||||
"complete HCLenTree, degenerate HLitTree, degenerate HDistTree",
|
||||
"05e0010400000000100000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000c",
|
||||
"",
|
||||
}, {
|
||||
"complete HCLenTree, degenerate HLitTree, empty HDistTree",
|
||||
"05e0010400000000100000000000000000000000000000000000000000000000" +
|
||||
"00000000000000000004",
|
||||
"",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, empty HDistTree, spanning repeater code",
|
||||
"edfd870500000000200400000000000000000000000000000000000000000000" +
|
||||
"000000000000000000e8b000",
|
||||
"",
|
||||
}, {
|
||||
"complete HCLenTree with length codes, complete HLitTree, empty HDistTree",
|
||||
"ede0010400000000100000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000400004000",
|
||||
"",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, degenerate HDistTree, use valid HLit symbol 284 with count 31",
|
||||
"000100feff00ede0010400000000100000000000000000000000000000000000" +
|
||||
"000000000000000000000000000000040000407f00",
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"0000000000000000000000000000000000000000000000000000000000000000" +
|
||||
"000000",
|
||||
}, {
|
||||
"complete HCLenTree, complete HLitTree, degenerate HDistTree, use valid HLit and HDist symbols",
|
||||
"0cc2010d00000082b0ac4aff0eb07d27060000ffff",
|
||||
"616263616263",
|
||||
}, {
|
||||
"fixed block, use reserved symbol 287",
|
||||
"33180700",
|
||||
"fail",
|
||||
}, {
|
||||
"raw block",
|
||||
"010100feff11",
|
||||
"11",
|
||||
}, {
|
||||
"issue 10426 - over-subscribed HCLenTree causes a hang",
|
||||
"344c4a4e494d4b070000ff2e2eff2e2e2e2e2eff",
|
||||
"fail",
|
||||
}, {
|
||||
"issue 11030 - empty HDistTree unexpectedly leads to error",
|
||||
"05c0070600000080400fff37a0ca",
|
||||
"",
|
||||
}, {
|
||||
"issue 11033 - empty HDistTree unexpectedly leads to error",
|
||||
"050fb109c020cca5d017dcbca044881ee1034ec149c8980bbc413c2ab35be9dc" +
|
||||
"b1473449922449922411202306ee97b0383a521b4ffdcf3217f9f7d3adb701",
|
||||
"3130303634342068652e706870005d05355f7ed957ff084a90925d19e3ebc6d0" +
|
||||
"c6d7",
|
||||
}}
|
||||
|
||||
for i, tc := range testCases {
|
||||
data, err := hex.DecodeString(tc.stream)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
data, err = ioutil.ReadAll(NewReader(bytes.NewReader(data)))
|
||||
if tc.want == "fail" {
|
||||
if err == nil {
|
||||
t.Errorf("#%d (%s): got nil error, want non-nil", i, tc.desc)
|
||||
}
|
||||
} else {
|
||||
if err != nil {
|
||||
t.Errorf("#%d (%s): %v", i, tc.desc, err)
|
||||
continue
|
||||
}
|
||||
if got := hex.EncodeToString(data); got != tc.want {
|
||||
t.Errorf("#%d (%s):\ngot %q\nwant %q", i, tc.desc, got, tc.want)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
265
vendor/github.com/klauspost/compress/flate/gen.go
generated
vendored
Normal file
@ -0,0 +1,265 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
// This program generates fixedhuff.go
|
||||
// Invoke as
|
||||
//
|
||||
// go run gen.go -output fixedhuff.go
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
)
|
||||
|
||||
var filename = flag.String("output", "fixedhuff.go", "output file name")
|
||||
|
||||
const maxCodeLen = 16
|
||||
|
||||
// Note: the definition of the huffmanDecoder struct is copied from
|
||||
// inflate.go, as it is private to the implementation.
|
||||
|
||||
// chunk & 15 is number of bits
|
||||
// chunk >> 4 is value, including table link
|
||||
|
||||
const (
|
||||
huffmanChunkBits = 9
|
||||
huffmanNumChunks = 1 << huffmanChunkBits
|
||||
huffmanCountMask = 15
|
||||
huffmanValueShift = 4
|
||||
)
|
||||
|
||||
type huffmanDecoder struct {
|
||||
min int // the minimum code length
|
||||
chunks [huffmanNumChunks]uint32 // chunks as described above
|
||||
links [][]uint32 // overflow links
|
||||
linkMask uint32 // mask the width of the link table
|
||||
}
|
||||
|
||||
// Initialize Huffman decoding tables from array of code lengths.
|
||||
// Following this function, h is guaranteed to be initialized into a complete
|
||||
// tree (i.e., neither over-subscribed nor under-subscribed). The exception is a
|
||||
// degenerate case where the tree has only a single symbol with length 1. Empty
|
||||
// trees are permitted.
|
||||
func (h *huffmanDecoder) init(bits []int) bool {
|
||||
// Sanity enables additional runtime tests during Huffman
|
||||
// table construction. It's intended to be used during
|
||||
// development to supplement the currently ad-hoc unit tests.
|
||||
const sanity = false
|
||||
|
||||
if h.min != 0 {
|
||||
*h = huffmanDecoder{}
|
||||
}
|
||||
|
||||
// Count number of codes of each length,
|
||||
// compute min and max length.
|
||||
var count [maxCodeLen]int
|
||||
var min, max int
|
||||
for _, n := range bits {
|
||||
if n == 0 {
|
||||
continue
|
||||
}
|
||||
if min == 0 || n < min {
|
||||
min = n
|
||||
}
|
||||
if n > max {
|
||||
max = n
|
||||
}
|
||||
count[n]++
|
||||
}
|
||||
|
||||
// Empty tree. The decompressor.huffSym function will fail later if the tree
|
||||
// is used. Technically, an empty tree is only valid for the HDIST tree and
|
||||
// not the HCLEN and HLIT tree. However, a stream with an empty HCLEN tree
|
||||
// is guaranteed to fail since it will attempt to use the tree to decode the
|
||||
// codes for the HLIT and HDIST trees. Similarly, an empty HLIT tree is
|
||||
// guaranteed to fail later since the compressed data section must be
|
||||
// composed of at least one symbol (the end-of-block marker).
|
||||
if max == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
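// Assign canonical Huffman codes (RFC 1951 section 3.2.2): codes of each
// length are consecutive integers, starting from the previous length's last
// code shifted left by one bit.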
code := 0
|
||||
var nextcode [maxCodeLen]int
|
||||
for i := min; i <= max; i++ {
|
||||
code <<= 1
|
||||
nextcode[i] = code
|
||||
code += count[i]
|
||||
}
|
||||
|
||||
// Check that the coding is complete (i.e., that we've
|
||||
// assigned all 2-to-the-max possible bit sequences).
|
||||
// Exception: To be compatible with zlib, we also need to
|
||||
// accept degenerate single-code codings. See also
|
||||
// TestDegenerateHuffmanCoding.
|
||||
if code != 1<<uint(max) && !(code == 1 && max == 1) {
|
||||
return false
|
||||
}
|
||||
|
||||
h.min = min
|
||||
if max > huffmanChunkBits {
|
||||
numLinks := 1 << (uint(max) - huffmanChunkBits)
|
||||
h.linkMask = uint32(numLinks - 1)
|
||||
|
||||
// create link tables
|
||||
link := nextcode[huffmanChunkBits+1] >> 1
|
||||
h.links = make([][]uint32, huffmanNumChunks-link)
|
||||
for j := uint(link); j < huffmanNumChunks; j++ {
|
||||
reverse := int(reverseByte[j>>8]) | int(reverseByte[j&0xff])<<8
|
||||
reverse >>= uint(16 - huffmanChunkBits)
|
||||
off := j - uint(link)
|
||||
if sanity && h.chunks[reverse] != 0 {
|
||||
panic("impossible: overwriting existing chunk")
|
||||
}
|
||||
h.chunks[reverse] = uint32(off<<huffmanValueShift | (huffmanChunkBits + 1))
|
||||
h.links[off] = make([]uint32, numLinks)
|
||||
}
|
||||
}
|
||||
|
||||
for i, n := range bits {
|
||||
if n == 0 {
|
||||
continue
|
||||
}
|
||||
code := nextcode[n]
|
||||
nextcode[n]++
|
||||
chunk := uint32(i<<huffmanValueShift | n)
|
||||
reverse := int(reverseByte[code>>8]) | int(reverseByte[code&0xff])<<8
|
||||
reverse >>= uint(16 - n)
|
||||
if n <= huffmanChunkBits {
|
||||
for off := reverse; off < len(h.chunks); off += 1 << uint(n) {
|
||||
// We should never need to overwrite
|
||||
// an existing chunk. Also, 0 is
|
||||
// never a valid chunk, because the
|
||||
// lower 4 "count" bits should be
|
||||
// between 1 and 15.
|
||||
if sanity && h.chunks[off] != 0 {
|
||||
panic("impossible: overwriting existing chunk")
|
||||
}
|
||||
h.chunks[off] = chunk
|
||||
}
|
||||
} else {
|
||||
j := reverse & (huffmanNumChunks - 1)
|
||||
if sanity && h.chunks[j]&huffmanCountMask != huffmanChunkBits+1 {
|
||||
// Longer codes should have been
|
||||
// associated with a link table above.
|
||||
panic("impossible: not an indirect chunk")
|
||||
}
|
||||
value := h.chunks[j] >> huffmanValueShift
|
||||
linktab := h.links[value]
|
||||
reverse >>= huffmanChunkBits
|
||||
for off := reverse; off < len(linktab); off += 1 << uint(n-huffmanChunkBits) {
|
||||
if sanity && linktab[off] != 0 {
|
||||
panic("impossible: overwriting existing chunk")
|
||||
}
|
||||
linktab[off] = chunk
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if sanity {
|
||||
// Above we've sanity checked that we never overwrote
|
||||
// an existing entry. Here we additionally check that
|
||||
// we filled the tables completely.
|
||||
for i, chunk := range h.chunks {
|
||||
if chunk == 0 {
|
||||
// As an exception, in the degenerate
|
||||
// single-code case, we allow odd
|
||||
// chunks to be missing.
|
||||
if code == 1 && i%2 == 1 {
|
||||
continue
|
||||
}
|
||||
panic("impossible: missing chunk")
|
||||
}
|
||||
}
|
||||
for _, linktab := range h.links {
|
||||
for _, chunk := range linktab {
|
||||
if chunk == 0 {
|
||||
panic("impossible: missing chunk")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
|
||||
var h huffmanDecoder
|
||||
var bits [288]int
|
||||
initReverseByte()
|
||||
for i := 0; i < 144; i++ {
|
||||
bits[i] = 8
|
||||
}
|
||||
for i := 144; i < 256; i++ {
|
||||
bits[i] = 9
|
||||
}
|
||||
for i := 256; i < 280; i++ {
|
||||
bits[i] = 7
|
||||
}
|
||||
for i := 280; i < 288; i++ {
|
||||
bits[i] = 8
|
||||
}
|
||||
h.init(bits[:])
|
||||
if h.links != nil {
|
||||
log.Fatal("Unexpected links table in fixed Huffman decoder")
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
fmt.Fprintf(&buf, `// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.`+"\n\n")
|
||||
|
||||
fmt.Fprintln(&buf, "package flate")
|
||||
fmt.Fprintln(&buf)
|
||||
fmt.Fprintln(&buf, "// autogenerated by go run gen.go -output fixedhuff.go, DO NOT EDIT")
|
||||
fmt.Fprintln(&buf)
|
||||
fmt.Fprintln(&buf, "var fixedHuffmanDecoder = huffmanDecoder{")
|
||||
fmt.Fprintf(&buf, "\t%d,\n", h.min)
|
||||
fmt.Fprintln(&buf, "\t[huffmanNumChunks]uint32{")
|
||||
for i := 0; i < huffmanNumChunks; i++ {
|
||||
if i&7 == 0 {
|
||||
fmt.Fprintf(&buf, "\t\t")
|
||||
} else {
|
||||
fmt.Fprintf(&buf, " ")
|
||||
}
|
||||
fmt.Fprintf(&buf, "0x%04x,", h.chunks[i])
|
||||
if i&7 == 7 {
|
||||
fmt.Fprintln(&buf)
|
||||
}
|
||||
}
|
||||
fmt.Fprintln(&buf, "\t},")
|
||||
fmt.Fprintln(&buf, "\tnil, 0,")
|
||||
fmt.Fprintln(&buf, "}")
|
||||
|
||||
data, err := format.Source(buf.Bytes())
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
err = ioutil.WriteFile(*filename, data, 0644)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
var reverseByte [256]byte
|
||||
|
||||
func initReverseByte() {
|
||||
for x := 0; x < 256; x++ {
|
||||
var result byte
|
||||
for i := uint(0); i < 8; i++ {
|
||||
result |= byte(((x >> i) & 1) << (7 - i))
|
||||
}
|
||||
reverseByte[x] = result
|
||||
}
|
||||
}
|
717
vendor/github.com/klauspost/compress/flate/huffman_bit_writer.go
generated
vendored
Normal file
@ -0,0 +1,717 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"io"
|
||||
"math"
|
||||
)
|
||||
|
||||
const (
|
||||
// The largest offset code.
|
||||
offsetCodeCount = 30
|
||||
|
||||
// The special code used to mark the end of a block.
|
||||
endBlockMarker = 256
|
||||
|
||||
// The first length code.
|
||||
lengthCodesStart = 257
|
||||
|
||||
// The number of codegen codes.
|
||||
codegenCodeCount = 19
|
||||
badCode = 255
|
||||
|
||||
// Output byte buffer size
|
||||
// Must be multiple of 6 (48 bits) + 8
|
||||
bufferSize = 240 + 8
|
||||
)
|
||||
|
||||
// The number of extra bits needed by length code X - LENGTH_CODES_START.
|
||||
var lengthExtraBits = []int8{
|
||||
/* 257 */ 0, 0, 0,
|
||||
/* 260 */ 0, 0, 0, 0, 0, 1, 1, 1, 1, 2,
|
||||
/* 270 */ 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
|
||||
/* 280 */ 4, 5, 5, 5, 5, 0,
|
||||
}
|
||||
|
||||
// The length indicated by length code X - LENGTH_CODES_START.
|
||||
var lengthBase = []uint32{
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 10,
|
||||
12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
|
||||
64, 80, 96, 112, 128, 160, 192, 224, 255,
|
||||
}
|
||||
|
||||
// offset code word extra bits.
|
||||
var offsetExtraBits = []int8{
|
||||
0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
|
||||
4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
|
||||
9, 9, 10, 10, 11, 11, 12, 12, 13, 13,
|
||||
/* extended window */
|
||||
14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20,
|
||||
}
|
||||
|
||||
var offsetBase = []uint32{
|
||||
/* normal deflate */
|
||||
0x000000, 0x000001, 0x000002, 0x000003, 0x000004,
|
||||
0x000006, 0x000008, 0x00000c, 0x000010, 0x000018,
|
||||
0x000020, 0x000030, 0x000040, 0x000060, 0x000080,
|
||||
0x0000c0, 0x000100, 0x000180, 0x000200, 0x000300,
|
||||
0x000400, 0x000600, 0x000800, 0x000c00, 0x001000,
|
||||
0x001800, 0x002000, 0x003000, 0x004000, 0x006000,
|
||||
|
||||
/* extended window */
|
||||
0x008000, 0x00c000, 0x010000, 0x018000, 0x020000,
|
||||
0x030000, 0x040000, 0x060000, 0x080000, 0x0c0000,
|
||||
0x100000, 0x180000, 0x200000, 0x300000,
|
||||
}
|
||||
|
||||
// The odd order in which the codegen code sizes are written.
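// This matches the order RFC 1951 section 3.2.7 specifies for the (HCLEN+4)
// 3-bit code length fields in a dynamic block header.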
|
||||
var codegenOrder = []uint32{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}
|
||||
|
||||
type huffmanBitWriter struct {
|
||||
w io.Writer
|
||||
// Data waiting to be written is bytes[0:nbytes]
|
||||
// and then the low nbits of bits.
|
||||
bits uint64
|
||||
nbits uint
|
||||
bytes [bufferSize]byte
|
||||
nbytes int
|
||||
literalFreq []int32
|
||||
offsetFreq []int32
|
||||
codegen []uint8
|
||||
codegenFreq []int32
|
||||
literalEncoding *huffmanEncoder
|
||||
offsetEncoding *huffmanEncoder
|
||||
codegenEncoding *huffmanEncoder
|
||||
err error
|
||||
}
|
||||
|
||||
func newHuffmanBitWriter(w io.Writer) *huffmanBitWriter {
|
||||
return &huffmanBitWriter{
|
||||
w: w,
|
||||
literalFreq: make([]int32, maxNumLit),
|
||||
offsetFreq: make([]int32, offsetCodeCount),
|
||||
codegen: make([]uint8, maxNumLit+offsetCodeCount+1),
|
||||
codegenFreq: make([]int32, codegenCodeCount),
|
||||
literalEncoding: newHuffmanEncoder(maxNumLit),
|
||||
codegenEncoding: newHuffmanEncoder(codegenCodeCount),
|
||||
offsetEncoding: newHuffmanEncoder(offsetCodeCount),
|
||||
}
|
||||
}
|
||||
|
||||
func (w *huffmanBitWriter) reset(writer io.Writer) {
|
||||
w.w = writer
|
||||
w.bits, w.nbits, w.nbytes, w.err = 0, 0, 0, nil
|
||||
w.bytes = [bufferSize]byte{}
|
||||
}
|
||||
|
||||
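// flush drains the buffered whole bytes plus a final partial byte (padded
// with zero bits) to the underlying writer and resets the bit buffer.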
func (w *huffmanBitWriter) flush() {
|
||||
if w.err != nil {
|
||||
w.nbits = 0
|
||||
return
|
||||
}
|
||||
n := w.nbytes
|
||||
for w.nbits != 0 {
|
||||
w.bytes[n] = byte(w.bits)
|
||||
w.bits >>= 8
|
||||
if w.nbits > 8 { // Avoid underflow
|
||||
w.nbits -= 8
|
||||
} else {
|
||||
w.nbits = 0
|
||||
}
|
||||
n++
|
||||
}
|
||||
w.bits = 0
|
||||
_, w.err = w.w.Write(w.bytes[0:n])
|
||||
w.nbytes = 0
|
||||
}
|
||||
|
||||
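// writeBits accumulates nb bits of b, LSB first, into the bit buffer and
// spills six bytes (48 bits) at a time into the byte buffer, which is
// written out once it is nearly full.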
func (w *huffmanBitWriter) writeBits(b int32, nb uint) {
|
||||
w.bits |= uint64(b) << w.nbits
|
||||
w.nbits += nb
|
||||
if w.nbits >= 48 {
|
||||
bits := w.bits
|
||||
w.bits >>= 48
|
||||
w.nbits -= 48
|
||||
n := w.nbytes
|
||||
w.bytes[n] = byte(bits)
|
||||
w.bytes[n+1] = byte(bits >> 8)
|
||||
w.bytes[n+2] = byte(bits >> 16)
|
||||
w.bytes[n+3] = byte(bits >> 24)
|
||||
w.bytes[n+4] = byte(bits >> 32)
|
||||
w.bytes[n+5] = byte(bits >> 40)
|
||||
n += 6
|
||||
if n >= bufferSize-8 {
|
||||
_, w.err = w.w.Write(w.bytes[:bufferSize-8])
|
||||
n = 0
|
||||
}
|
||||
w.nbytes = n
|
||||
}
|
||||
}
|
||||
|
||||
func (w *huffmanBitWriter) writeBytes(bytes []byte) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
n := w.nbytes
|
||||
for w.nbits != 0 {
|
||||
w.bytes[n] = byte(w.bits)
|
||||
w.bits >>= 8
|
||||
w.nbits -= 8
|
||||
n++
|
||||
}
|
||||
if w.nbits != 0 {
|
||||
w.err = InternalError("writeBytes with unfinished bits")
|
||||
return
|
||||
}
|
||||
if n != 0 {
|
||||
_, w.err = w.w.Write(w.bytes[0:n])
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
w.nbytes = 0
|
||||
_, w.err = w.w.Write(bytes)
|
||||
}
|
||||
|
||||
// RFC 1951 3.2.7 specifies a special run-length encoding for specifying
|
||||
// the literal and offset lengths arrays (which are concatenated into a single
|
||||
// array). This method generates that run-length encoding.
|
||||
//
|
||||
// The result is written into the codegen array, and the frequencies
|
||||
// of each code are written into the codegenFreq array.
|
||||
// Codes 0-15 are single byte codes. Codes 16-18 are followed by additional
|
||||
// information. Code badCode is an end marker.
|
||||
//
|
||||
// numLiterals The number of literals in literalEncoding
|
||||
// numOffsets The number of offsets in offsetEncoding
|
||||
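// Per RFC 1951, code 16 repeats the previous length 3-6 times (2 extra bits),
// code 17 repeats a zero length 3-10 times (3 extra bits), and code 18
// repeats a zero length 11-138 times (7 extra bits).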
func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int, offenc *huffmanEncoder) {
|
||||
for i := range w.codegenFreq {
|
||||
w.codegenFreq[i] = 0
|
||||
}
|
||||
// Note that we are using codegen both as a temporary variable for holding
|
||||
// a copy of the frequencies, and as the place where we put the result.
|
||||
// This is fine because the output is always shorter than the input used
|
||||
// so far.
|
||||
codegen := w.codegen // cache
|
||||
// Copy the concatenated code sizes to codegen. Put a marker at the end.
|
||||
cgnl := codegen[0:numLiterals]
|
||||
for i := range cgnl {
|
||||
cgnl[i] = uint8(w.literalEncoding.codes[i].bits())
|
||||
}
|
||||
|
||||
cgnl = codegen[numLiterals : numLiterals+numOffsets]
|
||||
for i := range cgnl {
|
||||
cgnl[i] = uint8(offenc.codes[i].bits())
|
||||
}
|
||||
codegen[numLiterals+numOffsets] = badCode
|
||||
|
||||
size := codegen[0]
|
||||
count := 1
|
||||
outIndex := 0
|
||||
for inIndex := 1; size != badCode; inIndex++ {
|
||||
// INVARIANT: We have seen "count" copies of size that have not yet
|
||||
// had output generated for them.
|
||||
nextSize := codegen[inIndex]
|
||||
if nextSize == size {
|
||||
count++
|
||||
continue
|
||||
}
|
||||
// We need to generate codegen indicating "count" of size.
|
||||
if size != 0 {
|
||||
codegen[outIndex] = size
|
||||
outIndex++
|
||||
w.codegenFreq[size]++
|
||||
count--
|
||||
for count >= 3 {
|
||||
n := 6
|
||||
if n > count {
|
||||
n = count
|
||||
}
|
||||
codegen[outIndex] = 16
|
||||
outIndex++
|
||||
codegen[outIndex] = uint8(n - 3)
|
||||
outIndex++
|
||||
w.codegenFreq[16]++
|
||||
count -= n
|
||||
}
|
||||
} else {
|
||||
for count >= 11 {
|
||||
n := 138
|
||||
if n > count {
|
||||
n = count
|
||||
}
|
||||
codegen[outIndex] = 18
|
||||
outIndex++
|
||||
codegen[outIndex] = uint8(n - 11)
|
||||
outIndex++
|
||||
w.codegenFreq[18]++
|
||||
count -= n
|
||||
}
|
||||
if count >= 3 {
|
||||
// count >= 3 && count <= 10
|
||||
codegen[outIndex] = 17
|
||||
outIndex++
|
||||
codegen[outIndex] = uint8(count - 3)
|
||||
outIndex++
|
||||
w.codegenFreq[17]++
|
||||
count = 0
|
||||
}
|
||||
}
|
||||
count--
|
||||
for ; count >= 0; count-- {
|
||||
codegen[outIndex] = size
|
||||
outIndex++
|
||||
w.codegenFreq[size]++
|
||||
}
|
||||
// Set up invariant for next time through the loop.
|
||||
size = nextSize
|
||||
count = 1
|
||||
}
|
||||
// Marker indicating the end of the codegen.
|
||||
codegen[outIndex] = badCode
|
||||
}
|
||||
|
||||
func (w *huffmanBitWriter) writeCode(c hcode) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
w.bits |= uint64(c.code()) << w.nbits
|
||||
w.nbits += c.bits()
|
||||
if w.nbits >= 48 {
|
||||
bits := w.bits
|
||||
w.bits >>= 48
|
||||
w.nbits -= 48
|
||||
n := w.nbytes
|
||||
w.bytes[n] = byte(bits)
|
||||
w.bytes[n+1] = byte(bits >> 8)
|
||||
w.bytes[n+2] = byte(bits >> 16)
|
||||
w.bytes[n+3] = byte(bits >> 24)
|
||||
w.bytes[n+4] = byte(bits >> 32)
|
||||
w.bytes[n+5] = byte(bits >> 40)
|
||||
n += 6
|
||||
if n >= bufferSize-8 {
|
||||
_, w.err = w.w.Write(w.bytes[:bufferSize-8])
|
||||
n = 0
|
||||
}
|
||||
w.nbytes = n
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Write the header of a dynamic Huffman block to the output stream.
|
||||
//
|
||||
// numLiterals The number of literals specified in codegen
|
||||
// numOffsets The number of offsets specified in codegen
|
||||
// numCodegens The number of codegens used in codegen
|
||||
func (w *huffmanBitWriter) writeDynamicHeader(numLiterals int, numOffsets int, numCodegens int, isEof bool) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
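// 3-bit block header: the low bit is BFINAL, the next two are BTYPE.
// 4 (binary 100) encodes a non-final dynamic-Huffman block (BTYPE=2);
// 5 additionally sets BFINAL.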
var firstBits int32 = 4
|
||||
if isEof {
|
||||
firstBits = 5
|
||||
}
|
||||
w.writeBits(firstBits, 3)
|
||||
w.writeBits(int32(numLiterals-257), 5)
|
||||
w.writeBits(int32(numOffsets-1), 5)
|
||||
w.writeBits(int32(numCodegens-4), 4)
|
||||
|
||||
for i := 0; i < numCodegens; i++ {
|
||||
value := w.codegenEncoding.codes[codegenOrder[i]].bits()
|
||||
w.writeBits(int32(value), 3)
|
||||
}
|
||||
|
||||
i := 0
|
||||
for {
|
||||
var codeWord int = int(w.codegen[i])
|
||||
i++
|
||||
if codeWord == badCode {
|
||||
break
|
||||
}
|
||||
// The low byte contains the actual code to generate.
|
||||
w.writeCode(w.codegenEncoding.codes[uint32(codeWord)])
|
||||
|
||||
switch codeWord {
|
||||
case 16:
|
||||
w.writeBits(int32(w.codegen[i]), 2)
|
||||
i++
|
||||
break
|
||||
case 17:
|
||||
w.writeBits(int32(w.codegen[i]), 3)
|
||||
i++
|
||||
break
|
||||
case 18:
|
||||
w.writeBits(int32(w.codegen[i]), 7)
|
||||
i++
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (w *huffmanBitWriter) writeStoredHeader(length int, isEof bool) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
var flag int32
|
||||
if isEof {
|
||||
flag = 1
|
||||
}
|
||||
w.writeBits(flag, 3)
|
||||
w.flush()
|
||||
w.writeBits(int32(length), 16)
|
||||
w.writeBits(int32(^uint16(length)), 16)
|
||||
}
|
||||
|
||||
func (w *huffmanBitWriter) writeFixedHeader(isEof bool) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
// Indicate that we are a fixed Huffman block
|
||||
var value int32 = 2
|
||||
if isEof {
|
||||
value = 3
|
||||
}
|
||||
w.writeBits(value, 3)
|
||||
}
|
||||
|
||||
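// writeBlock emits one complete DEFLATE block for the given tokens, picking
// whichever of stored, fixed-Huffman, or dynamic-Huffman encoding has the
// smallest estimated size.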
func (w *huffmanBitWriter) writeBlock(tok tokens, eof bool, input []byte) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
for i := range w.literalFreq {
|
||||
w.literalFreq[i] = 0
|
||||
}
|
||||
for i := range w.offsetFreq {
|
||||
w.offsetFreq[i] = 0
|
||||
}
|
||||
|
||||
tok.tokens[tok.n] = endBlockMarker
|
||||
tokens := tok.tokens[0 : tok.n+1]
|
||||
|
||||
for _, t := range tokens {
|
||||
if t < matchType {
|
||||
w.literalFreq[t.literal()]++
|
||||
} else {
|
||||
length := t.length()
|
||||
offset := t.offset()
|
||||
w.literalFreq[lengthCodesStart+lengthCode(length)]++
|
||||
w.offsetFreq[offsetCode(offset)]++
|
||||
}
|
||||
}
|
||||
|
||||
// get the number of literals
|
||||
numLiterals := len(w.literalFreq)
|
||||
for w.literalFreq[numLiterals-1] == 0 {
|
||||
numLiterals--
|
||||
}
|
||||
// get the number of offsets
|
||||
numOffsets := len(w.offsetFreq)
|
||||
for numOffsets > 0 && w.offsetFreq[numOffsets-1] == 0 {
|
||||
numOffsets--
|
||||
}
|
||||
if numOffsets == 0 {
|
||||
// We haven't found a single match. If we want to go with the dynamic encoding,
|
||||
// we should count at least one offset to be sure that the offset huffman tree could be encoded.
|
||||
w.offsetFreq[0] = 1
|
||||
numOffsets = 1
|
||||
}
|
||||
|
||||
w.literalEncoding.generate(w.literalFreq, 15)
|
||||
w.offsetEncoding.generate(w.offsetFreq, 15)
|
||||
|
||||
storedBytes := 0
|
||||
if input != nil {
|
||||
storedBytes = len(input)
|
||||
}
|
||||
var extraBits int64
|
||||
var storedSize int64 = math.MaxInt64
|
||||
if storedBytes <= maxStoreBlockSize && input != nil {
|
||||
storedSize = int64((storedBytes + 5) * 8)
|
||||
// We only bother calculating the costs of the extra bits required by
|
||||
// the length or offset fields (which will be the same for both fixed
|
||||
// and dynamic encoding), if we need to compare those two encodings
|
||||
// against stored encoding.
|
||||
for lengthCode := lengthCodesStart + 8; lengthCode < numLiterals; lengthCode++ {
|
||||
// First eight length codes have extra size = 0.
|
||||
extraBits += int64(w.literalFreq[lengthCode]) * int64(lengthExtraBits[lengthCode-lengthCodesStart])
|
||||
}
|
||||
for offsetCode := 4; offsetCode < numOffsets; offsetCode++ {
|
||||
// First four offset codes have extra size = 0.
|
||||
extraBits += int64(w.offsetFreq[offsetCode]) * int64(offsetExtraBits[offsetCode])
|
||||
}
|
||||
}
|
||||
|
||||
// Figure out smallest code.
|
||||
// Fixed Huffman baseline.
|
||||
var size = int64(3) +
|
||||
fixedLiteralEncoding.bitLength(w.literalFreq) +
|
||||
fixedOffsetEncoding.bitLength(w.offsetFreq) +
|
||||
extraBits
|
||||
var literalEncoding = fixedLiteralEncoding
|
||||
var offsetEncoding = fixedOffsetEncoding
|
||||
|
||||
// Dynamic Huffman?
|
||||
var numCodegens int
|
||||
|
||||
// Generate codegen and codegenFrequencies, which indicates how to encode
|
||||
// the literalEncoding and the offsetEncoding.
|
||||
w.generateCodegen(numLiterals, numOffsets, w.offsetEncoding)
|
||||
w.codegenEncoding.generate(w.codegenFreq, 7)
|
||||
numCodegens = len(w.codegenFreq)
|
||||
for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 {
|
||||
numCodegens--
|
||||
}
|
||||
dynamicHeader := int64(3+5+5+4+(3*numCodegens)) +
|
||||
w.codegenEncoding.bitLength(w.codegenFreq) +
|
||||
int64(extraBits) +
|
||||
int64(w.codegenFreq[16]*2) +
|
||||
int64(w.codegenFreq[17]*3) +
|
||||
int64(w.codegenFreq[18]*7)
|
||||
dynamicSize := dynamicHeader +
|
||||
w.literalEncoding.bitLength(w.literalFreq) +
|
||||
w.offsetEncoding.bitLength(w.offsetFreq)
|
||||
|
||||
if dynamicSize < size {
|
||||
size = dynamicSize
|
||||
literalEncoding = w.literalEncoding
|
||||
offsetEncoding = w.offsetEncoding
|
||||
}
|
||||
|
||||
// Stored bytes?
|
||||
if storedSize < size {
|
||||
w.writeStoredHeader(storedBytes, eof)
|
||||
w.writeBytes(input[0:storedBytes])
|
||||
return
|
||||
}
|
||||
|
||||
// Huffman.
|
||||
if literalEncoding == fixedLiteralEncoding {
|
||||
w.writeFixedHeader(eof)
|
||||
} else {
|
||||
w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof)
|
||||
}
|
||||
|
||||
leCodes := literalEncoding.codes
|
||||
oeCodes := offsetEncoding.codes
|
||||
for _, t := range tokens {
|
||||
if t < matchType {
|
||||
w.writeCode(leCodes[t.literal()])
|
||||
} else {
|
||||
// Write the length
|
||||
length := t.length()
|
||||
lengthCode := lengthCode(length)
|
||||
w.writeCode(leCodes[lengthCode+lengthCodesStart])
|
||||
extraLengthBits := uint(lengthExtraBits[lengthCode])
|
||||
if extraLengthBits > 0 {
|
||||
extraLength := int32(length - lengthBase[lengthCode])
|
||||
w.writeBits(extraLength, extraLengthBits)
|
||||
}
|
||||
// Write the offset
|
||||
offset := t.offset()
|
||||
offsetCode := offsetCode(offset)
|
||||
w.writeCode(oeCodes[offsetCode])
|
||||
extraOffsetBits := uint(offsetExtraBits[offsetCode])
|
||||
if extraOffsetBits > 0 {
|
||||
extraOffset := int32(offset - offsetBase[offsetCode])
|
||||
w.writeBits(extraOffset, extraOffsetBits)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// writeBlockDynamic will write a block as dynamic Huffman table
|
||||
// compressed. This should be used if the caller has a reasonable expectation
|
||||
// that this block contains compressible data.
|
||||
func (w *huffmanBitWriter) writeBlockDynamic(tok tokens, eof bool, input []byte) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
for i := range w.literalFreq {
|
||||
w.literalFreq[i] = 0
|
||||
}
|
||||
for i := range w.offsetFreq {
|
||||
w.offsetFreq[i] = 0
|
||||
}
|
||||
|
||||
tok.tokens[tok.n] = endBlockMarker
|
||||
tokens := tok.tokens[0 : tok.n+1]
|
||||
|
||||
for _, t := range tokens {
|
||||
if t < matchType {
|
||||
w.literalFreq[t.literal()]++
|
||||
} else {
|
||||
length := t.length()
|
||||
offset := t.offset()
|
||||
w.literalFreq[lengthCodesStart+lengthCode(length)]++
|
||||
w.offsetFreq[offsetCode(offset)]++
|
||||
}
|
||||
}
|
||||
|
||||
// get the number of literals
|
||||
numLiterals := len(w.literalFreq)
|
||||
for w.literalFreq[numLiterals-1] == 0 {
|
||||
numLiterals--
|
||||
}
|
||||
// get the number of offsets
|
||||
numOffsets := len(w.offsetFreq)
|
||||
for numOffsets > 0 && w.offsetFreq[numOffsets-1] == 0 {
|
||||
numOffsets--
|
||||
}
|
||||
if numOffsets == 0 {
|
||||
// We haven't found a single match. If we want to go with the dynamic encoding,
|
||||
// we should count at least one offset to be sure that the offset huffman tree could be encoded.
|
||||
w.offsetFreq[0] = 1
|
||||
numOffsets = 1
|
||||
}
|
||||
|
||||
w.literalEncoding.generate(w.literalFreq, 15)
|
||||
w.offsetEncoding.generate(w.offsetFreq, 15)
|
||||
|
||||
var numCodegens int
|
||||
|
||||
// Generate codegen and codegenFrequencies, which indicates how to encode
|
||||
// the literalEncoding and the offsetEncoding.
|
||||
w.generateCodegen(numLiterals, numOffsets, w.offsetEncoding)
|
||||
w.codegenEncoding.generate(w.codegenFreq, 7)
|
||||
numCodegens = len(w.codegenFreq)
|
||||
for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 {
|
||||
numCodegens--
|
||||
}
|
||||
var literalEncoding = w.literalEncoding
|
||||
var offsetEncoding = w.offsetEncoding
|
||||
|
||||
// Write Huffman table.
|
||||
w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof)
|
||||
leCodes := literalEncoding.codes
|
||||
oeCodes := offsetEncoding.codes
|
||||
|
||||
for _, t := range tokens {
|
||||
if t < matchType {
|
||||
w.writeCode(leCodes[t.literal()])
|
||||
} else {
|
||||
// Write the length
|
||||
length := t.length()
|
||||
lengthCode := lengthCode(length)
|
||||
w.writeCode(leCodes[lengthCode+lengthCodesStart])
|
||||
extraLengthBits := uint(lengthExtraBits[lengthCode])
|
||||
if extraLengthBits > 0 {
|
||||
extraLength := int32(length - lengthBase[lengthCode])
|
||||
w.writeBits(extraLength, extraLengthBits)
|
||||
}
|
||||
// Write the offset
|
||||
offset := t.offset()
|
||||
offsetCode := offsetCode(offset)
|
||||
w.writeCode(oeCodes[offsetCode])
|
||||
extraOffsetBits := uint(offsetExtraBits[offsetCode])
|
||||
if extraOffsetBits > 0 {
|
||||
extraOffset := int32(offset - offsetBase[offsetCode])
|
||||
w.writeBits(extraOffset, extraOffsetBits)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// static offset encoder used for Huffman-only encoding.
|
||||
var huffOffset *huffmanEncoder
|
||||
|
||||
func init() {
|
||||
var w = newHuffmanBitWriter(nil)
|
||||
w.offsetFreq[0] = 1
|
||||
huffOffset = newHuffmanEncoder(offsetCodeCount)
|
||||
huffOffset.generate(w.offsetFreq, 15)
|
||||
}
|
||||
|
||||
// writeBlockHuff will write a block of bytes as either
|
||||
// Huffman encoded literals or uncompressed bytes if the
|
||||
// result gains only very little from compression.
|
||||
func (w *huffmanBitWriter) writeBlockHuff(eof bool, input []byte) {
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Clear histogram
|
||||
for i := range w.literalFreq {
|
||||
w.literalFreq[i] = 0
|
||||
}
|
||||
|
||||
// Add everything as literals
|
||||
histogram(input, w.literalFreq)
|
||||
|
||||
w.literalFreq[endBlockMarker] = 1
|
||||
|
||||
const numLiterals = endBlockMarker + 1
|
||||
const numOffsets = 1
|
||||
|
||||
w.literalEncoding.generate(w.literalFreq, 15)
|
||||
|
||||
// Figure out smallest code.
|
||||
// Always use dynamic Huffman or Store
|
||||
var numCodegens int
|
||||
|
||||
// Generate codegen and codegenFrequencies, which indicates how to encode
|
||||
// the literalEncoding and the offsetEncoding.
|
||||
w.generateCodegen(numLiterals, numOffsets, huffOffset)
|
||||
w.codegenEncoding.generate(w.codegenFreq, 7)
|
||||
numCodegens = len(w.codegenFreq)
|
||||
for numCodegens > 4 && w.codegenFreq[codegenOrder[numCodegens-1]] == 0 {
|
||||
numCodegens--
|
||||
}
|
||||
headerSize := int64(3+5+5+4+(3*numCodegens)) +
|
||||
w.codegenEncoding.bitLength(w.codegenFreq) +
|
||||
int64(w.codegenFreq[16]*2) +
|
||||
int64(w.codegenFreq[17]*3) +
|
||||
int64(w.codegenFreq[18]*7)
|
||||
|
||||
// Includes EOB marker
|
||||
size := headerSize + w.literalEncoding.bitLength(w.literalFreq)
|
||||
|
||||
// Calculate stored size
|
||||
var storedSize int64 = math.MaxInt64
|
||||
var storedBytes = len(input)
|
||||
if storedBytes <= maxStoreBlockSize {
|
||||
storedSize = int64(storedBytes+5) * 8
|
||||
}
|
||||
|
||||
// Store bytes, if we don't get a reasonable improvement.
|
||||
if storedSize < (size + size>>4) {
|
||||
w.writeStoredHeader(storedBytes, eof)
|
||||
w.writeBytes(input)
|
||||
return
|
||||
}
|
||||
|
||||
// Huffman.
|
||||
w.writeDynamicHeader(numLiterals, numOffsets, numCodegens, eof)
|
||||
encoding := w.literalEncoding.codes
|
||||
for _, t := range input {
|
||||
// Bitwriting inlined, ~30% speedup
|
||||
c := encoding[t]
|
||||
w.bits |= uint64(c.code()) << w.nbits
|
||||
w.nbits += c.bits()
|
||||
if w.nbits >= 48 {
|
||||
bits := w.bits
|
||||
w.bits >>= 48
|
||||
w.nbits -= 48
|
||||
n := w.nbytes
|
||||
w.bytes[n] = byte(bits)
|
||||
w.bytes[n+1] = byte(bits >> 8)
|
||||
w.bytes[n+2] = byte(bits >> 16)
|
||||
w.bytes[n+3] = byte(bits >> 24)
|
||||
w.bytes[n+4] = byte(bits >> 32)
|
||||
w.bytes[n+5] = byte(bits >> 40)
|
||||
n += 6
|
||||
if n >= bufferSize-8 {
|
||||
_, w.err = w.w.Write(w.bytes[:bufferSize-8])
|
||||
if w.err != nil {
|
||||
return
|
||||
}
|
||||
w.nbytes = 0
|
||||
} else {
|
||||
w.nbytes = n
|
||||
}
|
||||
}
|
||||
}
|
||||
w.writeCode(encoding[endBlockMarker])
|
||||
}
|
368
vendor/github.com/klauspost/compress/flate/huffman_bit_writer_test.go
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
363
vendor/github.com/klauspost/compress/flate/huffman_code.go
generated
vendored
Normal file
@ -0,0 +1,363 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"math"
|
||||
"sort"
|
||||
)
|
||||
|
||||
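// hcode packs a Huffman code: the code value occupies the low 16 bits and
// the code length, in bits, the bits above (see codeBits, set and bits below).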
type hcode uint32
|
||||
|
||||
type huffmanEncoder struct {
|
||||
codes []hcode
|
||||
freqcache []literalNode
|
||||
bitCount [17]int32
|
||||
lns literalNodeSorter
|
||||
lfs literalFreqSorter
|
||||
}
|
||||
|
||||
type literalNode struct {
|
||||
literal uint16
|
||||
freq int32
|
||||
}
|
||||
|
||||
// A levelInfo describes the state of the constructed tree for a given depth.
|
||||
type levelInfo struct {
|
||||
// Our level, for better printing.
|
||||
level int32
|
||||
|
||||
// The frequency of the last node at this level
|
||||
lastFreq int32
|
||||
|
||||
// The frequency of the next character to add to this level
|
||||
nextCharFreq int32
|
||||
|
||||
// The frequency of the next pair (from level below) to add to this level.
|
||||
// Only valid if the "needed" value of the next lower level is 0.
|
||||
nextPairFreq int32
|
||||
|
||||
// The number of chains remaining to generate for this level before moving
|
||||
// up to the next level
|
||||
needed int32
|
||||
}
|
||||
|
||||
func (h hcode) codeBits() (code uint16, bits uint8) {
|
||||
return uint16(h), uint8(h >> 16)
|
||||
}
|
||||
|
||||
func (h *hcode) set(code uint16, bits uint8) {
|
||||
*h = hcode(code) | hcode(uint32(bits)<<16)
|
||||
}
|
||||
|
||||
func (h *hcode) setBits(bits uint8) {
|
||||
*h = hcode(*h&0xffff) | hcode(uint32(bits)<<16)
|
||||
}
|
||||
|
||||
func toCode(code uint16, bits uint8) hcode {
|
||||
return hcode(code) | hcode(uint32(bits)<<16)
|
||||
}
|
||||
|
||||
func (h hcode) code() (code uint16) {
|
||||
return uint16(h)
|
||||
}
|
||||
|
||||
func (h hcode) bits() (bits uint) {
|
||||
return uint(h >> 16)
|
||||
}
|
||||
|
||||
func maxNode() literalNode { return literalNode{math.MaxUint16, math.MaxInt32} }
|
||||
|
||||
func newHuffmanEncoder(size int) *huffmanEncoder {
|
||||
return &huffmanEncoder{codes: make([]hcode, size), freqcache: nil}
|
||||
}
|
||||
|
||||
// Generates a HuffmanCode corresponding to the fixed literal table
|
||||
func generateFixedLiteralEncoding() *huffmanEncoder {
|
||||
h := newHuffmanEncoder(maxNumLit)
|
||||
codes := h.codes
|
||||
var ch uint16
|
||||
for ch = 0; ch < maxNumLit; ch++ {
|
||||
var bits uint16
|
||||
var size uint8
|
||||
switch {
|
||||
case ch < 144:
|
||||
// size 8, 00110000 .. 10111111
|
||||
bits = ch + 48
|
||||
size = 8
|
||||
break
|
||||
case ch < 256:
|
||||
// size 9, 110010000 .. 111111111
|
||||
bits = ch + 400 - 144
|
||||
size = 9
|
||||
break
|
||||
case ch < 280:
|
||||
// size 7, 0000000 .. 0010111
|
||||
bits = ch - 256
|
||||
size = 7
|
||||
break
|
||||
default:
|
||||
// size 8, 11000000 .. 11000111
|
||||
bits = ch + 192 - 280
|
||||
size = 8
|
||||
}
|
||||
codes[ch] = toCode(reverseBits(bits, size), size)
|
||||
}
|
||||
return h
|
||||
}
|
||||
|
||||
func generateFixedOffsetEncoding() *huffmanEncoder {
|
||||
h := newHuffmanEncoder(30)
|
||||
codes := h.codes
|
||||
for ch := uint16(0); ch < 30; ch++ {
|
||||
codes[ch] = toCode(reverseBits(ch, 5), 5)
|
||||
}
|
||||
return h
|
||||
}
|
||||
|
||||
var fixedLiteralEncoding *huffmanEncoder = generateFixedLiteralEncoding()
|
||||
var fixedOffsetEncoding *huffmanEncoder = generateFixedOffsetEncoding()
|
||||
|
||||
func (h *huffmanEncoder) bitLength(freq []int32) int64 {
|
||||
var total int64
|
||||
for i, f := range freq {
|
||||
if f != 0 {
|
||||
total += int64(f) * int64(h.codes[i].bits())
|
||||
}
|
||||
}
|
||||
return total
|
||||
}
|
||||
|
||||
const maxBitsLimit = 16
|
||||
|
||||
// Return the number of literals assigned to each bit size in the Huffman encoding
|
||||
//
|
||||
// This method is only called when list.length >= 3
|
||||
// The cases of 0, 1, and 2 literals are handled by special case code.
|
||||
//
|
||||
// list An array of the literals with non-zero frequencies
|
||||
// and their associated frequencies. The array is in order of increasing
|
||||
// frequency, and has as its last element a special element with frequency
|
||||
// MaxInt32
|
||||
// maxBits The maximum number of bits that should be used to encode any literal.
|
||||
// Must be less than 16.
|
||||
// return An integer array in which array[i] indicates the number of literals
|
||||
// that should be encoded in i bits.
|
||||
func (h *huffmanEncoder) bitCounts(list []literalNode, maxBits int32) []int32 {
|
||||
if maxBits >= maxBitsLimit {
|
||||
panic("flate: maxBits too large")
|
||||
}
|
||||
n := int32(len(list))
|
||||
list = list[0 : n+1]
|
||||
list[n] = maxNode()
|
||||
|
||||
// The tree can't have greater depth than n - 1, no matter what. This
|
||||
// saves a little bit of work in some small cases
|
||||
if maxBits > n-1 {
|
||||
maxBits = n - 1
|
||||
}
|
||||
|
||||
// Create information about each of the levels.
|
||||
// A bogus "Level 0" whose sole purpose is so that
|
||||
// level1.prev.needed==0. This makes level1.nextPairFreq
|
||||
// be a legitimate value that never gets chosen.
|
||||
var levels [maxBitsLimit]levelInfo
|
||||
// leafCounts[i] counts the number of literals at the left
|
||||
// of ancestors of the rightmost node at level i.
|
||||
// leafCounts[i][j] is the number of literals at the left
|
||||
// of the level j ancestor.
|
||||
var leafCounts [maxBitsLimit][maxBitsLimit]int32
|
||||
|
||||
for level := int32(1); level <= maxBits; level++ {
|
||||
// For every level, the first two items are the first two characters.
|
||||
// We initialize the levels as if we had already figured this out.
|
||||
levels[level] = levelInfo{
|
||||
level: level,
|
||||
lastFreq: list[1].freq,
|
||||
nextCharFreq: list[2].freq,
|
||||
nextPairFreq: list[0].freq + list[1].freq,
|
||||
}
|
||||
leafCounts[level][level] = 2
|
||||
if level == 1 {
|
||||
levels[level].nextPairFreq = math.MaxInt32
|
||||
}
|
||||
}
|
||||
|
||||
// We need a total of 2*n - 2 items at top level and have already generated 2.
|
||||
levels[maxBits].needed = 2*n - 4
|
||||
|
||||
level := maxBits
|
||||
for {
|
||||
l := &levels[level]
|
||||
if l.nextPairFreq == math.MaxInt32 && l.nextCharFreq == math.MaxInt32 {
|
||||
// We've run out of both leaves and pairs.
|
||||
// End all calculations for this level.
|
||||
// To make sure we never come back to this level or any lower level,
|
||||
// set nextPairFreq impossibly large.
|
||||
l.needed = 0
|
||||
levels[level+1].nextPairFreq = math.MaxInt32
|
||||
level++
|
||||
continue
|
||||
}
|
||||
|
||||
prevFreq := l.lastFreq
|
||||
if l.nextCharFreq < l.nextPairFreq {
|
||||
// The next item on this row is a leaf node.
|
||||
n := leafCounts[level][level] + 1
|
||||
l.lastFreq = l.nextCharFreq
|
||||
// Lower leafCounts are the same as the previous node.
|
||||
leafCounts[level][level] = n
|
||||
l.nextCharFreq = list[n].freq
|
||||
} else {
|
||||
// The next item on this row is a pair from the previous row.
|
||||
// nextPairFreq isn't valid until we generate two
|
||||
// more values in the level below
|
||||
l.lastFreq = l.nextPairFreq
|
||||
// Take leaf counts from the lower level, except counts[level] remains the same.
|
||||
copy(leafCounts[level][:level], leafCounts[level-1][:level])
|
||||
levels[l.level-1].needed = 2
|
||||
}
|
||||
|
||||
if l.needed--; l.needed == 0 {
|
||||
// We've done everything we need to do for this level.
|
||||
// Continue calculating one level up. Fill in nextPairFreq
|
||||
// of that level with the sum of the two nodes we've just calculated on
|
||||
// this level.
|
||||
if l.level == maxBits {
|
||||
// All done!
|
||||
break
|
||||
}
|
||||
levels[l.level+1].nextPairFreq = prevFreq + l.lastFreq
|
||||
level++
|
||||
} else {
|
||||
// If we stole from below, move down temporarily to replenish it.
|
||||
for levels[level-1].needed > 0 {
|
||||
level--
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Something is wrong if, at the end, the top level is null or hasn't used
|
||||
// all of the leaves.
|
||||
if leafCounts[maxBits][maxBits] != n {
|
||||
panic("leafCounts[maxBits][maxBits] != n")
|
||||
}
|
||||
|
||||
bitCount := h.bitCount[:maxBits+1]
|
||||
//make([]int32, maxBits+1)
|
||||
bits := 1
|
||||
counts := &leafCounts[maxBits]
|
||||
for level := maxBits; level > 0; level-- {
|
||||
// chain.leafCount gives the number of literals requiring at least "bits"
|
||||
// bits to encode.
|
||||
bitCount[bits] = counts[level] - counts[level-1]
|
||||
bits++
|
||||
}
|
||||
return bitCount
|
||||
}
|
||||
|
||||
// Look at the leaves and assign them a bit count and an encoding as specified
|
||||
// in RFC 1951 3.2.2
|
||||
func (h *huffmanEncoder) assignEncodingAndSize(bitCount []int32, list []literalNode) {
|
||||
code := uint16(0)
|
||||
for n, bits := range bitCount {
|
||||
code <<= 1
|
||||
if n == 0 || bits == 0 {
|
||||
continue
|
||||
}
|
||||
// The literals list[len(list)-bits] .. list[len(list)-1]
|
||||
// are encoded using "bits" bits, and get the values
|
||||
// code, code + 1, .... The code values are
|
||||
// assigned in literal order (not frequency order).
|
||||
chunk := list[len(list)-int(bits):]
|
||||
|
||||
h.lns.Sort(chunk)
|
||||
for _, node := range chunk {
|
||||
h.codes[node.literal] = toCode(reverseBits(code, uint8(n)), uint8(n))
|
||||
code++
|
||||
}
|
||||
list = list[0 : len(list)-int(bits)]
|
||||
}
|
||||
}
|
||||
|
||||
// Update this Huffman Code object to be the minimum code for the specified frequency count.
|
||||
//
|
||||
// freq An array of frequencies, in which frequency[i] gives the frequency of literal i.
|
||||
// maxBits The maximum number of bits to use for any literal.
|
||||
func (h *huffmanEncoder) generate(freq []int32, maxBits int32) {
|
||||
if h.freqcache == nil {
|
||||
h.freqcache = make([]literalNode, 300)
|
||||
}
|
||||
list := h.freqcache[:len(freq)+1]
|
||||
// Number of non-zero literals
|
||||
count := 0
|
||||
// Set list to be the set of all non-zero literals and their frequencies
|
||||
for i, f := range freq {
|
||||
if f != 0 {
|
||||
list[count] = literalNode{uint16(i), f}
|
||||
count++
|
||||
} else {
|
||||
list[count] = literalNode{}
|
||||
//h.codeBits[i] = 0
|
||||
h.codes[i].setBits(0)
|
||||
}
|
||||
}
|
||||
list[len(freq)] = literalNode{}
|
||||
// If freq[] is shorter than codeBits[], fill rest of codeBits[] with zeros
|
||||
// FIXME: Doesn't do what it says on the tin (klauspost)
|
||||
//h.codeBits = h.codeBits[0:len(freq)]
|
||||
|
||||
list = list[0:count]
|
||||
if count <= 2 {
|
||||
// Handle the small cases here, because they are awkward for the general case code. With
|
||||
// two or fewer literals, everything has bit length 1.
|
||||
for i, node := range list {
|
||||
// "list" is in order of increasing literal value.
|
||||
h.codes[node.literal].set(uint16(i), 1)
|
||||
//h.codeBits[node.literal] = 1
|
||||
//h.code[node.literal] = uint16(i)
|
||||
}
|
||||
return
|
||||
}
|
||||
h.lfs.Sort(list)
|
||||
|
||||
// Get the number of literals for each bit count
|
||||
bitCount := h.bitCounts(list, maxBits)
|
||||
// And do the assignment
|
||||
h.assignEncodingAndSize(bitCount, list)
|
||||
}
|
||||
|
||||
type literalNodeSorter []literalNode
|
||||
|
||||
func (s *literalNodeSorter) Sort(a []literalNode) {
|
||||
*s = literalNodeSorter(a)
|
||||
sort.Sort(s)
|
||||
}
|
||||
|
||||
func (s literalNodeSorter) Len() int { return len(s) }
|
||||
|
||||
func (s literalNodeSorter) Less(i, j int) bool {
|
||||
return s[i].literal < s[j].literal
|
||||
}
|
||||
|
||||
func (s literalNodeSorter) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
|
||||
type literalFreqSorter []literalNode
|
||||
|
||||
func (s *literalFreqSorter) Sort(a []literalNode) {
|
||||
*s = literalFreqSorter(a)
|
||||
sort.Sort(s)
|
||||
}
|
||||
|
||||
func (s literalFreqSorter) Len() int { return len(s) }
|
||||
|
||||
func (s literalFreqSorter) Less(i, j int) bool {
|
||||
if s[i].freq == s[j].freq {
|
||||
return s[i].literal < s[j].literal
|
||||
}
|
||||
return s[i].freq < s[j].freq
|
||||
}
|
||||
|
||||
func (s literalFreqSorter) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
846
vendor/github.com/klauspost/compress/flate/inflate.go
generated
vendored
Normal file
@ -0,0 +1,846 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:generate go run gen.go -output fixedhuff.go
|
||||
|
||||
// Package flate implements the DEFLATE compressed data format, described in
|
||||
// RFC 1951. The gzip and zlib packages implement access to DEFLATE-based file
|
||||
// formats.
|
||||
package flate
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
const (
|
||||
maxCodeLen = 16 // max length of Huffman code
|
||||
maxHist = 32768 // max history required
|
||||
// The next three numbers come from the RFC section 3.2.7, with the
|
||||
// additional proviso in section 3.2.5 which implies that distance codes
|
||||
// 30 and 31 should never occur in compressed data.
|
||||
maxNumLit = 286
|
||||
maxNumDist = 30
|
||||
numCodes = 19 // number of codes in Huffman meta-code
|
||||
)
|
||||
|
||||
// A CorruptInputError reports the presence of corrupt input at a given offset.
|
||||
type CorruptInputError int64
|
||||
|
||||
func (e CorruptInputError) Error() string {
|
||||
return "flate: corrupt input before offset " + strconv.FormatInt(int64(e), 10)
|
||||
}
|
||||
|
||||
// An InternalError reports an error in the flate code itself.
|
||||
type InternalError string
|
||||
|
||||
func (e InternalError) Error() string { return "flate: internal error: " + string(e) }
|
||||
|
||||
// A ReadError reports an error encountered while reading input.
|
||||
type ReadError struct {
|
||||
Offset int64 // byte offset where error occurred
|
||||
Err error // error returned by underlying Read
|
||||
}
|
||||
|
||||
func (e *ReadError) Error() string {
|
||||
return "flate: read error at offset " + strconv.FormatInt(e.Offset, 10) + ": " + e.Err.Error()
|
||||
}
|
||||
|
||||
// A WriteError reports an error encountered while writing output.
|
||||
type WriteError struct {
|
||||
Offset int64 // byte offset where error occurred
|
||||
Err error // error returned by underlying Write
|
||||
}
|
||||
|
||||
func (e *WriteError) Error() string {
|
||||
return "flate: write error at offset " + strconv.FormatInt(e.Offset, 10) + ": " + e.Err.Error()
|
||||
}
|
||||
|
||||
// Resetter resets a ReadCloser returned by NewReader or NewReaderDict
|
||||
// to switch to a new underlying Reader. This permits reusing a ReadCloser
|
||||
// instead of allocating a new one.
|
||||
type Resetter interface {
|
||||
// Reset discards any buffered data and resets the Resetter as if it was
|
||||
// newly initialized with the given reader.
|
||||
Reset(r io.Reader, dict []byte) error
|
||||
}
|
||||
|
||||
// Note that much of the implementation of huffmanDecoder is also copied
|
||||
// into gen.go (in package main) for the purpose of precomputing the
|
||||
// fixed huffman tables so they can be included statically.
|
||||
|
||||
// The data structure for decoding Huffman tables is based on that of
|
||||
// zlib. There is a lookup table of a fixed bit width (huffmanChunkBits).
|
||||
// For codes smaller than the table width, there are multiple entries
|
||||
// (each combination of trailing bits has the same value). For codes
|
||||
// larger than the table width, the table contains a link to an overflow
|
||||
// table. The width of each entry in the link table is the maximum code
|
||||
// size minus the chunk width.
|
||||
|
||||
// Note that you can do a lookup in the table even without all bits
|
||||
// filled. Since the extra bits are zero, and the DEFLATE Huffman codes
|
||||
// have the property that shorter codes come before longer ones, the
|
||||
// bit length estimate in the result is a lower bound on the actual
|
||||
// number of bits.
|
||||
|
||||
// chunk & 15 is number of bits
|
||||
// chunk >> 4 is value, including table link
|
||||
|
||||
const (
|
||||
huffmanChunkBits = 9
|
||||
huffmanNumChunks = 1 << huffmanChunkBits
|
||||
huffmanCountMask = 15
|
||||
huffmanValueShift = 4
|
||||
)
|
||||
|
||||
type huffmanDecoder struct {
|
||||
min int // the minimum code length
|
||||
chunks [huffmanNumChunks]uint32 // chunks as described above
|
||||
links [][]uint32 // overflow links
|
||||
linkMask uint32 // mask the width of the link table
|
||||
}
|
||||
|
||||
// Initialize Huffman decoding tables from array of code lengths.
|
||||
// Following this function, h is guaranteed to be initialized into a complete
|
||||
// tree (i.e., neither over-subscribed nor under-subscribed). The exception is a
|
||||
// degenerate case where the tree has only a single symbol with length 1. Empty
|
||||
// trees are permitted.
|
||||
func (h *huffmanDecoder) init(bits []int) bool {
|
||||
// Sanity enables additional runtime tests during Huffman
|
||||
// table construction. It's intended to be used during
|
||||
// development to supplement the currently ad-hoc unit tests.
|
||||
const sanity = false
|
||||
|
||||
if h.min != 0 {
|
||||
*h = huffmanDecoder{}
|
||||
}
|
||||
|
||||
// Count number of codes of each length,
|
||||
// compute min and max length.
|
||||
var count [maxCodeLen]int
|
||||
var min, max int
|
||||
for _, n := range bits {
|
||||
if n == 0 {
|
||||
continue
|
||||
}
|
||||
if min == 0 || n < min {
|
||||
min = n
|
||||
}
|
||||
if n > max {
|
||||
max = n
|
||||
}
|
||||
count[n]++
|
||||
}
|
||||
|
||||
// Empty tree. The decompressor.huffSym function will fail later if the tree
|
||||
// is used. Technically, an empty tree is only valid for the HDIST tree and
|
||||
// not the HCLEN and HLIT tree. However, a stream with an empty HCLEN tree
|
||||
// is guaranteed to fail since it will attempt to use the tree to decode the
|
||||
// codes for the HLIT and HDIST trees. Similarly, an empty HLIT tree is
|
||||
// guaranteed to fail later since the compressed data section must be
|
||||
// composed of at least one symbol (the end-of-block marker).
|
||||
if max == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
code := 0
|
||||
var nextcode [maxCodeLen]int
|
||||
for i := min; i <= max; i++ {
|
||||
code <<= 1
|
||||
nextcode[i] = code
|
||||
code += count[i]
|
||||
}
|
||||
|
||||
// Check that the coding is complete (i.e., that we've
|
||||
// assigned all 2-to-the-max possible bit sequences).
|
||||
// Exception: To be compatible with zlib, we also need to
|
||||
// accept degenerate single-code codings. See also
|
||||
// TestDegenerateHuffmanCoding.
|
||||
if code != 1<<uint(max) && !(code == 1 && max == 1) {
|
||||
return false
|
||||
}
|
||||
|
||||
h.min = min
|
||||
if max > huffmanChunkBits {
|
||||
numLinks := 1 << (uint(max) - huffmanChunkBits)
|
||||
h.linkMask = uint32(numLinks - 1)
|
||||
|
||||
// create link tables
|
||||
link := nextcode[huffmanChunkBits+1] >> 1
|
||||
h.links = make([][]uint32, huffmanNumChunks-link)
|
||||
for j := uint(link); j < huffmanNumChunks; j++ {
|
||||
reverse := int(reverseByte[j>>8]) | int(reverseByte[j&0xff])<<8
|
||||
reverse >>= uint(16 - huffmanChunkBits)
|
||||
off := j - uint(link)
|
||||
if sanity && h.chunks[reverse] != 0 {
|
||||
panic("impossible: overwriting existing chunk")
|
||||
}
|
||||
h.chunks[reverse] = uint32(off<<huffmanValueShift | (huffmanChunkBits + 1))
|
||||
h.links[off] = make([]uint32, numLinks)
|
||||
}
|
||||
}
|
||||
|
||||
for i, n := range bits {
|
||||
if n == 0 {
|
||||
continue
|
||||
}
|
||||
code := nextcode[n]
|
||||
nextcode[n]++
|
||||
chunk := uint32(i<<huffmanValueShift | n)
|
||||
reverse := int(reverseByte[code>>8]) | int(reverseByte[code&0xff])<<8
|
||||
reverse >>= uint(16 - n)
|
||||
if n <= huffmanChunkBits {
|
||||
for off := reverse; off < len(h.chunks); off += 1 << uint(n) {
|
||||
// We should never need to overwrite
|
||||
// an existing chunk. Also, 0 is
|
||||
// never a valid chunk, because the
|
||||
// lower 4 "count" bits should be
|
||||
// between 1 and 15.
|
||||
if sanity && h.chunks[off] != 0 {
|
||||
panic("impossible: overwriting existing chunk")
|
||||
}
|
||||
h.chunks[off] = chunk
|
||||
}
|
||||
} else {
|
||||
j := reverse & (huffmanNumChunks - 1)
|
||||
if sanity && h.chunks[j]&huffmanCountMask != huffmanChunkBits+1 {
|
||||
// Longer codes should have been
|
||||
// associated with a link table above.
|
||||
panic("impossible: not an indirect chunk")
|
||||
}
|
||||
value := h.chunks[j] >> huffmanValueShift
|
||||
linktab := h.links[value]
|
||||
reverse >>= huffmanChunkBits
|
||||
for off := reverse; off < len(linktab); off += 1 << uint(n-huffmanChunkBits) {
|
||||
if sanity && linktab[off] != 0 {
|
||||
panic("impossible: overwriting existing chunk")
|
||||
}
|
||||
linktab[off] = chunk
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if sanity {
|
||||
// Above we've sanity checked that we never overwrote
|
||||
// an existing entry. Here we additionally check that
|
||||
// we filled the tables completely.
|
||||
for i, chunk := range h.chunks {
|
||||
if chunk == 0 {
|
||||
// As an exception, in the degenerate
|
||||
// single-code case, we allow odd
|
||||
// chunks to be missing.
|
||||
if code == 1 && i%2 == 1 {
|
||||
continue
|
||||
}
|
||||
panic("impossible: missing chunk")
|
||||
}
|
||||
}
|
||||
for _, linktab := range h.links {
|
||||
for _, chunk := range linktab {
|
||||
if chunk == 0 {
|
||||
panic("impossible: missing chunk")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// The actual read interface needed by NewReader.
|
||||
// If the passed in io.Reader does not also have ReadByte,
|
||||
// the NewReader will introduce its own buffering.
|
||||
type Reader interface {
|
||||
io.Reader
|
||||
io.ByteReader
|
||||
}
|
||||
|
||||
// Decompress state.
|
||||
type decompressor struct {
|
||||
// Input source.
|
||||
r Reader
|
||||
roffset int64
|
||||
woffset int64
|
||||
|
||||
// Input bits, in top of b.
|
||||
b uint32
|
||||
nb uint
|
||||
|
||||
// Huffman decoders for literal/length, distance.
|
||||
h1, h2 huffmanDecoder
|
||||
|
||||
// Length arrays used to define Huffman codes.
|
||||
bits *[maxNumLit + maxNumDist]int
|
||||
codebits *[numCodes]int
|
||||
|
||||
// Output history, buffer.
|
||||
hist *[maxHist]byte
|
||||
hp int // current output position in buffer
|
||||
hw int // have written hist[0:hw] already
|
||||
hfull bool // buffer has filled at least once
|
||||
|
||||
// Temporary buffer (avoids repeated allocation).
|
||||
buf [4]byte
|
||||
|
||||
// Next step in the decompression,
|
||||
// and decompression state.
|
||||
step func(*decompressor)
|
||||
final bool
|
||||
err error
|
||||
toRead []byte
|
||||
hl, hd *huffmanDecoder
|
||||
copyLen int
|
||||
copyDist int
|
||||
}
|
||||
|
||||
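// nextBlock reads the 3-bit block header (BFINAL and BTYPE) and dispatches
// to stored (0), fixed-Huffman (1), or dynamic-Huffman (2) decoding; BTYPE 3
// is reserved and treated as corrupt input.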
func (f *decompressor) nextBlock() {
|
||||
if f.final {
|
||||
if f.hw != f.hp {
|
||||
f.flush((*decompressor).nextBlock)
|
||||
return
|
||||
}
|
||||
f.err = io.EOF
|
||||
return
|
||||
}
|
||||
for f.nb < 1+2 {
|
||||
if f.err = f.moreBits(); f.err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
f.final = f.b&1 == 1
|
||||
f.b >>= 1
|
||||
typ := f.b & 3
|
||||
f.b >>= 2
|
||||
f.nb -= 1 + 2
|
||||
switch typ {
|
||||
case 0:
|
||||
f.dataBlock()
|
||||
case 1:
|
||||
// compressed, fixed Huffman tables
|
||||
f.hl = &fixedHuffmanDecoder
|
||||
f.hd = nil
|
||||
f.huffmanBlock()
|
||||
case 2:
|
||||
// compressed, dynamic Huffman tables
|
||||
if f.err = f.readHuffman(); f.err != nil {
|
||||
break
|
||||
}
|
||||
f.hl = &f.h1
|
||||
f.hd = &f.h2
|
||||
f.huffmanBlock()
|
||||
default:
|
||||
// 3 is reserved.
|
||||
f.err = CorruptInputError(f.roffset)
|
||||
}
|
||||
}
|
||||
|
||||
func (f *decompressor) Read(b []byte) (int, error) {
|
||||
for {
|
||||
if len(f.toRead) > 0 {
|
||||
n := copy(b, f.toRead)
|
||||
f.toRead = f.toRead[n:]
|
||||
return n, nil
|
||||
}
|
||||
if f.err != nil {
|
||||
return 0, f.err
|
||||
}
|
||||
f.step(f)
|
||||
}
|
||||
}
|
||||
|
||||
// Support the io.WriteTo interface for io.Copy and friends.
|
||||
func (f *decompressor) WriteTo(w io.Writer) (int64, error) {
|
||||
total := int64(0)
|
||||
for {
|
||||
if f.err != nil {
|
||||
if f.err == io.EOF {
|
||||
return total, nil
|
||||
}
|
||||
return total, f.err
|
||||
}
|
||||
if len(f.toRead) > 0 {
|
||||
var n int
|
||||
n, f.err = w.Write(f.toRead)
|
||||
if f.err != nil {
|
||||
return total, f.err
|
||||
}
|
||||
if n != len(f.toRead) {
|
||||
return total, io.ErrShortWrite
|
||||
}
|
||||
f.toRead = f.toRead[:0]
|
||||
total += int64(n)
|
||||
}
|
||||
f.step(f)
|
||||
}
|
||||
}
|
||||
|
||||
func (f *decompressor) Close() error {
|
||||
if f.err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return f.err
|
||||
}
|
||||
|
||||
// RFC 1951 section 3.2.7.
|
||||
// Compression with dynamic Huffman codes
|
||||
|
||||
var codeOrder = [...]int{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}
|
||||
|
||||
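// readHuffman reads the dynamic block header: the HLIT, HDIST and HCLEN
// counts, the 3-bit code lengths of the code-length code (in codeOrder
// order), then the nlit+ndist code lengths themselves (using repeat codes
// 16-18), and finally builds the literal/length decoder h1 and the distance
// decoder h2.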
func (f *decompressor) readHuffman() error {
|
||||
// HLIT[5], HDIST[5], HCLEN[4].
|
||||
for f.nb < 5+5+4 {
|
||||
if err := f.moreBits(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
nlit := int(f.b&0x1F) + 257
|
||||
if nlit > maxNumLit {
|
||||
return CorruptInputError(f.roffset)
|
||||
}
|
||||
f.b >>= 5
|
||||
ndist := int(f.b&0x1F) + 1
|
||||
if ndist > maxNumDist {
|
||||
return CorruptInputError(f.roffset)
|
||||
}
|
||||
f.b >>= 5
|
||||
nclen := int(f.b&0xF) + 4
|
||||
// numCodes is 19, so nclen is always valid.
|
||||
f.b >>= 4
|
||||
f.nb -= 5 + 5 + 4
|
||||
|
||||
// (HCLEN+4)*3 bits: code lengths in the magic codeOrder order.
|
||||
for i := 0; i < nclen; i++ {
|
||||
for f.nb < 3 {
|
||||
if err := f.moreBits(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
f.codebits[codeOrder[i]] = int(f.b & 0x7)
|
||||
f.b >>= 3
|
||||
f.nb -= 3
|
||||
}
|
||||
for i := nclen; i < len(codeOrder); i++ {
|
||||
f.codebits[codeOrder[i]] = 0
|
||||
}
|
||||
if !f.h1.init(f.codebits[0:]) {
|
||||
return CorruptInputError(f.roffset)
|
||||
}
|
||||
|
||||
// HLIT + 257 code lengths, HDIST + 1 code lengths,
|
||||
// using the code length Huffman code.
|
||||
for i, n := 0, nlit+ndist; i < n; {
|
||||
x, err := f.huffSym(&f.h1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if x < 16 {
|
||||
// Actual length.
|
||||
f.bits[i] = x
|
||||
i++
|
||||
continue
|
||||
}
|
||||
// Repeat previous length or zero.
|
||||
var rep int
|
||||
var nb uint
|
||||
var b int
|
||||
switch x {
|
||||
default:
|
||||
return InternalError("unexpected length code")
|
||||
case 16:
|
||||
rep = 3
|
||||
nb = 2
|
||||
if i == 0 {
|
||||
return CorruptInputError(f.roffset)
|
||||
}
|
||||
b = f.bits[i-1]
|
||||
case 17:
|
||||
rep = 3
|
||||
nb = 3
|
||||
b = 0
|
||||
case 18:
|
||||
rep = 11
|
||||
nb = 7
|
||||
b = 0
|
||||
}
|
||||
for f.nb < nb {
|
||||
if err := f.moreBits(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
rep += int(f.b & uint32(1<<nb-1))
|
||||
f.b >>= nb
|
||||
f.nb -= nb
|
||||
if i+rep > n {
|
||||
return CorruptInputError(f.roffset)
|
||||
}
|
||||
for j := 0; j < rep; j++ {
|
||||
f.bits[i] = b
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
if !f.h1.init(f.bits[0:nlit]) || !f.h2.init(f.bits[nlit:nlit+ndist]) {
|
||||
return CorruptInputError(f.roffset)
|
||||
}
|
||||
|
||||
// In order to preserve the property that we never read any extra bytes
|
||||
// after the end of the DEFLATE stream, huffSym conservatively reads min
|
||||
// bits at a time until it decodes the symbol. However, since every block
|
||||
// must end with an EOB marker, we can use that as the minimum number of
|
||||
// bits to read and guarantee we never read past the end of the stream.
|
||||
if f.bits[endBlockMarker] > 0 {
|
||||
f.h1.min = f.bits[endBlockMarker] // Length of EOB marker
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a single Huffman block from f.
|
||||
// hl and hd are the Huffman states for the lit/length values
|
||||
// and the distance values, respectively. If hd == nil, use the
|
||||
// fixed distance encoding associated with fixed Huffman blocks.
|
||||
func (f *decompressor) huffmanBlock() {
|
||||
for {
|
||||
v, err := f.huffSym(f.hl)
|
||||
if err != nil {
|
||||
f.err = err
|
||||
return
|
||||
}
|
||||
var n uint // number of bits extra
|
||||
var length int
|
||||
switch {
|
||||
case v < 256:
|
||||
f.hist[f.hp] = byte(v)
|
||||
f.hp++
|
||||
if f.hp == len(f.hist) {
|
||||
// After the flush, continue this loop.
|
||||
f.flush((*decompressor).huffmanBlock)
|
||||
return
|
||||
}
|
||||
continue
|
||||
case v == 256:
|
||||
// Done with huffman block; read next block.
|
||||
f.step = (*decompressor).nextBlock
|
||||
return
|
||||
// otherwise, reference to older data
|
||||
case v < 265:
|
||||
length = v - (257 - 3)
|
||||
n = 0
|
||||
case v < 269:
|
||||
length = v*2 - (265*2 - 11)
|
||||
n = 1
|
||||
case v < 273:
|
||||
length = v*4 - (269*4 - 19)
|
||||
n = 2
|
||||
case v < 277:
|
||||
length = v*8 - (273*8 - 35)
|
||||
n = 3
|
||||
case v < 281:
|
||||
length = v*16 - (277*16 - 67)
|
||||
n = 4
|
||||
case v < 285:
|
||||
length = v*32 - (281*32 - 131)
|
||||
n = 5
|
||||
case v < maxNumLit:
|
||||
length = 258
|
||||
n = 0
|
||||
default:
|
||||
f.err = CorruptInputError(f.roffset)
|
||||
return
|
||||
}
|
||||
if n > 0 {
|
||||
for f.nb < n {
|
||||
if err = f.moreBits(); err != nil {
|
||||
f.err = err
|
||||
return
|
||||
}
|
||||
}
|
||||
length += int(f.b & uint32(1<<n-1))
|
||||
f.b >>= n
|
||||
f.nb -= n
|
||||
}
|
||||
|
||||
var dist int
|
||||
if f.hd == nil {
|
||||
for f.nb < 5 {
|
||||
if err = f.moreBits(); err != nil {
|
||||
f.err = err
|
||||
return
|
||||
}
|
||||
}
|
||||
dist = int(reverseByte[(f.b&0x1F)<<3])
|
||||
f.b >>= 5
|
||||
f.nb -= 5
|
||||
} else {
|
||||
if dist, err = f.huffSym(f.hd); err != nil {
|
||||
f.err = err
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
switch {
|
||||
case dist < 4:
|
||||
dist++
|
||||
case dist < maxNumDist:
|
||||
nb := uint(dist-2) >> 1
|
||||
// have 1 bit in bottom of dist, need nb more.
|
||||
extra := (dist & 1) << nb
|
||||
for f.nb < nb {
|
||||
if err = f.moreBits(); err != nil {
|
||||
f.err = err
|
||||
return
|
||||
}
|
||||
}
|
||||
extra |= int(f.b & uint32(1<<nb-1))
|
||||
f.b >>= nb
|
||||
f.nb -= nb
|
||||
dist = 1<<(nb+1) + 1 + extra
|
||||
default:
|
||||
f.err = CorruptInputError(f.roffset)
|
||||
return
|
||||
}
|
||||
|
||||
// Copy history[-dist:-dist+length] into output.
|
||||
if dist > len(f.hist) {
|
||||
f.err = InternalError("bad history distance")
|
||||
return
|
||||
}
|
||||
|
||||
// No check on length; encoding can be prescient.
|
||||
if !f.hfull && dist > f.hp {
|
||||
f.err = CorruptInputError(f.roffset)
|
||||
return
|
||||
}
|
||||
|
||||
f.copyLen, f.copyDist = length, dist
|
||||
if f.copyHist() {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// copyHist copies f.copyLen bytes from f.hist (f.copyDist bytes ago) to itself.
|
||||
// It reports whether the f.hist buffer is full.
|
||||
func (f *decompressor) copyHist() bool {
|
||||
p := f.hp - f.copyDist
|
||||
if p < 0 {
|
||||
p += len(f.hist)
|
||||
}
|
||||
for f.copyLen > 0 {
|
||||
n := f.copyLen
|
||||
if x := len(f.hist) - f.hp; n > x {
|
||||
n = x
|
||||
}
|
||||
if x := len(f.hist) - p; n > x {
|
||||
n = x
|
||||
}
|
||||
forwardCopy(f.hist[:], f.hp, p, n)
|
||||
p += n
|
||||
f.hp += n
|
||||
f.copyLen -= n
|
||||
if f.hp == len(f.hist) {
|
||||
// After flush continue copying out of history.
|
||||
f.flush((*decompressor).copyHuff)
|
||||
return true
|
||||
}
|
||||
if p == len(f.hist) {
|
||||
p = 0
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (f *decompressor) copyHuff() {
|
||||
if f.copyHist() {
|
||||
return
|
||||
}
|
||||
f.huffmanBlock()
|
||||
}
|
||||
|
||||
// Copy a single uncompressed data block from input to output.
|
||||
func (f *decompressor) dataBlock() {
|
||||
// Uncompressed.
|
||||
// Discard current half-byte.
|
||||
f.nb = 0
|
||||
f.b = 0
|
||||
|
||||
// Length then ones-complement of length.
|
||||
nr, err := io.ReadFull(f.r, f.buf[0:4])
|
||||
f.roffset += int64(nr)
|
||||
if err != nil {
|
||||
f.err = &ReadError{f.roffset, err}
|
||||
return
|
||||
}
|
||||
n := int(f.buf[0]) | int(f.buf[1])<<8
|
||||
nn := int(f.buf[2]) | int(f.buf[3])<<8
|
||||
if uint16(nn) != uint16(^n) {
|
||||
f.err = CorruptInputError(f.roffset)
|
||||
return
|
||||
}
|
||||
|
||||
if n == 0 {
|
||||
// 0-length block means sync
|
||||
f.flush((*decompressor).nextBlock)
|
||||
return
|
||||
}
|
||||
|
||||
f.copyLen = n
|
||||
f.copyData()
|
||||
}
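// Illustrative sketch (not part of the original source): a worked instance
// of the stored-block header check in dataBlock above. A 5-byte stored block
// carries LEN = 0x0005 and NLEN = 0xFFFA, and the decoder accepts it because
// NLEN is the ones-complement of LEN. The function name is a placeholder and
// "fmt" is assumed to be imported.
func exampleStoredHeader() {
	n := 0x0005                           // LEN, read little-endian from the stream
	nn := 0xFFFA                          // NLEN, read little-endian from the stream
	fmt.Println(uint16(nn) == uint16(^n)) // prints true
}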
|
||||
|
||||
// copyData copies f.copyLen bytes from the underlying reader into f.hist.
|
||||
// It pauses for reads when f.hist is full.
|
||||
func (f *decompressor) copyData() {
|
||||
n := f.copyLen
|
||||
for n > 0 {
|
||||
m := len(f.hist) - f.hp
|
||||
if m > n {
|
||||
m = n
|
||||
}
|
||||
m, err := io.ReadFull(f.r, f.hist[f.hp:f.hp+m])
|
||||
f.roffset += int64(m)
|
||||
if err != nil {
|
||||
f.err = &ReadError{f.roffset, err}
|
||||
return
|
||||
}
|
||||
n -= m
|
||||
f.hp += m
|
||||
if f.hp == len(f.hist) {
|
||||
f.copyLen = n
|
||||
f.flush((*decompressor).copyData)
|
||||
return
|
||||
}
|
||||
}
|
||||
f.step = (*decompressor).nextBlock
|
||||
}
|
||||
|
||||
func (f *decompressor) setDict(dict []byte) {
|
||||
if len(dict) > len(f.hist) {
|
||||
// Will only remember the tail.
|
||||
dict = dict[len(dict)-len(f.hist):]
|
||||
}
|
||||
|
||||
f.hp = copy(f.hist[:], dict)
|
||||
if f.hp == len(f.hist) {
|
||||
f.hp = 0
|
||||
f.hfull = true
|
||||
}
|
||||
f.hw = f.hp
|
||||
}
|
||||
|
||||
func (f *decompressor) moreBits() error {
|
||||
c, err := f.r.ReadByte()
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
err = io.ErrUnexpectedEOF
|
||||
}
|
||||
return err
|
||||
}
|
||||
f.roffset++
|
||||
f.b |= uint32(c) << f.nb
|
||||
f.nb += 8
|
||||
return nil
|
||||
}
|
||||
|
||||
// Read the next Huffman-encoded symbol from f according to h.
|
||||
func (f *decompressor) huffSym(h *huffmanDecoder) (int, error) {
|
||||
// Since a huffmanDecoder can be empty or be composed of a degenerate tree
|
||||
// with a single element, huffSym must error on these two edge cases. In both
|
||||
// cases, the chunks slice will be 0 for the invalid sequence, leading it to
|
||||
// satisfy the n == 0 check below.
|
||||
n := uint(h.min)
|
||||
for {
|
||||
for f.nb < n {
|
||||
if err := f.moreBits(); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
chunk := h.chunks[f.b&(huffmanNumChunks-1)]
|
||||
n = uint(chunk & huffmanCountMask)
|
||||
if n > huffmanChunkBits {
|
||||
chunk = h.links[chunk>>huffmanValueShift][(f.b>>huffmanChunkBits)&h.linkMask]
|
||||
n = uint(chunk & huffmanCountMask)
|
||||
}
|
||||
if n <= f.nb {
|
||||
if n == 0 {
|
||||
f.err = CorruptInputError(f.roffset)
|
||||
return 0, f.err
|
||||
}
|
||||
f.b >>= n
|
||||
f.nb -= n
|
||||
return int(chunk >> huffmanValueShift), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flush any buffered output to the underlying writer.
|
||||
func (f *decompressor) flush(step func(*decompressor)) {
|
||||
f.toRead = f.hist[f.hw:f.hp]
|
||||
f.woffset += int64(f.hp - f.hw)
|
||||
f.hw = f.hp
|
||||
if f.hp == len(f.hist) {
|
||||
f.hp = 0
|
||||
f.hw = 0
|
||||
f.hfull = true
|
||||
}
|
||||
f.step = step
|
||||
}
|
||||
|
||||
func makeReader(r io.Reader) Reader {
|
||||
if rr, ok := r.(Reader); ok {
|
||||
return rr
|
||||
}
|
||||
return bufio.NewReader(r)
|
||||
}
|
||||
|
||||
func (f *decompressor) Reset(r io.Reader, dict []byte) error {
|
||||
*f = decompressor{
|
||||
r: makeReader(r),
|
||||
bits: f.bits,
|
||||
codebits: f.codebits,
|
||||
hist: f.hist,
|
||||
step: (*decompressor).nextBlock,
|
||||
}
|
||||
if dict != nil {
|
||||
f.setDict(dict)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NewReader returns a new ReadCloser that can be used
|
||||
// to read the uncompressed version of r.
|
||||
// If r does not also implement io.ByteReader,
|
||||
// the decompressor may read more data than necessary from r.
|
||||
// It is the caller's responsibility to call Close on the ReadCloser
|
||||
// when finished reading.
|
||||
//
|
||||
// The ReadCloser returned by NewReader also implements Resetter.
|
||||
func NewReader(r io.Reader) io.ReadCloser {
|
||||
var f decompressor
|
||||
f.bits = new([maxNumLit + maxNumDist]int)
|
||||
f.codebits = new([numCodes]int)
|
||||
f.r = makeReader(r)
|
||||
f.hist = new([maxHist]byte)
|
||||
f.step = (*decompressor).nextBlock
|
||||
return &f
|
||||
}
|
||||
|
||||
// NewReaderDict is like NewReader but initializes the reader
|
||||
// with a preset dictionary. The returned Reader behaves as if
|
||||
// the uncompressed data stream started with the given dictionary,
|
||||
// which has already been read. NewReaderDict is typically used
|
||||
// to read data compressed by NewWriterDict.
|
||||
//
|
||||
// The ReadCloser returned by NewReaderDict also implements Resetter.
|
||||
func NewReaderDict(r io.Reader, dict []byte) io.ReadCloser {
|
||||
var f decompressor
|
||||
f.r = makeReader(r)
|
||||
f.hist = new([maxHist]byte)
|
||||
f.bits = new([maxNumLit + maxNumDist]int)
|
||||
f.codebits = new([numCodes]int)
|
||||
f.step = (*decompressor).nextBlock
|
||||
f.setDict(dict)
|
||||
return &f
|
||||
}
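// Illustrative usage sketch (not part of the original source): the ReadCloser
// returned by NewReader can be reused for a second stream via the Resetter
// interface, as TestReset in the test file below exercises. The function name
// and parameters are assumptions for illustration; "io" and "io/ioutil" are
// assumed to be imported.
func exampleReuseReader(deflated1, deflated2 io.Reader) ([]byte, []byte, error) {
	r := NewReader(deflated1)
	defer r.Close()
	out1, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, nil, err
	}
	// Reuse the same decompressor (and its buffers) for the next stream.
	if err := r.(Resetter).Reset(deflated2, nil); err != nil {
		return nil, nil, err
	}
	out2, err := ioutil.ReadAll(r)
	return out1, out2, err
}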
|
225
vendor/github.com/klauspost/compress/flate/inflate_test.go
generated
vendored
Normal file
@ -0,0 +1,225 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestReset(t *testing.T) {
|
||||
ss := []string{
|
||||
"lorem ipsum izzle fo rizzle",
|
||||
"the quick brown fox jumped over",
|
||||
}
|
||||
|
||||
deflated := make([]bytes.Buffer, 2)
|
||||
for i, s := range ss {
|
||||
w, _ := NewWriter(&deflated[i], 1)
|
||||
w.Write([]byte(s))
|
||||
w.Close()
|
||||
}
|
||||
|
||||
inflated := make([]bytes.Buffer, 2)
|
||||
|
||||
f := NewReader(&deflated[0])
|
||||
io.Copy(&inflated[0], f)
|
||||
f.(Resetter).Reset(&deflated[1], nil)
|
||||
io.Copy(&inflated[1], f)
|
||||
f.Close()
|
||||
|
||||
for i, s := range ss {
|
||||
if s != inflated[i].String() {
|
||||
t.Errorf("inflated[%d]:\ngot %q\nwant %q", i, inflated[i], s)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Tests ported from zlib/test/infcover.c
|
||||
type infTest struct {
|
||||
hex string
|
||||
id string
|
||||
n int
|
||||
}
|
||||
|
||||
var infTests = []infTest{
|
||||
infTest{"0 0 0 0 0", "invalid stored block lengths", 1},
|
||||
infTest{"3 0", "fixed", 0},
|
||||
infTest{"6", "invalid block type", 1},
|
||||
infTest{"1 1 0 fe ff 0", "stored", 0},
|
||||
infTest{"fc 0 0", "too many length or distance symbols", 1},
|
||||
infTest{"4 0 fe ff", "invalid code lengths set", 1},
|
||||
infTest{"4 0 24 49 0", "invalid bit length repeat", 1},
|
||||
infTest{"4 0 24 e9 ff ff", "invalid bit length repeat", 1},
|
||||
infTest{"4 0 24 e9 ff 6d", "invalid code -- missing end-of-block", 1},
|
||||
infTest{"4 80 49 92 24 49 92 24 71 ff ff 93 11 0", "invalid literal/lengths set", 1},
|
||||
infTest{"4 80 49 92 24 49 92 24 f b4 ff ff c3 84", "invalid distances set", 1},
|
||||
infTest{"4 c0 81 8 0 0 0 0 20 7f eb b 0 0", "invalid literal/length code", 1},
|
||||
infTest{"2 7e ff ff", "invalid distance code", 1},
|
||||
infTest{"c c0 81 0 0 0 0 0 90 ff 6b 4 0", "invalid distance too far back", 1},
|
||||
|
||||
// also trailer mismatch just in inflate()
|
||||
infTest{"1f 8b 8 0 0 0 0 0 0 0 3 0 0 0 0 1", "incorrect data check", -1},
|
||||
infTest{"1f 8b 8 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 1", "incorrect length check", -1},
|
||||
infTest{"5 c0 21 d 0 0 0 80 b0 fe 6d 2f 91 6c", "pull 17", 0},
|
||||
infTest{"5 e0 81 91 24 cb b2 2c 49 e2 f 2e 8b 9a 47 56 9f fb fe ec d2 ff 1f", "long code", 0},
|
||||
infTest{"ed c0 1 1 0 0 0 40 20 ff 57 1b 42 2c 4f", "length extra", 0},
|
||||
infTest{"ed cf c1 b1 2c 47 10 c4 30 fa 6f 35 1d 1 82 59 3d fb be 2e 2a fc f c", "long distance and extra", 0},
|
||||
infTest{"ed c0 81 0 0 0 0 80 a0 fd a9 17 a9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6", "window end", 0},
|
||||
}
|
||||
|
||||
func TestInflate(t *testing.T) {
|
||||
for _, test := range infTests {
|
||||
hex := strings.Split(test.hex, " ")
|
||||
data := make([]byte, len(hex))
|
||||
for i, h := range hex {
|
||||
b, _ := strconv.ParseInt(h, 16, 32)
|
||||
data[i] = byte(b)
|
||||
}
|
||||
buf := bytes.NewReader(data)
|
||||
r := NewReader(buf)
|
||||
|
||||
_, err := io.Copy(ioutil.Discard, r)
|
||||
if (test.n == 0 && err == nil) || (test.n != 0 && err != nil) {
|
||||
t.Logf("%q: OK:", test.id)
|
||||
t.Logf(" - got %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if test.n == 0 && err != nil {
|
||||
t.Errorf("%q: Expected no error, but got %v", test.id, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if test.n != 0 && err == nil {
|
||||
t.Errorf("%q:Expected an error, but got none", test.id)
|
||||
continue
|
||||
}
|
||||
t.Fatal(test.n, err)
|
||||
}
|
||||
|
||||
for _, test := range infOutTests {
|
||||
hex := strings.Split(test.hex, " ")
|
||||
data := make([]byte, len(hex))
|
||||
for i, h := range hex {
|
||||
b, _ := strconv.ParseInt(h, 16, 32)
|
||||
data[i] = byte(b)
|
||||
}
|
||||
buf := bytes.NewReader(data)
|
||||
r := NewReader(buf)
|
||||
|
||||
_, err := io.Copy(ioutil.Discard, r)
|
||||
if test.err == (err != nil) {
|
||||
t.Logf("%q: OK:", test.id)
|
||||
t.Logf(" - got %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if test.err == false && err != nil {
|
||||
t.Errorf("%q: Expected no error, but got %v", test.id, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if test.err && err == nil {
|
||||
t.Errorf("%q: Expected an error, but got none", test.id)
|
||||
continue
|
||||
}
|
||||
t.Fatal(test.err, err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Tests ported from zlib/test/infcover.c
|
||||
// Since zlib inflate is push (writer) instead of pull (reader),
|
||||
// some of the window size tests have been removed, since they
|
||||
// are irrelevant.
|
||||
type infOutTest struct {
|
||||
hex string
|
||||
id string
|
||||
step int
|
||||
win int
|
||||
length int
|
||||
err bool
|
||||
}
|
||||
|
||||
var infOutTests = []infOutTest{
|
||||
infOutTest{"2 8 20 80 0 3 0", "inflate_fast TYPE return", 0, -15, 258, false},
|
||||
infOutTest{"63 18 5 40 c 0", "window wrap", 3, -8, 300, false},
|
||||
infOutTest{"e5 e0 81 ad 6d cb b2 2c c9 01 1e 59 63 ae 7d ee fb 4d fd b5 35 41 68 ff 7f 0f 0 0 0", "fast length extra bits", 0, -8, 258, true},
|
||||
infOutTest{"25 fd 81 b5 6d 59 b6 6a 49 ea af 35 6 34 eb 8c b9 f6 b9 1e ef 67 49 50 fe ff ff 3f 0 0", "fast distance extra bits", 0, -8, 258, true},
|
||||
infOutTest{"3 7e 0 0 0 0 0", "fast invalid distance code", 0, -8, 258, true},
|
||||
infOutTest{"1b 7 0 0 0 0 0", "fast invalid literal/length code", 0, -8, 258, true},
|
||||
infOutTest{"d c7 1 ae eb 38 c 4 41 a0 87 72 de df fb 1f b8 36 b1 38 5d ff ff 0", "fast 2nd level codes and too far back", 0, -8, 258, true},
|
||||
infOutTest{"63 18 5 8c 10 8 0 0 0 0", "very common case", 0, -8, 259, false},
|
||||
infOutTest{"63 60 60 18 c9 0 8 18 18 18 26 c0 28 0 29 0 0 0", "contiguous and wrap around window", 6, -8, 259, false},
|
||||
infOutTest{"63 0 3 0 0 0 0 0", "copy direct from output", 0, -8, 259, false},
|
||||
infOutTest{"1f 8b 0 0", "bad gzip method", 0, 31, 0, true},
|
||||
infOutTest{"1f 8b 8 80", "bad gzip flags", 0, 31, 0, true},
|
||||
infOutTest{"77 85", "bad zlib method", 0, 15, 0, true},
|
||||
infOutTest{"78 9c", "bad zlib window size", 0, 8, 0, true},
|
||||
infOutTest{"1f 8b 8 1e 0 0 0 0 0 0 1 0 0 0 0 0 0", "bad header crc", 0, 47, 1, true},
|
||||
infOutTest{"1f 8b 8 2 0 0 0 0 0 0 1d 26 3 0 0 0 0 0 0 0 0 0", "check gzip length", 0, 47, 0, true},
|
||||
infOutTest{"78 90", "bad zlib header check", 0, 47, 0, true},
|
||||
infOutTest{"8 b8 0 0 0 1", "need dictionary", 0, 8, 0, true},
|
||||
infOutTest{"63 18 68 30 d0 0 0", "force split window update", 4, -8, 259, false},
|
||||
infOutTest{"3 0", "use fixed blocks", 0, -15, 1, false},
|
||||
infOutTest{"", "bad window size", 0, 1, 0, true},
|
||||
}
|
||||
|
||||
func TestWriteTo(t *testing.T) {
|
||||
input := make([]byte, 100000)
|
||||
n, err := rand.Read(input)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if n != len(input) {
|
||||
t.Fatal("did not fill buffer")
|
||||
}
|
||||
compressed := &bytes.Buffer{}
|
||||
w, err := NewWriter(compressed, -2)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
n, err = w.Write(input)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if n != len(input) {
|
||||
t.Fatal("did not fill buffer")
|
||||
}
|
||||
w.Close()
|
||||
buf := compressed.Bytes()
|
||||
|
||||
dec := NewReader(bytes.NewBuffer(buf))
|
||||
// ReadAll does not use WriteTo, but we wrap it in a NopCloser to be sure.
|
||||
readall, err := ioutil.ReadAll(ioutil.NopCloser(dec))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(readall) != len(input) {
|
||||
t.Fatal("did not decompress everything")
|
||||
}
|
||||
|
||||
dec = NewReader(bytes.NewBuffer(buf))
|
||||
wtbuf := &bytes.Buffer{}
|
||||
written, err := dec.(io.WriterTo).WriteTo(wtbuf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if written != int64(len(input)) {
|
||||
t.Error("Returned length did not match, expected", len(input), "got", written)
|
||||
}
|
||||
if wtbuf.Len() != len(input) {
|
||||
t.Error("Actual Length did not match, expected", len(input), "got", wtbuf.Len())
|
||||
}
|
||||
if !bytes.Equal(wtbuf.Bytes(), input) {
|
||||
t.Fatal("output did not match input")
|
||||
}
|
||||
}
|
97
vendor/github.com/klauspost/compress/flate/reader_test.go
generated
vendored
Normal file
@ -0,0 +1,97 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNlitOutOfRange(t *testing.T) {
|
||||
// Trying to decode this bogus flate data, which has a Huffman table
|
||||
// with nlit=288, should not panic.
|
||||
io.Copy(ioutil.Discard, NewReader(strings.NewReader(
|
||||
"\xfc\xfe\x36\xe7\x5e\x1c\xef\xb3\x55\x58\x77\xb6\x56\xb5\x43\xf4"+
|
||||
"\x6f\xf2\xd2\xe6\x3d\x99\xa0\x85\x8c\x48\xeb\xf8\xda\x83\x04\x2a"+
|
||||
"\x75\xc4\xf8\x0f\x12\x11\xb9\xb4\x4b\x09\xa0\xbe\x8b\x91\x4c")))
|
||||
}
|
||||
|
||||
const (
|
||||
digits = iota
|
||||
twain
|
||||
)
|
||||
|
||||
var testfiles = []string{
|
||||
// Digits is the digits of the irrational number e. Its decimal representation
|
||||
// does not repeat, but there are only 10 possible digits, so it should be
|
||||
// reasonably compressible.
|
||||
digits: "../testdata/e.txt",
|
||||
// Twain is Project Gutenberg's edition of Mark Twain's classic English novel.
|
||||
twain: "../testdata/Mark.Twain-Tom.Sawyer.txt",
|
||||
}
|
||||
|
||||
func benchmarkDecode(b *testing.B, testfile, level, n int) {
|
||||
b.ReportAllocs()
|
||||
b.StopTimer()
|
||||
b.SetBytes(int64(n))
|
||||
buf0, err := ioutil.ReadFile(testfiles[testfile])
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
if len(buf0) == 0 {
|
||||
b.Fatalf("test file %q has no data", testfiles[testfile])
|
||||
}
|
||||
compressed := new(bytes.Buffer)
|
||||
w, err := NewWriter(compressed, level)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
for i := 0; i < n; i += len(buf0) {
|
||||
if len(buf0) > n-i {
|
||||
buf0 = buf0[:n-i]
|
||||
}
|
||||
io.Copy(w, bytes.NewReader(buf0))
|
||||
}
|
||||
w.Close()
|
||||
buf1 := compressed.Bytes()
|
||||
buf0, compressed, w = nil, nil, nil
|
||||
runtime.GC()
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
io.Copy(ioutil.Discard, NewReader(bytes.NewReader(buf1)))
|
||||
}
|
||||
}
|
||||
|
||||
// These short names are so that gofmt doesn't break the BenchmarkXxx function
|
||||
// bodies below over multiple lines.
|
||||
const (
|
||||
constant = ConstantCompression
|
||||
speed = BestSpeed
|
||||
default_ = DefaultCompression
|
||||
compress = BestCompression
|
||||
)
|
||||
|
||||
func BenchmarkDecodeDigitsSpeed1e4(b *testing.B) { benchmarkDecode(b, digits, speed, 1e4) }
|
||||
func BenchmarkDecodeDigitsSpeed1e5(b *testing.B) { benchmarkDecode(b, digits, speed, 1e5) }
|
||||
func BenchmarkDecodeDigitsSpeed1e6(b *testing.B) { benchmarkDecode(b, digits, speed, 1e6) }
|
||||
func BenchmarkDecodeDigitsDefault1e4(b *testing.B) { benchmarkDecode(b, digits, default_, 1e4) }
|
||||
func BenchmarkDecodeDigitsDefault1e5(b *testing.B) { benchmarkDecode(b, digits, default_, 1e5) }
|
||||
func BenchmarkDecodeDigitsDefault1e6(b *testing.B) { benchmarkDecode(b, digits, default_, 1e6) }
|
||||
func BenchmarkDecodeDigitsCompress1e4(b *testing.B) { benchmarkDecode(b, digits, compress, 1e4) }
|
||||
func BenchmarkDecodeDigitsCompress1e5(b *testing.B) { benchmarkDecode(b, digits, compress, 1e5) }
|
||||
func BenchmarkDecodeDigitsCompress1e6(b *testing.B) { benchmarkDecode(b, digits, compress, 1e6) }
|
||||
func BenchmarkDecodeTwainSpeed1e4(b *testing.B) { benchmarkDecode(b, twain, speed, 1e4) }
|
||||
func BenchmarkDecodeTwainSpeed1e5(b *testing.B) { benchmarkDecode(b, twain, speed, 1e5) }
|
||||
func BenchmarkDecodeTwainSpeed1e6(b *testing.B) { benchmarkDecode(b, twain, speed, 1e6) }
|
||||
func BenchmarkDecodeTwainDefault1e4(b *testing.B) { benchmarkDecode(b, twain, default_, 1e4) }
|
||||
func BenchmarkDecodeTwainDefault1e5(b *testing.B) { benchmarkDecode(b, twain, default_, 1e5) }
|
||||
func BenchmarkDecodeTwainDefault1e6(b *testing.B) { benchmarkDecode(b, twain, default_, 1e6) }
|
||||
func BenchmarkDecodeTwainCompress1e4(b *testing.B) { benchmarkDecode(b, twain, compress, 1e4) }
|
||||
func BenchmarkDecodeTwainCompress1e5(b *testing.B) { benchmarkDecode(b, twain, compress, 1e5) }
|
||||
func BenchmarkDecodeTwainCompress1e6(b *testing.B) { benchmarkDecode(b, twain, compress, 1e6) }
|
48
vendor/github.com/klauspost/compress/flate/reverse_bits.go
generated
vendored
Normal file
@ -0,0 +1,48 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
var reverseByte = [256]byte{
|
||||
0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0,
|
||||
0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0,
|
||||
0x08, 0x88, 0x48, 0xc8, 0x28, 0xa8, 0x68, 0xe8,
|
||||
0x18, 0x98, 0x58, 0xd8, 0x38, 0xb8, 0x78, 0xf8,
|
||||
0x04, 0x84, 0x44, 0xc4, 0x24, 0xa4, 0x64, 0xe4,
|
||||
0x14, 0x94, 0x54, 0xd4, 0x34, 0xb4, 0x74, 0xf4,
|
||||
0x0c, 0x8c, 0x4c, 0xcc, 0x2c, 0xac, 0x6c, 0xec,
|
||||
0x1c, 0x9c, 0x5c, 0xdc, 0x3c, 0xbc, 0x7c, 0xfc,
|
||||
0x02, 0x82, 0x42, 0xc2, 0x22, 0xa2, 0x62, 0xe2,
|
||||
0x12, 0x92, 0x52, 0xd2, 0x32, 0xb2, 0x72, 0xf2,
|
||||
0x0a, 0x8a, 0x4a, 0xca, 0x2a, 0xaa, 0x6a, 0xea,
|
||||
0x1a, 0x9a, 0x5a, 0xda, 0x3a, 0xba, 0x7a, 0xfa,
|
||||
0x06, 0x86, 0x46, 0xc6, 0x26, 0xa6, 0x66, 0xe6,
|
||||
0x16, 0x96, 0x56, 0xd6, 0x36, 0xb6, 0x76, 0xf6,
|
||||
0x0e, 0x8e, 0x4e, 0xce, 0x2e, 0xae, 0x6e, 0xee,
|
||||
0x1e, 0x9e, 0x5e, 0xde, 0x3e, 0xbe, 0x7e, 0xfe,
|
||||
0x01, 0x81, 0x41, 0xc1, 0x21, 0xa1, 0x61, 0xe1,
|
||||
0x11, 0x91, 0x51, 0xd1, 0x31, 0xb1, 0x71, 0xf1,
|
||||
0x09, 0x89, 0x49, 0xc9, 0x29, 0xa9, 0x69, 0xe9,
|
||||
0x19, 0x99, 0x59, 0xd9, 0x39, 0xb9, 0x79, 0xf9,
|
||||
0x05, 0x85, 0x45, 0xc5, 0x25, 0xa5, 0x65, 0xe5,
|
||||
0x15, 0x95, 0x55, 0xd5, 0x35, 0xb5, 0x75, 0xf5,
|
||||
0x0d, 0x8d, 0x4d, 0xcd, 0x2d, 0xad, 0x6d, 0xed,
|
||||
0x1d, 0x9d, 0x5d, 0xdd, 0x3d, 0xbd, 0x7d, 0xfd,
|
||||
0x03, 0x83, 0x43, 0xc3, 0x23, 0xa3, 0x63, 0xe3,
|
||||
0x13, 0x93, 0x53, 0xd3, 0x33, 0xb3, 0x73, 0xf3,
|
||||
0x0b, 0x8b, 0x4b, 0xcb, 0x2b, 0xab, 0x6b, 0xeb,
|
||||
0x1b, 0x9b, 0x5b, 0xdb, 0x3b, 0xbb, 0x7b, 0xfb,
|
||||
0x07, 0x87, 0x47, 0xc7, 0x27, 0xa7, 0x67, 0xe7,
|
||||
0x17, 0x97, 0x57, 0xd7, 0x37, 0xb7, 0x77, 0xf7,
|
||||
0x0f, 0x8f, 0x4f, 0xcf, 0x2f, 0xaf, 0x6f, 0xef,
|
||||
0x1f, 0x9f, 0x5f, 0xdf, 0x3f, 0xbf, 0x7f, 0xff,
|
||||
}
|
||||
|
||||
func reverseUint16(v uint16) uint16 {
|
||||
return uint16(reverseByte[v>>8]) | uint16(reverseByte[v&0xFF])<<8
|
||||
}
|
||||
|
||||
func reverseBits(number uint16, bitLength byte) uint16 {
|
||||
return reverseUint16(number << uint8(16-bitLength))
|
||||
}
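// Illustrative aside (not part of the original source): RFC 1951 packs
// Huffman codes into the stream most-significant-bit first, while the bit
// buffer above is consumed least-significant-bit first, so code bits are
// reversed before use (see the fixed-distance lookup via reverseByte in
// huffmanBlock). The function name is a placeholder; "fmt" is assumed to be
// imported.
func exampleReverseBits() {
	fmt.Printf("%03b\n", reverseBits(3, 3)) // 0b011 -> 0b110, prints 110
}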
|
558
vendor/github.com/klauspost/compress/flate/snappy.go
generated
vendored
Normal file
@ -0,0 +1,558 @@
|
||||
// Copyright 2011 The Snappy-Go Authors. All rights reserved.
|
||||
// Modified for deflate by Klaus Post (c) 2015.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package flate
|
||||
|
||||
// We limit how far copy back-references can go, the same as the C++ code.
|
||||
const maxOffset = 1 << 15
|
||||
|
||||
// emitLiteral writes a literal chunk to dst.
|
||||
func emitLiteral(dst *tokens, lit []byte) {
|
||||
ol := dst.n
|
||||
for i, v := range lit {
|
||||
dst.tokens[i+ol] = token(v)
|
||||
}
|
||||
dst.n += len(lit)
|
||||
}
|
||||
|
||||
// emitCopy writes a copy chunk to dst.
|
||||
func emitCopy(dst *tokens, offset, length int) {
|
||||
dst.tokens[dst.n] = matchToken(uint32(length-3), uint32(offset-minOffsetSize))
|
||||
dst.n++
|
||||
}
|
||||
|
||||
type snappyEnc interface {
|
||||
Encode(dst *tokens, src []byte)
|
||||
Reset()
|
||||
}
|
||||
|
||||
func newSnappy(level int) snappyEnc {
|
||||
if useSSE42 {
|
||||
e := &snappySSE4{snappyGen: snappyGen{cur: 1}}
|
||||
switch level {
|
||||
case 3:
|
||||
e.enc = e.encodeL3
|
||||
return e
|
||||
}
|
||||
}
|
||||
e := &snappyGen{cur: 1}
|
||||
switch level {
|
||||
case 1:
|
||||
e.enc = e.encodeL1
|
||||
case 2:
|
||||
e.enc = e.encodeL2
|
||||
case 3:
|
||||
e.enc = e.encodeL3
|
||||
default:
|
||||
panic("invalid level specified")
|
||||
}
|
||||
return e
|
||||
}
|
||||
|
||||
const tableBits = 14 // Bits used in the table
|
||||
const tableSize = 1 << tableBits // Size of the table
|
||||
|
||||
// snappyGen maintains the table for matches,
|
||||
// and the previous byte block for level 2.
|
||||
// This is the generic implementation.
|
||||
type snappyGen struct {
|
||||
table [tableSize]int64
|
||||
block [maxStoreBlockSize]byte
|
||||
prev []byte
|
||||
cur int
|
||||
enc func(dst *tokens, src []byte)
|
||||
}
|
||||
|
||||
func (e *snappyGen) Encode(dst *tokens, src []byte) {
|
||||
e.enc(dst, src)
|
||||
}
|
||||
|
||||
// encodeL1 uses Snappy-like compression, but stores the output as Huffman
|
||||
// blocks.
|
||||
func (e *snappyGen) encodeL1(dst *tokens, src []byte) {
|
||||
// Return early if src is short.
|
||||
if len(src) <= 4 {
|
||||
if len(src) != 0 {
|
||||
emitLiteral(dst, src)
|
||||
}
|
||||
e.cur += 4
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure that e.cur doesn't wrap, mainly an issue on 32 bits.
|
||||
if e.cur > 1<<30 {
|
||||
e.cur = 1
|
||||
}
|
||||
|
||||
// Iterate over the source bytes.
|
||||
var (
|
||||
s int // The iterator position.
|
||||
t int // The last position with the same hash as s.
|
||||
lit int // The start position of any pending literal bytes.
|
||||
)
|
||||
|
||||
for s+3 < len(src) {
|
||||
// Update the hash table.
|
||||
b0, b1, b2, b3 := src[s], src[s+1], src[s+2], src[s+3]
|
||||
h := uint32(b0) | uint32(b1)<<8 | uint32(b2)<<16 | uint32(b3)<<24
|
||||
p := &e.table[(h*0x1e35a7bd)>>(32-tableBits)]
|
||||
// We need to store values in [-1, inf) in the table.
|
||||
// To save some initialization time, we make sure that
|
||||
// e.cur is never zero.
|
||||
t, *p = int(*p)-e.cur, int64(s+e.cur)
|
||||
|
||||
offset := uint(s - t - 1)
|
||||
|
||||
// If t is invalid or src[s:s+4] differs from src[t:t+4], accumulate a literal byte.
|
||||
if t < 0 || offset >= (maxOffset-1) || b0 != src[t] || b1 != src[t+1] || b2 != src[t+2] || b3 != src[t+3] {
|
||||
// Skip 1 byte for every 16 consecutive misses.
|
||||
s += 1 + ((s - lit) >> 4)
|
||||
continue
|
||||
}
|
||||
// Otherwise, we have a match. First, emit any pending literal bytes.
|
||||
if lit != s {
|
||||
emitLiteral(dst, src[lit:s])
|
||||
}
|
||||
// Extend the match to be as long as possible.
|
||||
s0 := s
|
||||
s1 := s + maxMatchLength
|
||||
if s1 > len(src) {
|
||||
s1 = len(src)
|
||||
}
|
||||
s, t = s+4, t+4
|
||||
for s < s1 && src[s] == src[t] {
|
||||
s++
|
||||
t++
|
||||
}
|
||||
// Emit the copied bytes.
|
||||
// inlined: emitCopy(dst, s-t, s-s0)
|
||||
|
||||
dst.tokens[dst.n] = matchToken(uint32(s-s0-3), uint32(s-t-minOffsetSize))
|
||||
dst.n++
|
||||
lit = s
|
||||
}
|
||||
|
||||
// Emit any final pending literal bytes and return.
|
||||
if lit != len(src) {
|
||||
emitLiteral(dst, src[lit:])
|
||||
}
|
||||
e.cur += len(src)
|
||||
}
|
||||
|
||||
// encodeL2 uses a similar algorithm to level 1, but is capable
|
||||
// of matching across blocks, giving better compression at a small slowdown.
|
||||
func (e *snappyGen) encodeL2(dst *tokens, src []byte) {
|
||||
// Return early if src is short.
|
||||
if len(src) <= 4 {
|
||||
if len(src) != 0 {
|
||||
emitLiteral(dst, src)
|
||||
}
|
||||
e.prev = nil
|
||||
e.cur += len(src)
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure that e.cur doesn't wrap, mainly an issue on 32 bits.
|
||||
if e.cur > 1<<30 {
|
||||
e.cur = 1
|
||||
}
|
||||
|
||||
// Iterate over the source bytes.
|
||||
var (
|
||||
s int // The iterator position.
|
||||
t int // The last position with the same hash as s.
|
||||
lit int // The start position of any pending literal bytes.
|
||||
)
|
||||
|
||||
for s+3 < len(src) {
|
||||
// Update the hash table.
|
||||
b0, b1, b2, b3 := src[s], src[s+1], src[s+2], src[s+3]
|
||||
h := uint32(b0) | uint32(b1)<<8 | uint32(b2)<<16 | uint32(b3)<<24
|
||||
p := &e.table[(h*0x1e35a7bd)>>(32-tableBits)]
|
||||
// We need to store values in [-1, inf) in the table.
|
||||
// To save some initialization time, we make sure that
|
||||
// e.cur is never zero.
|
||||
t, *p = int(*p)-e.cur, int64(s+e.cur)
|
||||
|
||||
// If t is positive, the match starts in the current block
|
||||
if t >= 0 {
|
||||
|
||||
offset := uint(s - t - 1)
|
||||
// Check that the offset is valid and that we match at least 4 bytes
|
||||
if offset >= (maxOffset-1) || b0 != src[t] || b1 != src[t+1] || b2 != src[t+2] || b3 != src[t+3] {
|
||||
// Skip 1 byte for every 32 consecutive misses.
|
||||
s += 1 + ((s - lit) >> 5)
|
||||
continue
|
||||
}
|
||||
// Otherwise, we have a match. First, emit any pending literal bytes.
|
||||
if lit != s {
|
||||
emitLiteral(dst, src[lit:s])
|
||||
}
|
||||
// Extend the match to be as long as possible.
|
||||
s0 := s
|
||||
s1 := s + maxMatchLength
|
||||
if s1 > len(src) {
|
||||
s1 = len(src)
|
||||
}
|
||||
s, t = s+4, t+4
|
||||
for s < s1 && src[s] == src[t] {
|
||||
s++
|
||||
t++
|
||||
}
|
||||
// Emit the copied bytes.
|
||||
// inlined: emitCopy(dst, s-t, s-s0)
|
||||
dst.tokens[dst.n] = matchToken(uint32(s-s0-3), uint32(s-t-minOffsetSize))
|
||||
dst.n++
|
||||
lit = s
|
||||
continue
|
||||
}
|
||||
// We found a match in the previous block.
|
||||
tp := len(e.prev) + t
|
||||
if tp < 0 || t > -5 || s-t >= maxOffset || b0 != e.prev[tp] || b1 != e.prev[tp+1] || b2 != e.prev[tp+2] || b3 != e.prev[tp+3] {
|
||||
// Skip 1 byte for every 32 consecutive misses.
|
||||
s += 1 + ((s - lit) >> 5)
|
||||
continue
|
||||
}
|
||||
// Otherwise, we have a match. First, emit any pending literal bytes.
|
||||
if lit != s {
|
||||
emitLiteral(dst, src[lit:s])
|
||||
}
|
||||
// Extend the match to be as long as possible.
|
||||
s0 := s
|
||||
s1 := s + maxMatchLength
|
||||
if s1 > len(src) {
|
||||
s1 = len(src)
|
||||
}
|
||||
s, tp = s+4, tp+4
|
||||
for s < s1 && src[s] == e.prev[tp] {
|
||||
s++
|
||||
tp++
|
||||
if tp == len(e.prev) {
|
||||
t = 0
|
||||
// continue in current buffer
|
||||
for s < s1 && src[s] == src[t] {
|
||||
s++
|
||||
t++
|
||||
}
|
||||
goto l
|
||||
}
|
||||
}
|
||||
l:
|
||||
// Emit the copied bytes.
|
||||
if t < 0 {
|
||||
t = tp - len(e.prev)
|
||||
}
|
||||
dst.tokens[dst.n] = matchToken(uint32(s-s0-3), uint32(s-t-minOffsetSize))
|
||||
dst.n++
|
||||
lit = s
|
||||
|
||||
}
|
||||
|
||||
// Emit any final pending literal bytes and return.
|
||||
if lit != len(src) {
|
||||
emitLiteral(dst, src[lit:])
|
||||
}
|
||||
e.cur += len(src)
|
||||
// Store this block, if it was full length.
|
||||
if len(src) == maxStoreBlockSize {
|
||||
copy(e.block[:], src)
|
||||
e.prev = e.block[:len(src)]
|
||||
} else {
|
||||
e.prev = nil
|
||||
}
|
||||
}
|
||||
|
||||
// encodeL3 uses a similar algorithm to level 2, but
|
||||
// will keep two matches per hash.
|
||||
// Both hashes are checked if the first isn't ok, and the longest is selected.
|
||||
func (e *snappyGen) encodeL3(dst *tokens, src []byte) {
|
||||
// Return early if src is short.
|
||||
if len(src) <= 4 {
|
||||
if len(src) != 0 {
|
||||
emitLiteral(dst, src)
|
||||
}
|
||||
e.prev = nil
|
||||
e.cur += len(src)
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure that e.cur doesn't wrap, mainly an issue on 32 bits.
|
||||
if e.cur > 1<<30 {
|
||||
e.cur = 1
|
||||
}
|
||||
|
||||
// Iterate over the source bytes.
|
||||
var (
|
||||
s int // The iterator position.
|
||||
lit int // The start position of any pending literal bytes.
|
||||
)
|
||||
|
||||
for s+3 < len(src) {
|
||||
// Update the hash table.
|
||||
h := uint32(src[s]) | uint32(src[s+1])<<8 | uint32(src[s+2])<<16 | uint32(src[s+3])<<24
|
||||
p := &e.table[(h*0x1e35a7bd)>>(32-tableBits)]
|
||||
tmp := *p
|
||||
p1 := int(tmp & 0xffffffff) // Closest match position
|
||||
p2 := int(tmp >> 32) // Furthest match position
|
||||
|
||||
// We need to store values in [-1, inf) in the table.
|
||||
// To save some initialization time, we make sure that
|
||||
// e.cur is never zero.
|
||||
t1 := p1 - e.cur
|
||||
|
||||
var l2 int
|
||||
var t2 int
|
||||
l1 := e.matchlen(s, t1, src)
|
||||
// If the first match was ok, don't do the second.
|
||||
if l1 < 16 {
|
||||
t2 = p2 - e.cur
|
||||
l2 = e.matchlen(s, t2, src)
|
||||
|
||||
// If both are short, continue
|
||||
if l1 < 4 && l2 < 4 {
|
||||
// Update hash table
|
||||
*p = int64(s+e.cur) | (int64(p1) << 32)
|
||||
// Skip 1 byte for every 32 consecutive misses.
|
||||
s += 1 + ((s - lit) >> 5)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we have a match. First, emit any pending literal bytes.
|
||||
if lit != s {
|
||||
emitLiteral(dst, src[lit:s])
|
||||
}
|
||||
// Update hash table
|
||||
*p = int64(s+e.cur) | (int64(p1) << 32)
|
||||
|
||||
// Store the longest match. l1 will be closer, so prefer it on equal length.
|
||||
if l1 >= l2 {
|
||||
dst.tokens[dst.n] = matchToken(uint32(l1-3), uint32(s-t1-minOffsetSize))
|
||||
s += l1
|
||||
} else {
|
||||
dst.tokens[dst.n] = matchToken(uint32(l2-3), uint32(s-t2-minOffsetSize))
|
||||
s += l2
|
||||
}
|
||||
dst.n++
|
||||
lit = s
|
||||
}
|
||||
|
||||
// Emit any final pending literal bytes and return.
|
||||
if lit != len(src) {
|
||||
emitLiteral(dst, src[lit:])
|
||||
}
|
||||
e.cur += len(src)
|
||||
// Store this block, if it was full length.
|
||||
if len(src) == maxStoreBlockSize {
|
||||
copy(e.block[:], src)
|
||||
e.prev = e.block[:len(src)]
|
||||
} else {
|
||||
e.prev = nil
|
||||
}
|
||||
}
|
||||
|
||||
func (e *snappyGen) matchlen(s, t int, src []byte) int {
|
||||
// If t is invalid or src[s:s+4] differs from src[t:t+4], accumulate a literal byte.
|
||||
offset := uint(s - t - 1)
|
||||
|
||||
// If we are inside the current block
|
||||
if t >= 0 {
|
||||
if offset >= (maxOffset-1) ||
|
||||
src[s] != src[t] || src[s+1] != src[t+1] ||
|
||||
src[s+2] != src[t+2] || src[s+3] != src[t+3] {
|
||||
return 0
|
||||
}
|
||||
// Extend the match to be as long as possible.
|
||||
s0 := s
|
||||
s1 := s + maxMatchLength
|
||||
if s1 > len(src) {
|
||||
s1 = len(src)
|
||||
}
|
||||
s, t = s+4, t+4
|
||||
for s < s1 && src[s] == src[t] {
|
||||
s++
|
||||
t++
|
||||
}
|
||||
return s - s0
|
||||
}
|
||||
|
||||
// We found a match in the previous block.
|
||||
tp := len(e.prev) + t
|
||||
if tp < 0 || offset >= (maxOffset-1) || t > -5 ||
|
||||
src[s] != e.prev[tp] || src[s+1] != e.prev[tp+1] ||
|
||||
src[s+2] != e.prev[tp+2] || src[s+3] != e.prev[tp+3] {
|
||||
return 0
|
||||
}
|
||||
|
||||
// Extend the match to be as long as possible.
|
||||
s0 := s
|
||||
s1 := s + maxMatchLength
|
||||
if s1 > len(src) {
|
||||
s1 = len(src)
|
||||
}
|
||||
s, tp = s+4, tp+4
|
||||
for s < s1 && src[s] == e.prev[tp] {
|
||||
s++
|
||||
tp++
|
||||
if tp == len(e.prev) {
|
||||
t = 0
|
||||
// continue in current buffer
|
||||
for s < s1 && src[s] == src[t] {
|
||||
s++
|
||||
t++
|
||||
}
|
||||
return s - s0
|
||||
}
|
||||
}
|
||||
return s - s0
|
||||
}
|
||||
|
||||
// Reset the encoding table.
|
||||
func (e *snappyGen) Reset() {
|
||||
e.prev = nil
|
||||
}
|
||||
|
||||
// snappySSE4 extends snappyGen.
|
||||
// This implementation can use SSE 4.2 for length matching.
|
||||
type snappySSE4 struct {
|
||||
snappyGen
|
||||
}
|
||||
|
||||
// encodeL3 uses a similar algorithm to level 2,
|
||||
// but will keep two matches per hash.
|
||||
// Both hashes are checked if the first isn't ok, and the longest is selected.
|
||||
func (e *snappySSE4) encodeL3(dst *tokens, src []byte) {
|
||||
// Return early if src is short.
|
||||
if len(src) <= 4 {
|
||||
if len(src) != 0 {
|
||||
emitLiteral(dst, src)
|
||||
}
|
||||
e.prev = nil
|
||||
e.cur += len(src)
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure that e.cur doesn't wrap, mainly an issue on 32 bits.
|
||||
if e.cur > 1<<30 {
|
||||
e.cur = 1
|
||||
}
|
||||
|
||||
// Iterate over the source bytes.
|
||||
var (
|
||||
s int // The iterator position.
|
||||
lit int // The start position of any pending literal bytes.
|
||||
)
|
||||
|
||||
for s+3 < len(src) {
|
||||
// Load potential matches from hash table.
|
||||
h := uint32(src[s]) | uint32(src[s+1])<<8 | uint32(src[s+2])<<16 | uint32(src[s+3])<<24
|
||||
p := &e.table[(h*0x1e35a7bd)>>(32-tableBits)]
|
||||
tmp := *p
|
||||
p1 := int(tmp & 0xffffffff) // Closest match position
|
||||
p2 := int(tmp >> 32) // Furthest match position
|
||||
|
||||
// We need to store values in [-1, inf) in the table.
|
||||
// To save some initialization time, we make sure that
|
||||
// e.cur is never zero.
|
||||
t1 := int(p1) - e.cur
|
||||
|
||||
var l2 int
|
||||
var t2 int
|
||||
l1 := e.matchlen(s, t1, src)
|
||||
// If the first match was ok, don't do the second.
|
||||
if l1 < 16 {
|
||||
t2 = int(p2) - e.cur
|
||||
l2 = e.matchlen(s, t2, src)
|
||||
|
||||
// If both are short, continue
|
||||
if l1 < 4 && l2 < 4 {
|
||||
// Update hash table
|
||||
*p = int64(s+e.cur) | (int64(p1) << 32)
|
||||
// Skip 1 byte for every 32 consecutive misses.
|
||||
s += 1 + ((s - lit) >> 5)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we have a match. First, emit any pending literal bytes.
|
||||
if lit != s {
|
||||
emitLiteral(dst, src[lit:s])
|
||||
}
|
||||
// Update hash table
|
||||
*p = int64(s+e.cur) | (int64(p1) << 32)
|
||||
|
||||
// Store the longest match. l1 will be closer, so prefer it on equal length.
|
||||
if l1 >= l2 {
|
||||
dst.tokens[dst.n] = matchToken(uint32(l1-3), uint32(s-t1-minOffsetSize))
|
||||
s += l1
|
||||
} else {
|
||||
dst.tokens[dst.n] = matchToken(uint32(l2-3), uint32(s-t2-minOffsetSize))
|
||||
s += l2
|
||||
}
|
||||
dst.n++
|
||||
lit = s
|
||||
}
|
||||
|
||||
// Emit any final pending literal bytes and return.
|
||||
if lit != len(src) {
|
||||
emitLiteral(dst, src[lit:])
|
||||
}
|
||||
e.cur += len(src)
|
||||
// Store this block, if it was full length.
|
||||
if len(src) == maxStoreBlockSize {
|
||||
copy(e.block[:], src)
|
||||
e.prev = e.block[:len(src)]
|
||||
} else {
|
||||
e.prev = nil
|
||||
}
|
||||
}
|
||||
|
||||
func (e *snappySSE4) matchlen(s, t int, src []byte) int {
|
||||
// If t is invalid or src[s:s+4] differs from src[t:t+4], accumulate a literal byte.
|
||||
offset := uint(s - t - 1)
|
||||
|
||||
// If we are inside the current block
|
||||
if t >= 0 {
|
||||
if offset >= (maxOffset - 1) {
|
||||
return 0
|
||||
}
|
||||
length := len(src) - s
|
||||
if length > maxMatchLength {
|
||||
length = maxMatchLength
|
||||
}
|
||||
// Extend the match to be as long as possible.
|
||||
return matchLenSSE4(src[t:], src[s:], length)
|
||||
}
|
||||
|
||||
// We found a match in the previous block.
|
||||
tp := len(e.prev) + t
|
||||
if tp < 0 || offset >= (maxOffset-1) || t > -5 ||
|
||||
src[s] != e.prev[tp] || src[s+1] != e.prev[tp+1] ||
|
||||
src[s+2] != e.prev[tp+2] || src[s+3] != e.prev[tp+3] {
|
||||
return 0
|
||||
}
|
||||
|
||||
// Extend the match to be as long as possible.
|
||||
s0 := s
|
||||
s1 := s + maxMatchLength
|
||||
if s1 > len(src) {
|
||||
s1 = len(src)
|
||||
}
|
||||
s, tp = s+4, tp+4
|
||||
for s < s1 && src[s] == e.prev[tp] {
|
||||
s++
|
||||
tp++
|
||||
if tp == len(e.prev) {
|
||||
t = 0
|
||||
// continue in current buffer
|
||||
for s < s1 && src[s] == src[t] {
|
||||
s++
|
||||
t++
|
||||
}
|
||||
return s - s0
|
||||
}
|
||||
}
|
||||
return s - s0
|
||||
}
|
BIN
vendor/github.com/klauspost/compress/flate/testdata/huffman-null-max.dyn.expect
generated
vendored
Normal file
Binary file not shown.
BIN
vendor/github.com/klauspost/compress/flate/testdata/huffman-null-max.dyn.expect-noinput
generated
vendored
Normal file
Binary file not shown.
BIN
vendor/github.com/klauspost/compress/flate/testdata/huffman-null-max.golden
generated
vendored
Normal file
Binary file not shown.