Deployed the page to Github Pages.
This commit is contained in:
parent
1d79754e93
commit
2c89899458
62797 changed files with 6551425 additions and 15279 deletions
202
node_modules/@sigstore/bundle/LICENSE
generated
vendored
Normal file
202
node_modules/@sigstore/bundle/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2023 The Sigstore Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
19
node_modules/@sigstore/bundle/README.md
generated
vendored
Normal file
19
node_modules/@sigstore/bundle/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# @sigstore/bundle · [](https://www.npmjs.com/package/@sigstore/bundle) [](https://github.com/sigstore/sigstore-js/actions/workflows/ci.yml) [](https://github.com/sigstore/sigstore-js/actions/workflows/smoke-test.yml)
|
||||
|
||||
A JavaScript library for working with the Sigstore bundle format.
|
||||
|
||||
## Features
|
||||
|
||||
- TypeScript types for the different Sigstore bundle versions.
|
||||
- Bundle validation functions.
|
||||
- Support for serializing/deserializing bundles to/from JSON.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js version >= 16.14.0
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
npm install @sigstore/bundle
|
||||
```
|
||||
19
node_modules/@sigstore/bundle/dist/build.d.ts
generated
vendored
Normal file
19
node_modules/@sigstore/bundle/dist/build.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
/// <reference types="node" />
|
||||
import type { BundleWithDsseEnvelope, BundleWithMessageSignature } from './bundle';
|
||||
type VerificationMaterialOptions = {
|
||||
certificate?: Buffer;
|
||||
keyHint?: string;
|
||||
singleCertificate?: boolean;
|
||||
};
|
||||
type MessageSignatureBundleOptions = {
|
||||
digest: Buffer;
|
||||
signature: Buffer;
|
||||
} & VerificationMaterialOptions;
|
||||
type DSSEBundleOptions = {
|
||||
artifact: Buffer;
|
||||
artifactType: string;
|
||||
signature: Buffer;
|
||||
} & VerificationMaterialOptions;
|
||||
export declare function toMessageSignatureBundle(options: MessageSignatureBundleOptions): BundleWithMessageSignature;
|
||||
export declare function toDSSEBundle(options: DSSEBundleOptions): BundleWithDsseEnvelope;
|
||||
export {};
|
||||
101
node_modules/@sigstore/bundle/dist/build.js
generated
vendored
Normal file
101
node_modules/@sigstore/bundle/dist/build.js
generated
vendored
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const protobuf_specs_1 = require("@sigstore/protobuf-specs");
|
||||
const bundle_1 = require("./bundle");
|
||||
// Message signature bundle - $case: 'messageSignature'
|
||||
function toMessageSignatureBundle(options) {
|
||||
return {
|
||||
mediaType: options.singleCertificate
|
||||
? bundle_1.BUNDLE_V03_MEDIA_TYPE
|
||||
: bundle_1.BUNDLE_V02_MEDIA_TYPE,
|
||||
content: {
|
||||
$case: 'messageSignature',
|
||||
messageSignature: {
|
||||
messageDigest: {
|
||||
algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
|
||||
digest: options.digest,
|
||||
},
|
||||
signature: options.signature,
|
||||
},
|
||||
},
|
||||
verificationMaterial: toVerificationMaterial(options),
|
||||
};
|
||||
}
|
||||
exports.toMessageSignatureBundle = toMessageSignatureBundle;
|
||||
// DSSE envelope bundle - $case: 'dsseEnvelope'
|
||||
function toDSSEBundle(options) {
|
||||
return {
|
||||
mediaType: options.singleCertificate
|
||||
? bundle_1.BUNDLE_V03_MEDIA_TYPE
|
||||
: bundle_1.BUNDLE_V02_MEDIA_TYPE,
|
||||
content: {
|
||||
$case: 'dsseEnvelope',
|
||||
dsseEnvelope: toEnvelope(options),
|
||||
},
|
||||
verificationMaterial: toVerificationMaterial(options),
|
||||
};
|
||||
}
|
||||
exports.toDSSEBundle = toDSSEBundle;
|
||||
function toEnvelope(options) {
|
||||
return {
|
||||
payloadType: options.artifactType,
|
||||
payload: options.artifact,
|
||||
signatures: [toSignature(options)],
|
||||
};
|
||||
}
|
||||
function toSignature(options) {
|
||||
return {
|
||||
keyid: options.keyHint || '',
|
||||
sig: options.signature,
|
||||
};
|
||||
}
|
||||
// Verification material
|
||||
function toVerificationMaterial(options) {
|
||||
return {
|
||||
content: toKeyContent(options),
|
||||
tlogEntries: [],
|
||||
timestampVerificationData: { rfc3161Timestamps: [] },
|
||||
};
|
||||
}
|
||||
function toKeyContent(options) {
|
||||
if (options.certificate) {
|
||||
if (options.singleCertificate) {
|
||||
return {
|
||||
$case: 'certificate',
|
||||
certificate: { rawBytes: options.certificate },
|
||||
};
|
||||
}
|
||||
else {
|
||||
return {
|
||||
$case: 'x509CertificateChain',
|
||||
x509CertificateChain: {
|
||||
certificates: [{ rawBytes: options.certificate }],
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
else {
|
||||
return {
|
||||
$case: 'publicKey',
|
||||
publicKey: {
|
||||
hint: options.keyHint || '',
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
74
node_modules/@sigstore/bundle/dist/bundle.d.ts
generated
vendored
Normal file
74
node_modules/@sigstore/bundle/dist/bundle.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
import type { Bundle as ProtoBundle, InclusionProof as ProtoInclusionProof, MessageSignature as ProtoMessageSignature, TransparencyLogEntry as ProtoTransparencyLogEntry, VerificationMaterial as ProtoVerificationMaterial } from '@sigstore/protobuf-specs';
|
||||
import type { WithRequired } from './utility';
|
||||
export declare const BUNDLE_V01_MEDIA_TYPE = "application/vnd.dev.sigstore.bundle+json;version=0.1";
|
||||
export declare const BUNDLE_V02_MEDIA_TYPE = "application/vnd.dev.sigstore.bundle+json;version=0.2";
|
||||
export declare const BUNDLE_V03_LEGACY_MEDIA_TYPE = "application/vnd.dev.sigstore.bundle+json;version=0.3";
|
||||
export declare const BUNDLE_V03_MEDIA_TYPE = "application/vnd.dev.sigstore.bundle.v0.3+json";
|
||||
type DsseEnvelopeContent = Extract<ProtoBundle['content'], {
|
||||
$case: 'dsseEnvelope';
|
||||
}>;
|
||||
type MessageSignatureContent = Extract<ProtoBundle['content'], {
|
||||
$case: 'messageSignature';
|
||||
}>;
|
||||
export type MessageSignature = WithRequired<ProtoMessageSignature, 'messageDigest'>;
|
||||
export type VerificationMaterial = WithRequired<ProtoVerificationMaterial, 'content'>;
|
||||
export type TransparencyLogEntry = WithRequired<ProtoTransparencyLogEntry, 'logId' | 'kindVersion'>;
|
||||
export type InclusionProof = WithRequired<ProtoInclusionProof, 'checkpoint'>;
|
||||
export type TLogEntryWithInclusionPromise = WithRequired<TransparencyLogEntry, 'inclusionPromise'>;
|
||||
export type TLogEntryWithInclusionProof = TransparencyLogEntry & {
|
||||
inclusionProof: InclusionProof;
|
||||
};
|
||||
export type Bundle = ProtoBundle & {
|
||||
verificationMaterial: VerificationMaterial & {
|
||||
tlogEntries: TransparencyLogEntry[];
|
||||
};
|
||||
content: (MessageSignatureContent & {
|
||||
messageSignature: MessageSignature;
|
||||
}) | DsseEnvelopeContent;
|
||||
};
|
||||
export type BundleV01 = Bundle & {
|
||||
verificationMaterial: Bundle['verificationMaterial'] & {
|
||||
tlogEntries: TLogEntryWithInclusionPromise[];
|
||||
};
|
||||
};
|
||||
export type BundleLatest = Bundle & {
|
||||
verificationMaterial: Bundle['verificationMaterial'] & {
|
||||
tlogEntries: TLogEntryWithInclusionProof[];
|
||||
};
|
||||
};
|
||||
export type BundleWithCertificateChain = Bundle & {
|
||||
verificationMaterial: Bundle['verificationMaterial'] & {
|
||||
content: Extract<VerificationMaterial['content'], {
|
||||
$case: 'x509CertificateChain';
|
||||
}>;
|
||||
};
|
||||
};
|
||||
export type BundleWithSingleCertificate = Bundle & {
|
||||
verificationMaterial: Bundle['verificationMaterial'] & {
|
||||
content: Extract<VerificationMaterial['content'], {
|
||||
$case: 'certificate';
|
||||
}>;
|
||||
};
|
||||
};
|
||||
export type BundleWithPublicKey = Bundle & {
|
||||
verificationMaterial: Bundle['verificationMaterial'] & {
|
||||
content: Extract<VerificationMaterial['content'], {
|
||||
$case: 'publicKey';
|
||||
}>;
|
||||
};
|
||||
};
|
||||
export type BundleWithMessageSignature = Bundle & {
|
||||
content: Extract<Bundle['content'], {
|
||||
$case: 'messageSignature';
|
||||
}>;
|
||||
};
|
||||
export type BundleWithDsseEnvelope = Bundle & {
|
||||
content: Extract<Bundle['content'], {
|
||||
$case: 'dsseEnvelope';
|
||||
}>;
|
||||
};
|
||||
export declare function isBundleWithCertificateChain(b: Bundle): b is BundleWithCertificateChain;
|
||||
export declare function isBundleWithPublicKey(b: Bundle): b is BundleWithPublicKey;
|
||||
export declare function isBundleWithMessageSignature(b: Bundle): b is BundleWithMessageSignature;
|
||||
export declare function isBundleWithDsseEnvelope(b: Bundle): b is BundleWithDsseEnvelope;
|
||||
export {};
|
||||
24
node_modules/@sigstore/bundle/dist/bundle.js
generated
vendored
Normal file
24
node_modules/@sigstore/bundle/dist/bundle.js
generated
vendored
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
|
||||
exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
|
||||
exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
|
||||
exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3';
|
||||
exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json';
|
||||
// Type guards for bundle variants.
|
||||
function isBundleWithCertificateChain(b) {
|
||||
return b.verificationMaterial.content.$case === 'x509CertificateChain';
|
||||
}
|
||||
exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
|
||||
function isBundleWithPublicKey(b) {
|
||||
return b.verificationMaterial.content.$case === 'publicKey';
|
||||
}
|
||||
exports.isBundleWithPublicKey = isBundleWithPublicKey;
|
||||
function isBundleWithMessageSignature(b) {
|
||||
return b.content.$case === 'messageSignature';
|
||||
}
|
||||
exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
|
||||
function isBundleWithDsseEnvelope(b) {
|
||||
return b.content.$case === 'dsseEnvelope';
|
||||
}
|
||||
exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
|
||||
4
node_modules/@sigstore/bundle/dist/error.d.ts
generated
vendored
Normal file
4
node_modules/@sigstore/bundle/dist/error.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
export declare class ValidationError extends Error {
|
||||
fields: string[];
|
||||
constructor(message: string, fields: string[]);
|
||||
}
|
||||
25
node_modules/@sigstore/bundle/dist/error.js
generated
vendored
Normal file
25
node_modules/@sigstore/bundle/dist/error.js
generated
vendored
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ValidationError = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
class ValidationError extends Error {
|
||||
constructor(message, fields) {
|
||||
super(message);
|
||||
this.fields = fields;
|
||||
}
|
||||
}
|
||||
exports.ValidationError = ValidationError;
|
||||
8
node_modules/@sigstore/bundle/dist/index.d.ts
generated
vendored
Normal file
8
node_modules/@sigstore/bundle/dist/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
export { toDSSEBundle, toMessageSignatureBundle } from './build';
|
||||
export { BUNDLE_V01_MEDIA_TYPE, BUNDLE_V02_MEDIA_TYPE, BUNDLE_V03_LEGACY_MEDIA_TYPE, BUNDLE_V03_MEDIA_TYPE, isBundleWithCertificateChain, isBundleWithDsseEnvelope, isBundleWithMessageSignature, isBundleWithPublicKey, } from './bundle';
|
||||
export { ValidationError } from './error';
|
||||
export { bundleFromJSON, bundleToJSON, envelopeFromJSON, envelopeToJSON, } from './serialized';
|
||||
export { assertBundle, assertBundleLatest, assertBundleV01, assertBundleV02, isBundleV01, } from './validate';
|
||||
export type { Envelope, PublicKeyIdentifier, RFC3161SignedTimestamp, Signature, TimestampVerificationData, X509Certificate, X509CertificateChain, } from '@sigstore/protobuf-specs';
|
||||
export type { Bundle, BundleLatest, BundleV01, BundleWithCertificateChain, BundleWithDsseEnvelope, BundleWithMessageSignature, BundleWithPublicKey, BundleWithSingleCertificate, InclusionProof, MessageSignature, TLogEntryWithInclusionPromise, TLogEntryWithInclusionProof, TransparencyLogEntry, VerificationMaterial, } from './bundle';
|
||||
export type { SerializedBundle, SerializedEnvelope } from './serialized';
|
||||
43
node_modules/@sigstore/bundle/dist/index.js
generated
vendored
Normal file
43
node_modules/@sigstore/bundle/dist/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
var build_1 = require("./build");
|
||||
Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
|
||||
Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
|
||||
var bundle_1 = require("./bundle");
|
||||
Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
|
||||
Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
|
||||
Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } });
|
||||
Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } });
|
||||
Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
|
||||
Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
|
||||
Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
|
||||
Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
|
||||
var error_1 = require("./error");
|
||||
Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
|
||||
var serialized_1 = require("./serialized");
|
||||
Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
|
||||
Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
|
||||
Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
|
||||
Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
|
||||
var validate_1 = require("./validate");
|
||||
Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
|
||||
Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
|
||||
Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
|
||||
Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } });
|
||||
Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
|
||||
74
node_modules/@sigstore/bundle/dist/serialized.d.ts
generated
vendored
Normal file
74
node_modules/@sigstore/bundle/dist/serialized.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
import { Envelope } from '@sigstore/protobuf-specs';
|
||||
import { Bundle } from './bundle';
|
||||
import type { OneOf } from './utility';
|
||||
export declare const bundleFromJSON: (obj: unknown) => Bundle;
|
||||
export declare const bundleToJSON: (bundle: Bundle) => SerializedBundle;
|
||||
export declare const envelopeFromJSON: (obj: unknown) => Envelope;
|
||||
export declare const envelopeToJSON: (envelope: Envelope) => SerializedEnvelope;
|
||||
type SerializedTLogEntry = {
|
||||
logIndex: string;
|
||||
logId: {
|
||||
keyId: string;
|
||||
};
|
||||
kindVersion: {
|
||||
kind: string;
|
||||
version: string;
|
||||
} | undefined;
|
||||
integratedTime: string;
|
||||
inclusionPromise: {
|
||||
signedEntryTimestamp: string;
|
||||
} | undefined;
|
||||
inclusionProof: {
|
||||
logIndex: string;
|
||||
rootHash: string;
|
||||
treeSize: string;
|
||||
hashes: string[];
|
||||
checkpoint: {
|
||||
envelope: string;
|
||||
};
|
||||
} | undefined;
|
||||
canonicalizedBody: string;
|
||||
};
|
||||
type SerializedTimestampVerificationData = {
|
||||
rfc3161Timestamps: {
|
||||
signedTimestamp: string;
|
||||
}[];
|
||||
};
|
||||
type SerializedMessageSignature = {
|
||||
messageDigest: {
|
||||
algorithm: string;
|
||||
digest: string;
|
||||
} | undefined;
|
||||
signature: string;
|
||||
};
|
||||
export type SerializedEnvelope = {
|
||||
payload: string;
|
||||
payloadType: string;
|
||||
signatures: {
|
||||
sig: string;
|
||||
keyid: string;
|
||||
}[];
|
||||
};
|
||||
export type SerializedBundle = {
|
||||
mediaType: string;
|
||||
verificationMaterial: (OneOf<{
|
||||
x509CertificateChain: {
|
||||
certificates: {
|
||||
rawBytes: string;
|
||||
}[];
|
||||
};
|
||||
publicKey: {
|
||||
hint: string;
|
||||
};
|
||||
certificate: {
|
||||
rawBytes: string;
|
||||
};
|
||||
}> | undefined) & {
|
||||
tlogEntries: SerializedTLogEntry[];
|
||||
timestampVerificationData: SerializedTimestampVerificationData | undefined;
|
||||
};
|
||||
} & OneOf<{
|
||||
dsseEnvelope: SerializedEnvelope;
|
||||
messageSignature: SerializedMessageSignature;
|
||||
}>;
|
||||
export {};
|
||||
49
node_modules/@sigstore/bundle/dist/serialized.js
generated
vendored
Normal file
49
node_modules/@sigstore/bundle/dist/serialized.js
generated
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const protobuf_specs_1 = require("@sigstore/protobuf-specs");
|
||||
const bundle_1 = require("./bundle");
|
||||
const validate_1 = require("./validate");
|
||||
const bundleFromJSON = (obj) => {
|
||||
const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
|
||||
switch (bundle.mediaType) {
|
||||
case bundle_1.BUNDLE_V01_MEDIA_TYPE:
|
||||
(0, validate_1.assertBundleV01)(bundle);
|
||||
break;
|
||||
case bundle_1.BUNDLE_V02_MEDIA_TYPE:
|
||||
(0, validate_1.assertBundleV02)(bundle);
|
||||
break;
|
||||
default:
|
||||
(0, validate_1.assertBundleLatest)(bundle);
|
||||
break;
|
||||
}
|
||||
return bundle;
|
||||
};
|
||||
exports.bundleFromJSON = bundleFromJSON;
|
||||
const bundleToJSON = (bundle) => {
|
||||
return protobuf_specs_1.Bundle.toJSON(bundle);
|
||||
};
|
||||
exports.bundleToJSON = bundleToJSON;
|
||||
const envelopeFromJSON = (obj) => {
|
||||
return protobuf_specs_1.Envelope.fromJSON(obj);
|
||||
};
|
||||
exports.envelopeFromJSON = envelopeFromJSON;
|
||||
const envelopeToJSON = (envelope) => {
|
||||
return protobuf_specs_1.Envelope.toJSON(envelope);
|
||||
};
|
||||
exports.envelopeToJSON = envelopeToJSON;
|
||||
14
node_modules/@sigstore/bundle/dist/utility.d.ts
generated
vendored
Normal file
14
node_modules/@sigstore/bundle/dist/utility.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
type ValueOf<Obj> = Obj[keyof Obj];
|
||||
type OneOnly<Obj, K extends keyof Obj> = {
|
||||
[key in Exclude<keyof Obj, K>]: undefined;
|
||||
} & {
|
||||
[key in K]: Obj[K];
|
||||
};
|
||||
type OneOfByKey<Obj> = {
|
||||
[key in keyof Obj]: OneOnly<Obj, key>;
|
||||
};
|
||||
export type OneOf<T> = ValueOf<OneOfByKey<T>>;
|
||||
export type WithRequired<T, K extends keyof T> = T & {
|
||||
[P in K]-?: NonNullable<T[P]>;
|
||||
};
|
||||
export {};
|
||||
2
node_modules/@sigstore/bundle/dist/utility.js
generated
vendored
Normal file
2
node_modules/@sigstore/bundle/dist/utility.js
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
7
node_modules/@sigstore/bundle/dist/validate.d.ts
generated
vendored
Normal file
7
node_modules/@sigstore/bundle/dist/validate.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
import type { Bundle as ProtoBundle } from '@sigstore/protobuf-specs';
|
||||
import type { Bundle, BundleLatest, BundleV01 } from './bundle';
|
||||
export declare function assertBundle(b: ProtoBundle): asserts b is Bundle;
|
||||
export declare function assertBundleV01(b: ProtoBundle): asserts b is BundleV01;
|
||||
export declare function isBundleV01(b: Bundle): b is BundleV01;
|
||||
export declare function assertBundleV02(b: ProtoBundle): asserts b is BundleLatest;
|
||||
export declare function assertBundleLatest(b: ProtoBundle): asserts b is BundleLatest;
|
||||
199
node_modules/@sigstore/bundle/dist/validate.js
generated
vendored
Normal file
199
node_modules/@sigstore/bundle/dist/validate.js
generated
vendored
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.assertBundleLatest = exports.assertBundleV02 = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const error_1 = require("./error");
|
||||
// Performs basic validation of a Sigstore bundle to ensure that all required
|
||||
// fields are populated. This is not a complete validation of the bundle, but
|
||||
// rather a check that the bundle is in a valid state to be processed by the
|
||||
// rest of the code.
|
||||
function assertBundle(b) {
|
||||
const invalidValues = validateBundleBase(b);
|
||||
if (invalidValues.length > 0) {
|
||||
throw new error_1.ValidationError('invalid bundle', invalidValues);
|
||||
}
|
||||
}
|
||||
exports.assertBundle = assertBundle;
|
||||
// Asserts that the given bundle conforms to the v0.1 bundle format.
|
||||
function assertBundleV01(b) {
|
||||
const invalidValues = [];
|
||||
invalidValues.push(...validateBundleBase(b));
|
||||
invalidValues.push(...validateInclusionPromise(b));
|
||||
if (invalidValues.length > 0) {
|
||||
throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
|
||||
}
|
||||
}
|
||||
exports.assertBundleV01 = assertBundleV01;
|
||||
// Type guard to determine if Bundle is a v0.1 bundle.
|
||||
function isBundleV01(b) {
|
||||
try {
|
||||
assertBundleV01(b);
|
||||
return true;
|
||||
}
|
||||
catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
exports.isBundleV01 = isBundleV01;
|
||||
// Asserts that the given bundle conforms to the v0.2 bundle format.
|
||||
function assertBundleV02(b) {
|
||||
const invalidValues = [];
|
||||
invalidValues.push(...validateBundleBase(b));
|
||||
invalidValues.push(...validateInclusionProof(b));
|
||||
if (invalidValues.length > 0) {
|
||||
throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
|
||||
}
|
||||
}
|
||||
exports.assertBundleV02 = assertBundleV02;
|
||||
// Asserts that the given bundle conforms to the newest (0.3) bundle format.
|
||||
function assertBundleLatest(b) {
|
||||
const invalidValues = [];
|
||||
invalidValues.push(...validateBundleBase(b));
|
||||
invalidValues.push(...validateInclusionProof(b));
|
||||
invalidValues.push(...validateNoCertificateChain(b));
|
||||
if (invalidValues.length > 0) {
|
||||
throw new error_1.ValidationError('invalid bundle', invalidValues);
|
||||
}
|
||||
}
|
||||
exports.assertBundleLatest = assertBundleLatest;
|
||||
function validateBundleBase(b) {
|
||||
const invalidValues = [];
|
||||
// Media type validation
|
||||
if (b.mediaType === undefined ||
|
||||
(!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) &&
|
||||
!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) {
|
||||
invalidValues.push('mediaType');
|
||||
}
|
||||
// Content-related validation
|
||||
if (b.content === undefined) {
|
||||
invalidValues.push('content');
|
||||
}
|
||||
else {
|
||||
switch (b.content.$case) {
|
||||
case 'messageSignature':
|
||||
if (b.content.messageSignature.messageDigest === undefined) {
|
||||
invalidValues.push('content.messageSignature.messageDigest');
|
||||
}
|
||||
else {
|
||||
if (b.content.messageSignature.messageDigest.digest.length === 0) {
|
||||
invalidValues.push('content.messageSignature.messageDigest.digest');
|
||||
}
|
||||
}
|
||||
if (b.content.messageSignature.signature.length === 0) {
|
||||
invalidValues.push('content.messageSignature.signature');
|
||||
}
|
||||
break;
|
||||
case 'dsseEnvelope':
|
||||
if (b.content.dsseEnvelope.payload.length === 0) {
|
||||
invalidValues.push('content.dsseEnvelope.payload');
|
||||
}
|
||||
if (b.content.dsseEnvelope.signatures.length !== 1) {
|
||||
invalidValues.push('content.dsseEnvelope.signatures');
|
||||
}
|
||||
else {
|
||||
if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
|
||||
invalidValues.push('content.dsseEnvelope.signatures[0].sig');
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Verification material-related validation
|
||||
if (b.verificationMaterial === undefined) {
|
||||
invalidValues.push('verificationMaterial');
|
||||
}
|
||||
else {
|
||||
if (b.verificationMaterial.content === undefined) {
|
||||
invalidValues.push('verificationMaterial.content');
|
||||
}
|
||||
else {
|
||||
switch (b.verificationMaterial.content.$case) {
|
||||
case 'x509CertificateChain':
|
||||
if (b.verificationMaterial.content.x509CertificateChain.certificates
|
||||
.length === 0) {
|
||||
invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
|
||||
}
|
||||
b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
|
||||
if (cert.rawBytes.length === 0) {
|
||||
invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
|
||||
}
|
||||
});
|
||||
break;
|
||||
case 'certificate':
|
||||
if (b.verificationMaterial.content.certificate.rawBytes.length === 0) {
|
||||
invalidValues.push('verificationMaterial.content.certificate.rawBytes');
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (b.verificationMaterial.tlogEntries === undefined) {
|
||||
invalidValues.push('verificationMaterial.tlogEntries');
|
||||
}
|
||||
else {
|
||||
if (b.verificationMaterial.tlogEntries.length > 0) {
|
||||
b.verificationMaterial.tlogEntries.forEach((entry, i) => {
|
||||
if (entry.logId === undefined) {
|
||||
invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
|
||||
}
|
||||
if (entry.kindVersion === undefined) {
|
||||
invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return invalidValues;
|
||||
}
|
||||
// Necessary for V01 bundles
|
||||
function validateInclusionPromise(b) {
|
||||
const invalidValues = [];
|
||||
if (b.verificationMaterial &&
|
||||
b.verificationMaterial.tlogEntries?.length > 0) {
|
||||
b.verificationMaterial.tlogEntries.forEach((entry, i) => {
|
||||
if (entry.inclusionPromise === undefined) {
|
||||
invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
|
||||
}
|
||||
});
|
||||
}
|
||||
return invalidValues;
|
||||
}
|
||||
// Necessary for V02 and later bundles
|
||||
function validateInclusionProof(b) {
|
||||
const invalidValues = [];
|
||||
if (b.verificationMaterial &&
|
||||
b.verificationMaterial.tlogEntries?.length > 0) {
|
||||
b.verificationMaterial.tlogEntries.forEach((entry, i) => {
|
||||
if (entry.inclusionProof === undefined) {
|
||||
invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
|
||||
}
|
||||
else {
|
||||
if (entry.inclusionProof.checkpoint === undefined) {
|
||||
invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return invalidValues;
|
||||
}
|
||||
// Necessary for V03 and later bundles
|
||||
function validateNoCertificateChain(b) {
|
||||
const invalidValues = [];
|
||||
if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') {
|
||||
invalidValues.push('verificationMaterial.content.$case');
|
||||
}
|
||||
return invalidValues;
|
||||
}
|
||||
35
node_modules/@sigstore/bundle/package.json
generated
vendored
Normal file
35
node_modules/@sigstore/bundle/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"name": "@sigstore/bundle",
|
||||
"version": "2.3.2",
|
||||
"description": "Sigstore bundle type",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"clean": "shx rm -rf dist *.tsbuildinfo",
|
||||
"build": "tsc --build",
|
||||
"test": "jest"
|
||||
},
|
||||
"files": [
|
||||
"dist",
|
||||
"store"
|
||||
],
|
||||
"author": "bdehamer@github.com",
|
||||
"license": "Apache-2.0",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/sigstore/sigstore-js.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sigstore/sigstore-js/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme",
|
||||
"publishConfig": {
|
||||
"provenance": true
|
||||
},
|
||||
"dependencies": {
|
||||
"@sigstore/protobuf-specs": "^0.3.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.14.0 || >=18.0.0"
|
||||
}
|
||||
}
|
||||
202
node_modules/@sigstore/core/LICENSE
generated
vendored
Normal file
202
node_modules/@sigstore/core/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2023 The Sigstore Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
10
node_modules/@sigstore/core/README.md
generated
vendored
Normal file
10
node_modules/@sigstore/core/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# @sigstore/core · [](https://www.npmjs.com/package/@sigstore/core) [](https://github.com/sigstore/sigstore-js/actions/workflows/ci.yml) [](https://github.com/sigstore/sigstore-js/actions/workflows/smoke-test.yml)
|
||||
|
||||
Base library for [Sigstore][1] JavaScript packages.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js version >= 16.14.0
|
||||
|
||||
|
||||
[1]: https://www.sigstore.dev
|
||||
4
node_modules/@sigstore/core/dist/asn1/error.d.ts
generated
vendored
Normal file
4
node_modules/@sigstore/core/dist/asn1/error.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
export declare class ASN1ParseError extends Error {
|
||||
}
|
||||
export declare class ASN1TypeError extends Error {
|
||||
}
|
||||
24
node_modules/@sigstore/core/dist/asn1/error.js
generated
vendored
Normal file
24
node_modules/@sigstore/core/dist/asn1/error.js
generated
vendored
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ASN1TypeError = exports.ASN1ParseError = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
class ASN1ParseError extends Error {
|
||||
}
|
||||
exports.ASN1ParseError = ASN1ParseError;
|
||||
class ASN1TypeError extends Error {
|
||||
}
|
||||
exports.ASN1TypeError = ASN1TypeError;
|
||||
1
node_modules/@sigstore/core/dist/asn1/index.d.ts
generated
vendored
Normal file
1
node_modules/@sigstore/core/dist/asn1/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
export { ASN1Obj } from './obj';
|
||||
20
node_modules/@sigstore/core/dist/asn1/index.js
generated
vendored
Normal file
20
node_modules/@sigstore/core/dist/asn1/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ASN1Obj = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
var obj_1 = require("./obj");
|
||||
Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } });
|
||||
4
node_modules/@sigstore/core/dist/asn1/length.d.ts
generated
vendored
Normal file
4
node_modules/@sigstore/core/dist/asn1/length.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
/// <reference types="node" />
|
||||
import { ByteStream } from '../stream';
|
||||
export declare function decodeLength(stream: ByteStream): number;
|
||||
export declare function encodeLength(len: number): Buffer;
|
||||
63
node_modules/@sigstore/core/dist/asn1/length.js
generated
vendored
Normal file
63
node_modules/@sigstore/core/dist/asn1/length.js
generated
vendored
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
"use strict";
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.encodeLength = exports.decodeLength = void 0;
|
||||
const error_1 = require("./error");
|
||||
// Decodes the length of a DER-encoded ANS.1 element from the supplied stream.
|
||||
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes
|
||||
function decodeLength(stream) {
|
||||
const buf = stream.getUint8();
|
||||
// If the most significant bit is UNSET the length is just the value of the
|
||||
// byte.
|
||||
if ((buf & 0x80) === 0x00) {
|
||||
return buf;
|
||||
}
|
||||
// Otherwise, the lower 7 bits of the first byte indicate the number of bytes
|
||||
// that follow to encode the length.
|
||||
const byteCount = buf & 0x7f;
|
||||
// Ensure the encoded length can safely fit in a JS number.
|
||||
if (byteCount > 6) {
|
||||
throw new error_1.ASN1ParseError('length exceeds 6 byte limit');
|
||||
}
|
||||
// Iterate over the bytes that encode the length.
|
||||
let len = 0;
|
||||
for (let i = 0; i < byteCount; i++) {
|
||||
len = len * 256 + stream.getUint8();
|
||||
}
|
||||
// This is a valid ASN.1 length encoding, but we don't support it.
|
||||
if (len === 0) {
|
||||
throw new error_1.ASN1ParseError('indefinite length encoding not supported');
|
||||
}
|
||||
return len;
|
||||
}
|
||||
exports.decodeLength = decodeLength;
|
||||
// Translates the supplied value to a DER-encoded length.
|
||||
// Encodes a length value using DER rules: short form (a single byte) for
// values < 128, otherwise a prefix byte of (0x80 | byte count) followed
// by the big-endian length bytes.
function encodeLength(len) {
    // Short form fits in one byte.
    if (len < 128) {
        return Buffer.from([len]);
    }
    // Long form. Bitwise operators on JS numbers are limited to 32 bits,
    // so the value is decomposed through a BigInt instead.
    const octets = [];
    for (let remaining = BigInt(len); remaining > 0n; remaining >>= 8n) {
        octets.push(Number(remaining & 0xffn));
    }
    octets.reverse();
    return Buffer.from([0x80 | octets.length, ...octets]);
}
|
||||
exports.encodeLength = encodeLength;
|
||||
15
node_modules/@sigstore/core/dist/asn1/obj.d.ts
generated
vendored
Normal file
15
node_modules/@sigstore/core/dist/asn1/obj.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
/// <reference types="node" />
|
||||
import { ASN1Tag } from './tag';
|
||||
export declare class ASN1Obj {
|
||||
readonly tag: ASN1Tag;
|
||||
readonly subs: ASN1Obj[];
|
||||
readonly value: Buffer;
|
||||
constructor(tag: ASN1Tag, value: Buffer, subs: ASN1Obj[]);
|
||||
static parseBuffer(buf: Buffer): ASN1Obj;
|
||||
toDER(): Buffer;
|
||||
toBoolean(): boolean;
|
||||
toInteger(): bigint;
|
||||
toOID(): string;
|
||||
toDate(): Date;
|
||||
toBitString(): number[];
|
||||
}
|
||||
152
node_modules/@sigstore/core/dist/asn1/obj.js
generated
vendored
Normal file
152
node_modules/@sigstore/core/dist/asn1/obj.js
generated
vendored
Normal file
|
|
@ -0,0 +1,152 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ASN1Obj = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const stream_1 = require("../stream");
|
||||
const error_1 = require("./error");
|
||||
const length_1 = require("./length");
|
||||
const parse_1 = require("./parse");
|
||||
const tag_1 = require("./tag");
|
||||
// Represents a single node in an ASN.1 structure: a tag, the raw value
// bytes, and (for constructed types) the parsed child elements.
class ASN1Obj {
    constructor(tag, value, subs) {
        this.tag = tag;
        this.value = value;
        this.subs = subs;
    }
    // Parses a buffer of DER-encoded bytes into a tree of ASN1Obj nodes.
    static parseBuffer(buf) {
        return parseStream(new stream_1.ByteStream(buf));
    }
    // Re-serializes this node (and its children) to DER bytes.
    toDER() {
        const valueStream = new stream_1.ByteStream();
        if (this.subs.length > 0) {
            // Constructed: the value is the concatenation of the encoded children.
            this.subs.forEach((sub) => valueStream.appendView(sub.toDER()));
        }
        else {
            valueStream.appendView(this.value);
        }
        const value = valueStream.buffer;
        // Emit tag byte, then length octets, then the value.
        const encoded = new stream_1.ByteStream();
        encoded.appendChar(this.tag.toDER());
        encoded.appendView((0, length_1.encodeLength)(value.length));
        encoded.appendView(value);
        return encoded.buffer;
    }
    /////////////////////////////////////////////////////////////////////////////
    // Convenience methods for parsing ASN.1 primitives into JS types
    // Interprets the value as a BOOLEAN; throws ASN1TypeError otherwise.
    toBoolean() {
        if (!this.tag.isBoolean()) {
            throw new error_1.ASN1TypeError('not a boolean');
        }
        return (0, parse_1.parseBoolean)(this.value);
    }
    // Interprets the value as an INTEGER (returned as a BigInt); throws
    // ASN1TypeError otherwise.
    toInteger() {
        if (!this.tag.isInteger()) {
            throw new error_1.ASN1TypeError('not an integer');
        }
        return (0, parse_1.parseInteger)(this.value);
    }
    // Interprets the value as an OBJECT IDENTIFIER string; throws
    // ASN1TypeError otherwise.
    toOID() {
        if (!this.tag.isOID()) {
            throw new error_1.ASN1TypeError('not an OID');
        }
        return (0, parse_1.parseOID)(this.value);
    }
    // Interprets the value as a Date. Accepts either UTCTime (2-digit
    // year) or GeneralizedTime (4-digit year); throws ASN1TypeError for
    // anything else.
    toDate() {
        if (this.tag.isUTCTime()) {
            return (0, parse_1.parseTime)(this.value, true);
        }
        if (this.tag.isGeneralizedTime()) {
            return (0, parse_1.parseTime)(this.value, false);
        }
        throw new error_1.ASN1TypeError('not a date');
    }
    // Interprets the value as a BIT STRING, returning one number (0 or 1)
    // per bit; throws ASN1TypeError otherwise.
    toBitString() {
        if (!this.tag.isBitString()) {
            throw new error_1.ASN1TypeError('not a bit string');
        }
        return (0, parse_1.parseBitString)(this.value);
    }
}
|
||||
exports.ASN1Obj = ASN1Obj;
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
// Internal stream parsing functions
|
||||
// Reads one ASN.1 element (tag, length, value) from the stream,
// recursively parsing the children of constructed elements. The stream
// cursor is left at the end of the element.
function parseStream(stream) {
    // Tag byte, then the DER length, then a view over the value bytes.
    const elementTag = new tag_1.ASN1Tag(stream.getUint8());
    const length = (0, length_1.decodeLength)(stream);
    const rawValue = stream.slice(stream.position, length);
    const valueStart = stream.position;
    let children = [];
    if (elementTag.constructed) {
        // Constructed elements always contain nested elements.
        children = collectSubs(stream, length);
    }
    else if (elementTag.isOctetString()) {
        // OCTET STRING values sometimes wrap nested DER; parse them
        // opportunistically and treat the value as primitive if that fails.
        try {
            children = collectSubs(stream, length);
        }
        catch (e) {
            // Fail silently and treat as primitive.
        }
    }
    // When no children were parsed the cursor may not have advanced (or
    // may have stopped mid-value after a failed OCTET STRING parse), so
    // jump to the end of the element explicitly.
    if (children.length === 0) {
        stream.seek(valueStart + length);
    }
    return new ASN1Obj(elementTag, rawValue, children);
}
|
||||
// Parses consecutive child elements from the stream until exactly `len`
// bytes have been consumed. Throws ASN1ParseError if the children do not
// line up with the declared length.
function collectSubs(stream, len) {
    const end = stream.position + len;
    // Defensive bounds check; a truncated buffer should already have been
    // caught when the stream was sliced in parseStream.
    /* istanbul ignore if */
    if (end > stream.length) {
        throw new error_1.ASN1ParseError('invalid length');
    }
    // Parse children until the end of the content span is reached.
    const children = [];
    while (stream.position < end) {
        children.push(parseStream(stream));
    }
    // The children must consume the content bytes exactly.
    if (stream.position !== end) {
        throw new error_1.ASN1ParseError('invalid length');
    }
    return children;
}
|
||||
7
node_modules/@sigstore/core/dist/asn1/parse.d.ts
generated
vendored
Normal file
7
node_modules/@sigstore/core/dist/asn1/parse.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
/// <reference types="node" />
|
||||
export declare function parseInteger(buf: Buffer): bigint;
|
||||
export declare function parseStringASCII(buf: Buffer): string;
|
||||
export declare function parseTime(buf: Buffer, shortYear: boolean): Date;
|
||||
export declare function parseOID(buf: Buffer): string;
|
||||
export declare function parseBoolean(buf: Buffer): boolean;
|
||||
export declare function parseBitString(buf: Buffer): number[];
|
||||
125
node_modules/@sigstore/core/dist/asn1/parse.js
generated
vendored
Normal file
125
node_modules/@sigstore/core/dist/asn1/parse.js
generated
vendored
Normal file
|
|
@ -0,0 +1,125 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseBitString = exports.parseBoolean = exports.parseOID = exports.parseTime = exports.parseStringASCII = exports.parseInteger = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
|
||||
const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
|
||||
// Parse a BigInt from the DER-encoded buffer
|
||||
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
|
||||
// Parses a DER-encoded INTEGER value into a BigInt, handling two's
// complement representation for negative values.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
function parseInteger(buf) {
    const end = buf.length;
    let pos = 0;
    let byte = buf[pos];
    // The sign comes from the high bit of the first content byte.
    const negative = byte > 0x7f;
    // Skip redundant sign-padding bytes (0xFF when negative, 0x00 when
    // positive).
    const padding = negative ? 0xff : 0x00;
    while (byte === padding && ++pos < end) {
        byte = buf[pos];
    }
    // Nothing left after the padding: the value is all sign bits.
    if (end - pos === 0) {
        return negative ? -1n : 0n;
    }
    // Seed the accumulator with the first significant byte (shifted into
    // negative range for two's complement), then fold in the remaining
    // bytes base 256.
    let acc = BigInt(negative ? byte - 256 : byte);
    for (let i = pos + 1; i < end; ++i) {
        acc = acc * 256n + BigInt(buf[i]);
    }
    return acc;
}
|
||||
exports.parseInteger = parseInteger;
|
||||
// Parse an ASCII string from the DER-encoded buffer
|
||||
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
|
||||
// Interprets the raw bytes as an ASCII-encoded string.
function parseStringASCII(buf) {
    const ASCII_ENCODING = 'ascii';
    return buf.toString(ASCII_ENCODING);
}
|
||||
exports.parseStringASCII = parseStringASCII;
|
||||
// Parse a Date from the DER-encoded buffer
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
|
||||
// Parses a DER-encoded UTCTime (shortYear=true, 2-digit year) or
// GeneralizedTime (shortYear=false, 4-digit year) value into a Date.
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
function parseTime(buf, shortYear) {
    // YYMMDDHHMMSS[.sss]Z vs YYYYMMDDHHMMSS[.sss]Z -- captured groups
    // start at index 1. Fractional seconds, when present, are discarded.
    const pattern = shortYear
        ? /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/
        : /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
    const m = pattern.exec(buf.toString('ascii'));
    if (!m) {
        throw new Error('invalid time');
    }
    let year = m[1];
    if (shortYear) {
        // Per RFC 5280, two-digit years >= 50 are 19xx, otherwise 20xx.
        const yy = Number(year);
        year = String(yy + (yy >= 50 ? 1900 : 2000));
    }
    // Reassemble as ISO 8601 (UTC) and let Date handle the parsing.
    return new Date(`${year}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`);
}
|
||||
exports.parseTime = parseTime;
|
||||
// Parse an OID from the DER-encoded buffer
|
||||
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
|
||||
// Parses a DER-encoded OBJECT IDENTIFIER value into its dotted-decimal
// string form (e.g. "1.2.840.10045.4.3.2").
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
function parseOID(buf) {
    let pos = 0;
    const end = buf.length;
    // The first byte packs the first two components as (40 * first) + second.
    // NOTE(review): this assumes the first subidentifier fits in a single
    // byte (first arc 0/1/2 with a small second arc), which holds for the
    // certificate OIDs this library encounters -- confirm before reusing
    // for arbitrary OIDs.
    let n = buf[pos++];
    const first = Math.floor(n / 40);
    const second = n % 40;
    let oid = `${first}.${second}`;
    // Remaining components are base-128 encoded, 7 bits per byte, with
    // the high bit acting as a continuation flag.
    let val = 0;
    for (; pos < end; ++pos) {
        n = buf[pos];
        // Accumulate with multiplication rather than `(val << 7)`: bitwise
        // shifts truncate to 32 bits, which silently corrupted components
        // >= 2^25 after the next shift. Multiplication stays exact up to
        // Number.MAX_SAFE_INTEGER (2^53 - 1).
        val = val * 128 + (n & 0x7f);
        // A clear high bit marks the final byte of this component: record
        // the value and reset the accumulator.
        if ((n & 0x80) === 0) {
            oid += `.${val}`;
            val = 0;
        }
    }
    return oid;
}
|
||||
exports.parseOID = parseOID;
|
||||
// Parse a boolean from the DER-encoded buffer
|
||||
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
|
||||
// DER encodes BOOLEAN as a single content byte: 0x00 is false, any other
// value is true.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
function parseBoolean(buf) {
    const FALSE_BYTE = 0x00;
    return buf[0] !== FALSE_BYTE;
}
|
||||
exports.parseBoolean = parseBoolean;
|
||||
// Parse a bit string from the DER-encoded buffer
|
||||
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
|
||||
// Expands a DER-encoded BIT STRING into an array of 0/1 values, most
// significant bit first.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
function parseBitString(buf) {
    // The first content byte counts the unused (padding) bits in the
    // final byte.
    const unusedBits = buf[0];
    const lastIndex = buf.length - 1;
    const bits = [];
    for (let i = 1; i < buf.length; ++i) {
        // Padding bits are only trimmed from the final byte.
        const lowestBit = i === lastIndex ? unusedBits : 0;
        // Walk each byte from the most significant bit down.
        for (let j = 7; j >= lowestBit; --j) {
            bits.push((buf[i] >> j) & 0x01);
        }
    }
    return bits;
}
|
||||
exports.parseBitString = parseBitString;
|
||||
16
node_modules/@sigstore/core/dist/asn1/tag.d.ts
generated
vendored
Normal file
16
node_modules/@sigstore/core/dist/asn1/tag.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
export declare class ASN1Tag {
|
||||
readonly number: number;
|
||||
readonly constructed: boolean;
|
||||
readonly class: number;
|
||||
constructor(enc: number);
|
||||
isUniversal(): boolean;
|
||||
isContextSpecific(num?: number): boolean;
|
||||
isBoolean(): boolean;
|
||||
isInteger(): boolean;
|
||||
isBitString(): boolean;
|
||||
isOctetString(): boolean;
|
||||
isOID(): boolean;
|
||||
isUTCTime(): boolean;
|
||||
isGeneralizedTime(): boolean;
|
||||
toDER(): number;
|
||||
}
|
||||
86
node_modules/@sigstore/core/dist/asn1/tag.js
generated
vendored
Normal file
86
node_modules/@sigstore/core/dist/asn1/tag.js
generated
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ASN1Tag = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const error_1 = require("./error");
|
||||
// Universal tag numbers this library understands.
const UNIVERSAL_TAG = {
    BOOLEAN: 0x01,
    INTEGER: 0x02,
    BIT_STRING: 0x03,
    OCTET_STRING: 0x04,
    OBJECT_IDENTIFIER: 0x06,
    SEQUENCE: 0x10,
    SET: 0x11,
    PRINTABLE_STRING: 0x13,
    UTC_TIME: 0x17,
    GENERALIZED_TIME: 0x18,
};
// Tag class values (bits 6-7 of the encoded tag byte).
const TAG_CLASS = {
    UNIVERSAL: 0x00,
    APPLICATION: 0x01,
    CONTEXT_SPECIFIC: 0x02,
    PRIVATE: 0x03,
};
// Decodes and represents a single-byte ASN.1 tag.
// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes
class ASN1Tag {
    constructor(enc) {
        // Bits 0-4: tag number.
        this.number = enc & 0x1f;
        // Bit 5: constructed vs primitive.
        this.constructed = (enc & 0x20) === 0x20;
        // Bits 6-7: tag class.
        this.class = enc >> 6;
        // A tag number of 0x1F signals a multi-byte (long form) tag, which
        // this parser does not support.
        if (this.number === 0x1f) {
            throw new error_1.ASN1ParseError('long form tags not supported');
        }
        if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) {
            throw new error_1.ASN1ParseError('unsupported tag 0x00');
        }
    }
    isUniversal() {
        return this.class === TAG_CLASS.UNIVERSAL;
    }
    // When `num` is given, also requires the tag number to match.
    isContextSpecific(num) {
        if (this.class !== TAG_CLASS.CONTEXT_SPECIFIC) {
            return false;
        }
        return num === undefined || this.number === num;
    }
    isBoolean() {
        return this.#isUniversalTag(UNIVERSAL_TAG.BOOLEAN);
    }
    isInteger() {
        return this.#isUniversalTag(UNIVERSAL_TAG.INTEGER);
    }
    isBitString() {
        return this.#isUniversalTag(UNIVERSAL_TAG.BIT_STRING);
    }
    isOctetString() {
        return this.#isUniversalTag(UNIVERSAL_TAG.OCTET_STRING);
    }
    isOID() {
        return this.#isUniversalTag(UNIVERSAL_TAG.OBJECT_IDENTIFIER);
    }
    isUTCTime() {
        return this.#isUniversalTag(UNIVERSAL_TAG.UTC_TIME);
    }
    isGeneralizedTime() {
        return this.#isUniversalTag(UNIVERSAL_TAG.GENERALIZED_TIME);
    }
    // Re-encodes the tag as its single DER byte.
    toDER() {
        const constructedBit = this.constructed ? 0x20 : 0x00;
        return (this.class << 6) | constructedBit | this.number;
    }
    // Shared by the universal-type predicates above.
    #isUniversalTag(num) {
        return this.isUniversal() && this.number === num;
    }
}
|
||||
exports.ASN1Tag = ASN1Tag;
|
||||
9
node_modules/@sigstore/core/dist/crypto.d.ts
generated
vendored
Normal file
9
node_modules/@sigstore/core/dist/crypto.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
import crypto, { BinaryLike } from 'crypto';
|
||||
export type { KeyObject } from 'crypto';
|
||||
export declare function createPublicKey(key: string | Buffer, type?: 'spki' | 'pkcs1'): crypto.KeyObject;
|
||||
export declare function digest(algorithm: string, ...data: BinaryLike[]): Buffer;
|
||||
export declare function hash(...data: BinaryLike[]): Buffer;
|
||||
export declare function verify(data: Buffer, key: crypto.KeyLike, signature: Buffer, algorithm?: string): boolean;
|
||||
export declare function bufferEqual(a: Buffer, b: Buffer): boolean;
|
||||
71
node_modules/@sigstore/core/dist/crypto.js
generated
vendored
Normal file
71
node_modules/@sigstore/core/dist/crypto.js
generated
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.bufferEqual = exports.verify = exports.hash = exports.digest = exports.createPublicKey = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const SHA256_ALGORITHM = 'sha256';
|
||||
// Converts a PEM string or DER buffer into a Node KeyObject. DER input
// is interpreted per `type` (SPKI by default); PEM strings are
// self-describing and need no hints.
function createPublicKey(key, type = 'spki') {
    return typeof key === 'string'
        ? crypto_1.default.createPublicKey(key)
        : crypto_1.default.createPublicKey({ key, format: 'der', type: type });
}
|
||||
exports.createPublicKey = createPublicKey;
|
||||
// Computes a digest over the concatenation of the given data chunks
// using the named hash algorithm. Returns the raw digest bytes.
function digest(algorithm, ...data) {
    const hasher = crypto_1.default.createHash(algorithm);
    data.forEach((chunk) => hasher.update(chunk));
    return hasher.digest();
}
|
||||
exports.digest = digest;
|
||||
// TODO: deprecate this in favor of digest()
|
||||
// Computes the SHA-256 digest of the concatenated data chunks.
// TODO: deprecate this in favor of digest()
function hash(...data) {
    const hasher = crypto_1.default.createHash('sha256');
    for (const chunk of data) {
        hasher.update(chunk);
    }
    return hasher.digest();
}
|
||||
exports.hash = hash;
|
||||
// Verifies `signature` over `data` with the given key, returning false
// (rather than throwing) when the signature is invalid.
function verify(data, key, signature, algorithm) {
    let ok;
    // The try/catch works around Node 14.x, where verify throws instead
    // of returning false for some malformed signatures.
    try {
        ok = crypto_1.default.verify(algorithm, data, key, signature);
    }
    catch (e) {
        /* istanbul ignore next */
        ok = false;
    }
    return ok;
}
|
||||
exports.verify = verify;
|
||||
// Constant-time buffer comparison. Returns false (instead of throwing)
// when the buffers cannot be equal.
function bufferEqual(a, b) {
    // timingSafeEqual throws on a length mismatch; map that case (and any
    // other failure) to a simple "not equal".
    if (a.length !== b.length) {
        return false;
    }
    try {
        return crypto_1.default.timingSafeEqual(a, b);
    }
    catch {
        /* istanbul ignore next */
        return false;
    }
}
|
||||
exports.bufferEqual = bufferEqual;
|
||||
2
node_modules/@sigstore/core/dist/dsse.d.ts
generated
vendored
Normal file
2
node_modules/@sigstore/core/dist/dsse.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
/// <reference types="node" />
|
||||
export declare function preAuthEncoding(payloadType: string, payload: Buffer): Buffer;
|
||||
31
node_modules/@sigstore/core/dist/dsse.js
generated
vendored
Normal file
31
node_modules/@sigstore/core/dist/dsse.js
generated
vendored
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.preAuthEncoding = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const PAE_PREFIX = 'DSSEv1';
|
||||
// DSSE Pre-Authentication Encoding
|
||||
// Computes the DSSE Pre-Authentication Encoding (PAE) for the given
// payload: "DSSEv1 <typeLen> <type> <payloadLen> <payload>".
function preAuthEncoding(payloadType, payload) {
    // NOTE(review): payloadType.length is the UTF-16 code-unit count; the
    // DSSE spec calls for the UTF-8 byte length. The two agree for the
    // ASCII media types used in practice -- confirm before passing
    // non-ASCII payload types.
    const header = `DSSEv1 ${payloadType.length} ${payloadType} ${payload.length} `;
    return Buffer.concat([Buffer.from(header, 'ascii'), payload]);
}
|
||||
exports.preAuthEncoding = preAuthEncoding;
|
||||
2
node_modules/@sigstore/core/dist/encoding.d.ts
generated
vendored
Normal file
2
node_modules/@sigstore/core/dist/encoding.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export declare function base64Encode(str: string): string;
|
||||
export declare function base64Decode(str: string): string;
|
||||
28
node_modules/@sigstore/core/dist/encoding.js
generated
vendored
Normal file
28
node_modules/@sigstore/core/dist/encoding.js
generated
vendored
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.base64Decode = exports.base64Encode = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const BASE64_ENCODING = 'base64';
|
||||
const UTF8_ENCODING = 'utf-8';
|
||||
// Encodes a UTF-8 string as base64.
function base64Encode(str) {
    const bytes = Buffer.from(str, 'utf-8');
    return bytes.toString('base64');
}
|
||||
exports.base64Encode = base64Encode;
|
||||
// Decodes a base64 string back into UTF-8 text.
function base64Decode(str) {
    const bytes = Buffer.from(str, 'base64');
    return bytes.toString('utf-8');
}
|
||||
exports.base64Decode = base64Decode;
|
||||
9
node_modules/@sigstore/core/dist/index.d.ts
generated
vendored
Normal file
9
node_modules/@sigstore/core/dist/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
export { ASN1Obj } from './asn1';
|
||||
export * as crypto from './crypto';
|
||||
export * as dsse from './dsse';
|
||||
export * as encoding from './encoding';
|
||||
export * as json from './json';
|
||||
export * as pem from './pem';
|
||||
export { RFC3161Timestamp } from './rfc3161';
|
||||
export { ByteStream } from './stream';
|
||||
export { EXTENSION_OID_SCT, X509Certificate, X509SCTExtension } from './x509';
|
||||
56
node_modules/@sigstore/core/dist/index.js
generated
vendored
Normal file
56
node_modules/@sigstore/core/dist/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
var asn1_1 = require("./asn1");
|
||||
Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } });
|
||||
exports.crypto = __importStar(require("./crypto"));
|
||||
exports.dsse = __importStar(require("./dsse"));
|
||||
exports.encoding = __importStar(require("./encoding"));
|
||||
exports.json = __importStar(require("./json"));
|
||||
exports.pem = __importStar(require("./pem"));
|
||||
var rfc3161_1 = require("./rfc3161");
|
||||
Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } });
|
||||
var stream_1 = require("./stream");
|
||||
Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } });
|
||||
var x509_1 = require("./x509");
|
||||
Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } });
|
||||
Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } });
|
||||
Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } });
|
||||
1
node_modules/@sigstore/core/dist/json.d.ts
generated
vendored
Normal file
1
node_modules/@sigstore/core/dist/json.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
export declare function canonicalize(object: any): string;
|
||||
61
node_modules/@sigstore/core/dist/json.js
generated
vendored
Normal file
61
node_modules/@sigstore/core/dist/json.js
generated
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
"use strict";
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.canonicalize = void 0;
|
||||
// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
// Serializes a value per the JSON canonicalization scheme
// (https://github.com/cyberphone/json-canonicalization): object keys are
// emitted in sorted order, array order is preserved, and primitives are
// rendered via JSON.stringify.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function canonicalize(object) {
    // Primitives -- and anything providing its own toJSON -- serialize
    // directly. String(...) preserves the original's behavior of yielding
    // the text "undefined" for unserializable values.
    if (object === null || typeof object !== 'object' || object.toJSON != null) {
        return String(JSON.stringify(object));
    }
    if (Array.isArray(object)) {
        // Arrays: canonicalize each element, keeping element order.
        const elements = object.map((element) => canonicalize(element));
        return `[${elements.join(',')}]`;
    }
    // Objects: serialize properties in sorted key order.
    const members = Object.keys(object)
        .sort()
        .map((property) => `${JSON.stringify(property)}:${canonicalize(object[property])}`);
    return `{${members.join(',')}}`;
}
|
||||
exports.canonicalize = canonicalize;
|
||||
2
node_modules/@sigstore/core/dist/oid.d.ts
generated
vendored
Normal file
2
node_modules/@sigstore/core/dist/oid.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export declare const ECDSA_SIGNATURE_ALGOS: Record<string, string>;
|
||||
export declare const SHA2_HASH_ALGOS: Record<string, string>;
|
||||
14
node_modules/@sigstore/core/dist/oid.js
generated
vendored
Normal file
14
node_modules/@sigstore/core/dist/oid.js
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0;
|
||||
exports.ECDSA_SIGNATURE_ALGOS = {
|
||||
'1.2.840.10045.4.3.1': 'sha224',
|
||||
'1.2.840.10045.4.3.2': 'sha256',
|
||||
'1.2.840.10045.4.3.3': 'sha384',
|
||||
'1.2.840.10045.4.3.4': 'sha512',
|
||||
};
|
||||
exports.SHA2_HASH_ALGOS = {
|
||||
'2.16.840.1.101.3.4.2.1': 'sha256',
|
||||
'2.16.840.1.101.3.4.2.2': 'sha384',
|
||||
'2.16.840.1.101.3.4.2.3': 'sha512',
|
||||
};
|
||||
3
node_modules/@sigstore/core/dist/pem.d.ts
generated
vendored
Normal file
3
node_modules/@sigstore/core/dist/pem.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
/// <reference types="node" />
|
||||
export declare function toDER(certificate: string): Buffer;
|
||||
export declare function fromDER(certificate: Buffer, type?: string): string;
|
||||
44
node_modules/@sigstore/core/dist/pem.js
generated
vendored
Normal file
44
node_modules/@sigstore/core/dist/pem.js
generated
vendored
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.fromDER = exports.toDER = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
// Armor lines that delimit a PEM document.
const PEM_HEADER = /-----BEGIN (.*)-----/;
const PEM_FOOTER = /-----END (.*)-----/;
// Decodes a PEM-encoded certificate into its raw DER bytes: the BEGIN/END
// armor lines are discarded and the remaining lines are base64-decoded as a
// single payload.
function toDER(certificate) {
    const base64 = certificate
        .split('\n')
        .filter((line) => !(PEM_HEADER.test(line) || PEM_FOOTER.test(line)))
        .join('');
    return Buffer.from(base64, 'base64');
}
|
||||
exports.toDER = toDER;
|
||||
// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM
// encoding dictates that each certificate should have a trailing newline after
// the footer.
function fromDER(certificate, type = 'CERTIFICATE') {
    // Base64-encode the payload, then wrap it at 64 characters per line.
    const encoded = certificate.toString('base64');
    const body = encoded.match(/.{1,64}/g) ?? [];
    const blocks = [`-----BEGIN ${type}-----`, ...body, `-----END ${type}-----`];
    return `${blocks.join('\n')}\n`;
}
|
||||
exports.fromDER = fromDER;
|
||||
2
node_modules/@sigstore/core/dist/rfc3161/error.d.ts
generated
vendored
Normal file
2
node_modules/@sigstore/core/dist/rfc3161/error.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
// Thrown when an RFC 3161 timestamp response fails a parsing or
// verification check (see rfc3161/timestamp.js and rfc3161/tstinfo.js).
export declare class RFC3161TimestampVerificationError extends Error {
}
|
||||
21
node_modules/@sigstore/core/dist/rfc3161/error.js
generated
vendored
Normal file
21
node_modules/@sigstore/core/dist/rfc3161/error.js
generated
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RFC3161TimestampVerificationError = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
// Error raised for any failed RFC 3161 timestamp verification check.
class RFC3161TimestampVerificationError extends Error {
    constructor(message) {
        super(message);
        // Without this, instances report name === 'Error', which makes logs
        // and error messages ambiguous.
        this.name = 'RFC3161TimestampVerificationError';
    }
}
|
||||
exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError;
|
||||
1
node_modules/@sigstore/core/dist/rfc3161/index.d.ts
generated
vendored
Normal file
1
node_modules/@sigstore/core/dist/rfc3161/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
// Public surface of the rfc3161 submodule: only the timestamp parser/verifier.
export { RFC3161Timestamp } from './timestamp';
|
||||
20
node_modules/@sigstore/core/dist/rfc3161/index.js
generated
vendored
Normal file
20
node_modules/@sigstore/core/dist/rfc3161/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
"use strict";
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RFC3161Timestamp = void 0;
|
||||
var timestamp_1 = require("./timestamp");
|
||||
Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } });
|
||||
38
node_modules/@sigstore/core/dist/rfc3161/timestamp.d.ts
generated
vendored
Normal file
38
node_modules/@sigstore/core/dist/rfc3161/timestamp.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
import { ASN1Obj } from '../asn1';
|
||||
import * as crypto from '../crypto';
|
||||
import { TSTInfo } from './tstinfo';
|
||||
export declare class RFC3161Timestamp {
|
||||
root: ASN1Obj;
|
||||
constructor(asn1: ASN1Obj);
|
||||
static parse(der: Buffer): RFC3161Timestamp;
|
||||
get status(): bigint;
|
||||
get contentType(): string;
|
||||
get eContentType(): string;
|
||||
get signingTime(): Date;
|
||||
get signerIssuer(): Buffer;
|
||||
get signerSerialNumber(): Buffer;
|
||||
get signerDigestAlgorithm(): string;
|
||||
get signatureAlgorithm(): string;
|
||||
get signatureValue(): Buffer;
|
||||
get tstInfo(): TSTInfo;
|
||||
verify(data: Buffer, publicKey: crypto.KeyObject): void;
|
||||
private verifyMessageDigest;
|
||||
private verifySignature;
|
||||
private get pkiStatusInfoObj();
|
||||
private get timeStampTokenObj();
|
||||
private get contentTypeObj();
|
||||
private get signedDataObj();
|
||||
private get encapContentInfoObj();
|
||||
private get signerInfosObj();
|
||||
private get signerInfoObj();
|
||||
private get eContentTypeObj();
|
||||
private get eContentObj();
|
||||
private get signedAttrsObj();
|
||||
private get messageDigestAttributeObj();
|
||||
private get signerSidObj();
|
||||
private get signerDigestAlgorithmObj();
|
||||
private get signatureAlgorithmObj();
|
||||
private get signatureValueObj();
|
||||
}
|
||||
201
node_modules/@sigstore/core/dist/rfc3161/timestamp.js
generated
vendored
Normal file
201
node_modules/@sigstore/core/dist/rfc3161/timestamp.js
generated
vendored
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RFC3161Timestamp = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const asn1_1 = require("../asn1");
|
||||
const crypto = __importStar(require("../crypto"));
|
||||
const oid_1 = require("../oid");
|
||||
const error_1 = require("./error");
|
||||
const tstinfo_1 = require("./tstinfo");
|
||||
const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2';
|
||||
const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4';
|
||||
const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4';
|
||||
// Parsed RFC 3161 TimeStampResp (RFC 3161 §2.4.2). Wraps a decoded ASN.1
// tree and exposes the response fields via positional accessors into that
// tree; verify() checks the embedded token against an artifact and a TSA
// public key. Accessors assume a well-formed response — malformed input will
// surface as undefined-access errors rather than a typed error.
class RFC3161Timestamp {
    constructor(asn1) {
        this.root = asn1;
    }
    // Parses a DER-encoded TimeStampResp into an RFC3161Timestamp.
    static parse(der) {
        const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
        return new RFC3161Timestamp(asn1);
    }
    // PKIStatusInfo.status value from the response.
    get status() {
        return this.pkiStatusInfoObj.subs[0].toInteger();
    }
    // OID of the outer ContentInfo content type (expected: id-signedData).
    get contentType() {
        return this.contentTypeObj.toOID();
    }
    // OID of the encapsulated content type (expected: id-ct-TSTInfo).
    get eContentType() {
        return this.eContentTypeObj.toOID();
    }
    // genTime from the embedded TSTInfo.
    get signingTime() {
        return this.tstInfo.genTime;
    }
    // Raw bytes of the issuer half of the SignerIdentifier.
    get signerIssuer() {
        return this.signerSidObj.subs[0].value;
    }
    // Raw bytes of the serial-number half of the SignerIdentifier.
    get signerSerialNumber() {
        return this.signerSidObj.subs[1].value;
    }
    // Signer's digest algorithm, translated from its OID to a Node crypto
    // digest name (undefined for unrecognized OIDs).
    get signerDigestAlgorithm() {
        const oid = this.signerDigestAlgorithmObj.subs[0].toOID();
        return oid_1.SHA2_HASH_ALGOS[oid];
    }
    // Signature algorithm, translated from its OID to a Node crypto name
    // (undefined for unrecognized OIDs).
    get signatureAlgorithm() {
        const oid = this.signatureAlgorithmObj.subs[0].toOID();
        return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
    }
    // Raw signature bytes from the SignerInfo.
    get signatureValue() {
        return this.signatureValueObj.value;
    }
    get tstInfo() {
        // Need to unpack tstInfo from an OCTET STRING
        return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]);
    }
    // Verifies the timestamp token over `data` using the TSA's `publicKey`.
    // Throws RFC3161TimestampVerificationError on the first failed check;
    // returns nothing on success.
    verify(data, publicKey) {
        if (!this.timeStampTokenObj) {
            throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing');
        }
        // Check for expected ContentInfo content type
        if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) {
            throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`);
        }
        // Check for expected encapsulated content type
        if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) {
            throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`);
        }
        // Check that the tstInfo references the correct artifact
        this.tstInfo.verify(data);
        // Check that the signed message digest matches the tstInfo
        this.verifyMessageDigest();
        // Check that the signature is valid for the signed attributes
        this.verifySignature(publicKey);
    }
    // Confirms the messageDigest signed attribute equals the digest of the
    // DER-encoded TSTInfo, using the signer's declared digest algorithm.
    verifyMessageDigest() {
        // Check that the tstInfo matches the signed data
        const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw);
        const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value;
        if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) {
            throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo');
        }
    }
    // Checks the CMS signature over the signed attributes with the given key.
    verifySignature(key) {
        // Encode the signed attributes for verification
        const signedAttrs = this.signedAttrsObj.toDER();
        // Per RFC 5652 §5.4, signedAttrs is signed as an explicit SET OF
        // (tag 0x31), not with its [0] context-specific tag from the message.
        signedAttrs[0] = 0x31; // Change context-specific tag to SET
        // Check that the signature is valid for the signed attributes
        const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm);
        if (!verified) {
            throw new error_1.RFC3161TimestampVerificationError('signature verification failed');
        }
    }
    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
    get pkiStatusInfoObj() {
        // pkiStatusInfo is the first element of the timestamp response sequence
        return this.root.subs[0];
    }
    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
    get timeStampTokenObj() {
        // timeStampToken is the second (optional) element of the timestamp
        // response sequence; undefined when the TSA returned no token
        return this.root.subs[1];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-3
    get contentTypeObj() {
        return this.timeStampTokenObj.subs[0];
    }
    // https://www.rfc-editor.org/rfc/rfc5652#section-3
    get signedDataObj() {
        // SignedData is wrapped in the [0] EXPLICIT content field of ContentInfo
        const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
        return obj.subs[0];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
    get encapContentInfoObj() {
        // encapContentInfo is the third element of the SignedData sequence
        // (after version and digestAlgorithms)
        return this.signedDataObj.subs[2];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
    get signerInfosObj() {
        // SignerInfos is the last element of the signed data sequence
        const sd = this.signedDataObj;
        return sd.subs[sd.subs.length - 1];
    }
    // https://www.rfc-editor.org/rfc/rfc5652#section-5.1
    get signerInfoObj() {
        // Only supporting one signer
        return this.signerInfosObj.subs[0];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
    get eContentTypeObj() {
        return this.encapContentInfoObj.subs[0];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
    get eContentObj() {
        return this.encapContentInfoObj.subs[1];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
    get signedAttrsObj() {
        // signedAttrs carries the [0] IMPLICIT context-specific tag
        const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
        return signedAttrs;
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
    get messageDigestAttributeObj() {
        // Locate the attribute whose type OID is id-messageDigest
        const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() &&
            sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY);
        return messageDigest;
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
    get signerSidObj() {
        // sid is the second element of the SignerInfo sequence (after version)
        return this.signerInfoObj.subs[1];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
    get signerDigestAlgorithmObj() {
        // digestAlgorithm is the third element of the SignerInfo sequence
        return this.signerInfoObj.subs[2];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
    get signatureAlgorithmObj() {
        // signatureAlgorithm is the fifth element of the SignerInfo sequence
        // (index 4 assumes the optional signedAttrs field is present)
        return this.signerInfoObj.subs[4];
    }
    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
    get signatureValueObj() {
        // signature is the sixth element of the SignerInfo sequence
        // (index 5 assumes the optional signedAttrs field is present)
        return this.signerInfoObj.subs[5];
    }
}
exports.RFC3161Timestamp = RFC3161Timestamp;
|
||||
13
node_modules/@sigstore/core/dist/rfc3161/tstinfo.d.ts
generated
vendored
Normal file
13
node_modules/@sigstore/core/dist/rfc3161/tstinfo.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
/// <reference types="node" />
|
||||
import { ASN1Obj } from '../asn1';
|
||||
export declare class TSTInfo {
|
||||
root: ASN1Obj;
|
||||
constructor(asn1: ASN1Obj);
|
||||
get version(): bigint;
|
||||
get genTime(): Date;
|
||||
get messageImprintHashAlgorithm(): string;
|
||||
get messageImprintHashedMessage(): Buffer;
|
||||
get raw(): Buffer;
|
||||
verify(data: Buffer): void;
|
||||
private get messageImprintObj();
|
||||
}
|
||||
61
node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
generated
vendored
Normal file
61
node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
generated
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TSTInfo = void 0;
|
||||
const crypto = __importStar(require("../crypto"));
|
||||
const oid_1 = require("../oid");
|
||||
const error_1 = require("./error");
|
||||
// Parsed TSTInfo structure embedded in an RFC 3161 timestamp token
// (RFC 3161 §2.4.2). Wraps a decoded ASN.1 object and reads its fields by
// position; malformed input surfaces as undefined-access errors.
class TSTInfo {
    constructor(asn1) {
        this.root = asn1;
    }
    // TSTInfo.version — first element of the sequence.
    get version() {
        return this.root.subs[0].toInteger();
    }
    // TSTInfo.genTime — fifth element of the sequence.
    get genTime() {
        return this.root.subs[4].toDate();
    }
    // Digest algorithm from messageImprint.hashAlgorithm, translated from its
    // OID to a Node crypto digest name (undefined for unrecognized OIDs).
    get messageImprintHashAlgorithm() {
        const oid = this.messageImprintObj.subs[0].subs[0].toOID();
        return oid_1.SHA2_HASH_ALGOS[oid];
    }
    // Raw digest bytes from messageImprint.hashedMessage.
    get messageImprintHashedMessage() {
        return this.messageImprintObj.subs[1].value;
    }
    // Re-serialized DER encoding of the whole TSTInfo structure.
    get raw() {
        return this.root.toDER();
    }
    // Verifies that hashing `data` with the messageImprint algorithm yields
    // the digest recorded in the messageImprint; throws on mismatch.
    verify(data) {
        const digest = crypto.digest(this.messageImprintHashAlgorithm, data);
        if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) {
            throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact');
        }
    }
    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
    // messageImprint is the third element of the TSTInfo sequence.
    get messageImprintObj() {
        return this.root.subs[2];
    }
}
exports.TSTInfo = TSTInfo;
|
||||
22
node_modules/@sigstore/core/dist/stream.d.ts
generated
vendored
Normal file
22
node_modules/@sigstore/core/dist/stream.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
/// <reference types="node" />
|
||||
export declare class ByteStream {
|
||||
private static BLOCK_SIZE;
|
||||
private buf;
|
||||
private view;
|
||||
private start;
|
||||
constructor(buffer?: ArrayBuffer);
|
||||
get buffer(): Buffer;
|
||||
get length(): number;
|
||||
get position(): number;
|
||||
seek(position: number): void;
|
||||
slice(start: number, len: number): Buffer;
|
||||
appendChar(char: number): void;
|
||||
appendUint16(num: number): void;
|
||||
appendUint24(num: number): void;
|
||||
appendView(view: Uint8Array): void;
|
||||
getBlock(size: number): Buffer;
|
||||
getUint8(): number;
|
||||
getUint16(): number;
|
||||
private ensureCapacity;
|
||||
private realloc;
|
||||
}
|
||||
115
node_modules/@sigstore/core/dist/stream.js
generated
vendored
Normal file
115
node_modules/@sigstore/core/dist/stream.js
generated
vendored
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ByteStream = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
// Error type for out-of-range stream reads; thrown by slice() and getBlock().
class StreamError extends Error {
}
// Growable byte buffer with a single read/write cursor (`start`). The
// append* methods write at the cursor, growing the backing ArrayBuffer on
// demand; the get* methods read at the cursor and advance it. Multi-byte
// integers are written/read in big-endian order.
class ByteStream {
    constructor(buffer) {
        this.start = 0;
        if (buffer) {
            this.buf = buffer;
            this.view = Buffer.from(buffer);
        }
        else {
            this.buf = new ArrayBuffer(0);
            this.view = Buffer.from(this.buf);
        }
    }
    // The bytes written so far (everything before the cursor).
    get buffer() {
        return this.view.subarray(0, this.start);
    }
    // Total capacity of the backing buffer — NOT the number of bytes written.
    get length() {
        return this.view.byteLength;
    }
    // Current cursor position.
    get position() {
        return this.start;
    }
    // Moves the cursor to an absolute position (no bounds check).
    seek(position) {
        this.start = position;
    }
    // Returns a Buffer containing the specified number of bytes starting at the
    // given start position. The result is a view (not a copy) into the backing
    // buffer; the cursor is not moved.
    slice(start, len) {
        const end = start + len;
        if (end > this.length) {
            throw new StreamError('request past end of buffer');
        }
        return this.view.subarray(start, end);
    }
    // Appends a single byte at the cursor.
    appendChar(char) {
        this.ensureCapacity(1);
        this.view[this.start] = char;
        this.start += 1;
    }
    // Appends a 16-bit integer in big-endian order.
    // NOTE(review): the byte swap via Uint16Array assumes a little-endian
    // host, which holds on all platforms Node officially supports.
    appendUint16(num) {
        this.ensureCapacity(2);
        const value = new Uint16Array([num]);
        const view = new Uint8Array(value.buffer);
        this.view[this.start] = view[1];
        this.view[this.start + 1] = view[0];
        this.start += 2;
    }
    // Appends a 24-bit integer in big-endian order (same little-endian host
    // assumption as appendUint16).
    appendUint24(num) {
        this.ensureCapacity(3);
        const value = new Uint32Array([num]);
        const view = new Uint8Array(value.buffer);
        this.view[this.start] = view[2];
        this.view[this.start + 1] = view[1];
        this.view[this.start + 2] = view[0];
        this.start += 3;
    }
    // Appends the contents of a Uint8Array at the cursor.
    appendView(view) {
        this.ensureCapacity(view.length);
        this.view.set(view, this.start);
        this.start += view.length;
    }
    // Reads `size` bytes at the cursor and advances it. Returns a view (not a
    // copy) into the backing buffer; size <= 0 yields an empty Buffer.
    getBlock(size) {
        if (size <= 0) {
            return Buffer.alloc(0);
        }
        if (this.start + size > this.view.length) {
            // FIX: previously threw a plain Error here while slice() threw
            // StreamError for the same condition; use StreamError consistently
            // so callers can recognize stream range errors by type.
            throw new StreamError('request past end of buffer');
        }
        const result = this.view.subarray(this.start, this.start + size);
        this.start += size;
        return result;
    }
    // Reads one byte.
    getUint8() {
        return this.getBlock(1)[0];
    }
    // Reads a big-endian 16-bit integer.
    getUint16() {
        const block = this.getBlock(2);
        return (block[0] << 8) | block[1];
    }
    // Grows the backing buffer when fewer than `size` bytes remain past the
    // cursor. Growth is BLOCK_SIZE, plus `size` for oversized writes.
    ensureCapacity(size) {
        if (this.start + size > this.view.byteLength) {
            const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0);
            this.realloc(this.view.byteLength + blockSize);
        }
    }
    // Replaces the backing buffer with a larger one, copying existing content.
    realloc(size) {
        const newArray = new ArrayBuffer(size);
        const newView = Buffer.from(newArray);
        // Copy the old buffer into the new one
        newView.set(this.view);
        this.buf = newArray;
        this.view = newView;
    }
}
|
||||
exports.ByteStream = ByteStream;
|
||||
ByteStream.BLOCK_SIZE = 1024;
|
||||
44
node_modules/@sigstore/core/dist/x509/cert.d.ts
generated
vendored
Normal file
44
node_modules/@sigstore/core/dist/x509/cert.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
/// <reference types="node" />
|
||||
import { ASN1Obj } from '../asn1';
|
||||
import { X509AuthorityKeyIDExtension, X509BasicConstraintsExtension, X509Extension, X509KeyUsageExtension, X509SCTExtension, X509SubjectAlternativeNameExtension, X509SubjectKeyIDExtension } from './ext';
|
||||
export declare const EXTENSION_OID_SCT = "1.3.6.1.4.1.11129.2.4.2";
|
||||
export declare class X509Certificate {
|
||||
root: ASN1Obj;
|
||||
constructor(asn1: ASN1Obj);
|
||||
static parse(cert: Buffer | string): X509Certificate;
|
||||
get tbsCertificate(): ASN1Obj;
|
||||
get version(): string;
|
||||
get serialNumber(): Buffer;
|
||||
get notBefore(): Date;
|
||||
get notAfter(): Date;
|
||||
get issuer(): Buffer;
|
||||
get subject(): Buffer;
|
||||
get publicKey(): Buffer;
|
||||
get signatureAlgorithm(): string;
|
||||
get signatureValue(): Buffer;
|
||||
get subjectAltName(): string | undefined;
|
||||
get extensions(): ASN1Obj[];
|
||||
get extKeyUsage(): X509KeyUsageExtension | undefined;
|
||||
get extBasicConstraints(): X509BasicConstraintsExtension | undefined;
|
||||
get extSubjectAltName(): X509SubjectAlternativeNameExtension | undefined;
|
||||
get extAuthorityKeyID(): X509AuthorityKeyIDExtension | undefined;
|
||||
get extSubjectKeyID(): X509SubjectKeyIDExtension | undefined;
|
||||
get extSCT(): X509SCTExtension | undefined;
|
||||
get isCA(): boolean;
|
||||
extension(oid: string): X509Extension | undefined;
|
||||
verify(issuerCertificate?: X509Certificate): boolean;
|
||||
validForDate(date: Date): boolean;
|
||||
equals(other: X509Certificate): boolean;
|
||||
clone(): X509Certificate;
|
||||
private findExtension;
|
||||
private get tbsCertificateObj();
|
||||
private get signatureAlgorithmObj();
|
||||
private get signatureValueObj();
|
||||
private get versionObj();
|
||||
private get serialNumberObj();
|
||||
private get issuerObj();
|
||||
private get validityObj();
|
||||
private get subjectObj();
|
||||
private get subjectPublicKeyInfoObj();
|
||||
private get extensionsObj();
|
||||
}
|
||||
226
node_modules/@sigstore/core/dist/x509/cert.js
generated
vendored
Normal file
226
node_modules/@sigstore/core/dist/x509/cert.js
generated
vendored
Normal file
|
|
@ -0,0 +1,226 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const asn1_1 = require("../asn1");
|
||||
const crypto = __importStar(require("../crypto"));
|
||||
const oid_1 = require("../oid");
|
||||
const pem = __importStar(require("../pem"));
|
||||
const ext_1 = require("./ext");
|
||||
const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14';
|
||||
const EXTENSION_OID_KEY_USAGE = '2.5.29.15';
|
||||
const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17';
|
||||
const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19';
|
||||
const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35';
|
||||
exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2';
|
||||
class X509Certificate {
|
||||
constructor(asn1) {
|
||||
this.root = asn1;
|
||||
}
|
||||
static parse(cert) {
|
||||
const der = typeof cert === 'string' ? pem.toDER(cert) : cert;
|
||||
const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
|
||||
return new X509Certificate(asn1);
|
||||
}
|
||||
get tbsCertificate() {
|
||||
return this.tbsCertificateObj;
|
||||
}
|
||||
get version() {
|
||||
// version number is the first element of the version context specific tag
|
||||
const ver = this.versionObj.subs[0].toInteger();
|
||||
return `v${(ver + BigInt(1)).toString()}`;
|
||||
}
|
||||
get serialNumber() {
|
||||
return this.serialNumberObj.value;
|
||||
}
|
||||
get notBefore() {
|
||||
// notBefore is the first element of the validity sequence
|
||||
return this.validityObj.subs[0].toDate();
|
||||
}
|
||||
get notAfter() {
|
||||
// notAfter is the second element of the validity sequence
|
||||
return this.validityObj.subs[1].toDate();
|
||||
}
|
||||
get issuer() {
|
||||
return this.issuerObj.value;
|
||||
}
|
||||
get subject() {
|
||||
return this.subjectObj.value;
|
||||
}
|
||||
get publicKey() {
|
||||
return this.subjectPublicKeyInfoObj.toDER();
|
||||
}
|
||||
get signatureAlgorithm() {
|
||||
const oid = this.signatureAlgorithmObj.subs[0].toOID();
|
||||
return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
|
||||
}
|
||||
get signatureValue() {
|
||||
// Signature value is a bit string, so we need to skip the first byte
|
||||
return this.signatureValueObj.value.subarray(1);
|
||||
}
|
||||
get subjectAltName() {
|
||||
const ext = this.extSubjectAltName;
|
||||
return ext?.uri || ext?.rfc822Name;
|
||||
}
|
||||
get extensions() {
|
||||
// The extension list is the first (and only) element of the extensions
|
||||
// context specific tag
|
||||
const extSeq = this.extensionsObj?.subs[0];
|
||||
return extSeq?.subs || /* istanbul ignore next */ [];
|
||||
}
|
||||
get extKeyUsage() {
|
||||
const ext = this.findExtension(EXTENSION_OID_KEY_USAGE);
|
||||
return ext ? new ext_1.X509KeyUsageExtension(ext) : undefined;
|
||||
}
|
||||
get extBasicConstraints() {
|
||||
const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS);
|
||||
return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined;
|
||||
}
|
||||
get extSubjectAltName() {
|
||||
const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME);
|
||||
return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined;
|
||||
}
|
||||
get extAuthorityKeyID() {
|
||||
const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID);
|
||||
return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined;
|
||||
}
|
||||
get extSubjectKeyID() {
|
||||
const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID);
|
||||
return ext
|
||||
? new ext_1.X509SubjectKeyIDExtension(ext)
|
||||
: /* istanbul ignore next */ undefined;
|
||||
}
|
||||
get extSCT() {
|
||||
const ext = this.findExtension(exports.EXTENSION_OID_SCT);
|
||||
return ext ? new ext_1.X509SCTExtension(ext) : undefined;
|
||||
}
|
||||
get isCA() {
|
||||
const ca = this.extBasicConstraints?.isCA || false;
|
||||
// If the KeyUsage extension is present, keyCertSign must be set
|
||||
if (this.extKeyUsage) {
|
||||
ca && this.extKeyUsage.keyCertSign;
|
||||
}
|
||||
return ca;
|
||||
}
|
||||
extension(oid) {
|
||||
const ext = this.findExtension(oid);
|
||||
return ext ? new ext_1.X509Extension(ext) : undefined;
|
||||
}
|
||||
verify(issuerCertificate) {
|
||||
// Use the issuer's public key if provided, otherwise use the subject's
|
||||
const publicKey = issuerCertificate?.publicKey || this.publicKey;
|
||||
const key = crypto.createPublicKey(publicKey);
|
||||
return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm);
|
||||
}
|
||||
validForDate(date) {
|
||||
return this.notBefore <= date && date <= this.notAfter;
|
||||
}
|
||||
equals(other) {
|
||||
return this.root.toDER().equals(other.root.toDER());
|
||||
}
|
||||
// Creates a copy of the certificate with a new buffer
|
||||
clone() {
|
||||
const der = this.root.toDER();
|
||||
const clone = Buffer.alloc(der.length);
|
||||
der.copy(clone);
|
||||
return X509Certificate.parse(clone);
|
||||
}
|
||||
findExtension(oid) {
|
||||
// Find the extension with the given OID. The OID will always be the first
|
||||
// element of the extension sequence
|
||||
return this.extensions.find((ext) => ext.subs[0].toOID() === oid);
|
||||
}
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
// The following properties use the documented x509 structure to locate the
|
||||
// desired ASN.1 object
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1
|
||||
get tbsCertificateObj() {
|
||||
// tbsCertificate is the first element of the certificate sequence
|
||||
return this.root.subs[0];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2
|
||||
get signatureAlgorithmObj() {
|
||||
// signatureAlgorithm is the second element of the certificate sequence
|
||||
return this.root.subs[1];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3
|
||||
get signatureValueObj() {
|
||||
// signatureValue is the third element of the certificate sequence
|
||||
return this.root.subs[2];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1
|
||||
get versionObj() {
|
||||
// version is the first element of the tbsCertificate sequence
|
||||
return this.tbsCertificateObj.subs[0];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2
|
||||
get serialNumberObj() {
|
||||
// serialNumber is the second element of the tbsCertificate sequence
|
||||
return this.tbsCertificateObj.subs[1];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4
|
||||
get issuerObj() {
|
||||
// issuer is the fourth element of the tbsCertificate sequence
|
||||
return this.tbsCertificateObj.subs[3];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5
|
||||
get validityObj() {
|
||||
// version is the fifth element of the tbsCertificate sequence
|
||||
return this.tbsCertificateObj.subs[4];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6
|
||||
get subjectObj() {
|
||||
// subject is the sixth element of the tbsCertificate sequence
|
||||
return this.tbsCertificateObj.subs[5];
|
||||
}
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7
|
||||
get subjectPublicKeyInfoObj() {
|
||||
// subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence
|
||||
return this.tbsCertificateObj.subs[6];
|
||||
}
|
||||
// Extensions can't be located by index because their position varies. Instead,
|
||||
// we need to find the extensions context specific tag
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9
|
||||
get extensionsObj() {
|
||||
return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03));
|
||||
}
|
||||
}
|
||||
exports.X509Certificate = X509Certificate;
|
||||
42
node_modules/@sigstore/core/dist/x509/ext.d.ts
generated
vendored
Normal file
42
node_modules/@sigstore/core/dist/x509/ext.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
/// <reference types="node" />
|
||||
import { ASN1Obj } from '../asn1';
|
||||
import { SignedCertificateTimestamp } from './sct';
|
||||
export declare class X509Extension {
|
||||
protected root: ASN1Obj;
|
||||
constructor(asn1: ASN1Obj);
|
||||
get oid(): string;
|
||||
get critical(): boolean;
|
||||
get value(): Buffer;
|
||||
get valueObj(): ASN1Obj;
|
||||
protected get extnValueObj(): ASN1Obj;
|
||||
}
|
||||
export declare class X509BasicConstraintsExtension extends X509Extension {
|
||||
get isCA(): boolean;
|
||||
get pathLenConstraint(): bigint | undefined;
|
||||
private get sequence();
|
||||
}
|
||||
export declare class X509KeyUsageExtension extends X509Extension {
|
||||
get digitalSignature(): boolean;
|
||||
get keyCertSign(): boolean;
|
||||
get crlSign(): boolean;
|
||||
private get bitString();
|
||||
}
|
||||
export declare class X509SubjectAlternativeNameExtension extends X509Extension {
|
||||
get rfc822Name(): string | undefined;
|
||||
get uri(): string | undefined;
|
||||
otherName(oid: string): string | undefined;
|
||||
private findGeneralName;
|
||||
private get generalNames();
|
||||
}
|
||||
export declare class X509AuthorityKeyIDExtension extends X509Extension {
|
||||
get keyIdentifier(): Buffer | undefined;
|
||||
private findSequenceMember;
|
||||
private get sequence();
|
||||
}
|
||||
export declare class X509SubjectKeyIDExtension extends X509Extension {
|
||||
get keyIdentifier(): Buffer;
|
||||
}
|
||||
export declare class X509SCTExtension extends X509Extension {
|
||||
constructor(asn1: ASN1Obj);
|
||||
get signedCertificateTimestamps(): SignedCertificateTimestamp[];
|
||||
}
|
||||
145
node_modules/@sigstore/core/dist/x509/ext.js
generated
vendored
Normal file
145
node_modules/@sigstore/core/dist/x509/ext.js
generated
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0;
|
||||
const stream_1 = require("../stream");
|
||||
const sct_1 = require("./sct");
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1
|
||||
class X509Extension {
|
||||
constructor(asn1) {
|
||||
this.root = asn1;
|
||||
}
|
||||
get oid() {
|
||||
return this.root.subs[0].toOID();
|
||||
}
|
||||
get critical() {
|
||||
// The critical field is optional and will be the second element of the
|
||||
// extension sequence if present. Default to false if not present.
|
||||
return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false;
|
||||
}
|
||||
get value() {
|
||||
return this.extnValueObj.value;
|
||||
}
|
||||
get valueObj() {
|
||||
return this.extnValueObj;
|
||||
}
|
||||
get extnValueObj() {
|
||||
// The extnValue field will be the last element of the extension sequence
|
||||
return this.root.subs[this.root.subs.length - 1];
|
||||
}
|
||||
}
|
||||
exports.X509Extension = X509Extension;
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9
|
||||
class X509BasicConstraintsExtension extends X509Extension {
|
||||
get isCA() {
|
||||
return this.sequence.subs[0]?.toBoolean() ?? false;
|
||||
}
|
||||
get pathLenConstraint() {
|
||||
return this.sequence.subs.length > 1
|
||||
? this.sequence.subs[1].toInteger()
|
||||
: undefined;
|
||||
}
|
||||
// The extnValue field contains a single sequence wrapping the isCA and
|
||||
// pathLenConstraint.
|
||||
get sequence() {
|
||||
return this.extnValueObj.subs[0];
|
||||
}
|
||||
}
|
||||
exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension;
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3
|
||||
class X509KeyUsageExtension extends X509Extension {
|
||||
get digitalSignature() {
|
||||
return this.bitString[0] === 1;
|
||||
}
|
||||
get keyCertSign() {
|
||||
return this.bitString[5] === 1;
|
||||
}
|
||||
get crlSign() {
|
||||
return this.bitString[6] === 1;
|
||||
}
|
||||
// The extnValue field contains a single bit string which is a bit mask
|
||||
// indicating which key usages are enabled.
|
||||
get bitString() {
|
||||
return this.extnValueObj.subs[0].toBitString();
|
||||
}
|
||||
}
|
||||
exports.X509KeyUsageExtension = X509KeyUsageExtension;
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6
|
||||
class X509SubjectAlternativeNameExtension extends X509Extension {
|
||||
get rfc822Name() {
|
||||
return this.findGeneralName(0x01)?.value.toString('ascii');
|
||||
}
|
||||
get uri() {
|
||||
return this.findGeneralName(0x06)?.value.toString('ascii');
|
||||
}
|
||||
// Retrieve the value of an otherName with the given OID.
|
||||
otherName(oid) {
|
||||
const otherName = this.findGeneralName(0x00);
|
||||
if (otherName === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
// The otherName is a sequence containing an OID and a value.
|
||||
// Need to check that the OID matches the one we're looking for.
|
||||
const otherNameOID = otherName.subs[0].toOID();
|
||||
if (otherNameOID !== oid) {
|
||||
return undefined;
|
||||
}
|
||||
// The otherNameValue is a sequence containing the actual value.
|
||||
const otherNameValue = otherName.subs[1];
|
||||
return otherNameValue.subs[0].value.toString('ascii');
|
||||
}
|
||||
findGeneralName(tag) {
|
||||
return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag));
|
||||
}
|
||||
// The extnValue field contains a sequence of GeneralNames.
|
||||
get generalNames() {
|
||||
return this.extnValueObj.subs[0].subs;
|
||||
}
|
||||
}
|
||||
exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension;
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1
|
||||
class X509AuthorityKeyIDExtension extends X509Extension {
|
||||
get keyIdentifier() {
|
||||
return this.findSequenceMember(0x00)?.value;
|
||||
}
|
||||
findSequenceMember(tag) {
|
||||
return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag));
|
||||
}
|
||||
// The extnValue field contains a single sequence wrapping the keyIdentifier
|
||||
get sequence() {
|
||||
return this.extnValueObj.subs[0];
|
||||
}
|
||||
}
|
||||
exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension;
|
||||
// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2
|
||||
class X509SubjectKeyIDExtension extends X509Extension {
|
||||
get keyIdentifier() {
|
||||
return this.extnValueObj.subs[0].value;
|
||||
}
|
||||
}
|
||||
exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension;
|
||||
// https://www.rfc-editor.org/rfc/rfc6962#section-3.3
|
||||
class X509SCTExtension extends X509Extension {
|
||||
constructor(asn1) {
|
||||
super(asn1);
|
||||
}
|
||||
get signedCertificateTimestamps() {
|
||||
const buf = this.extnValueObj.subs[0].value;
|
||||
const stream = new stream_1.ByteStream(buf);
|
||||
// The overall list length is encoded in the first two bytes -- note this
|
||||
// is the length of the list in bytes, NOT the number of SCTs in the list
|
||||
const end = stream.getUint16() + 2;
|
||||
const sctList = [];
|
||||
while (stream.position < end) {
|
||||
// Read the length of the next SCT
|
||||
const sctLength = stream.getUint16();
|
||||
// Slice out the bytes for the next SCT and parse it
|
||||
const sct = stream.getBlock(sctLength);
|
||||
sctList.push(sct_1.SignedCertificateTimestamp.parse(sct));
|
||||
}
|
||||
if (stream.position !== end) {
|
||||
throw new Error('SCT list length does not match actual length');
|
||||
}
|
||||
return sctList;
|
||||
}
|
||||
}
|
||||
exports.X509SCTExtension = X509SCTExtension;
|
||||
2
node_modules/@sigstore/core/dist/x509/index.d.ts
generated
vendored
Normal file
2
node_modules/@sigstore/core/dist/x509/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export { EXTENSION_OID_SCT, X509Certificate } from './cert';
|
||||
export { X509SCTExtension } from './ext';
|
||||
23
node_modules/@sigstore/core/dist/x509/index.js
generated
vendored
Normal file
23
node_modules/@sigstore/core/dist/x509/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
"use strict";
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
|
||||
var cert_1 = require("./cert");
|
||||
Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } });
|
||||
Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } });
|
||||
var ext_1 = require("./ext");
|
||||
Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } });
|
||||
27
node_modules/@sigstore/core/dist/x509/sct.d.ts
generated
vendored
Normal file
27
node_modules/@sigstore/core/dist/x509/sct.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
import * as crypto from '../crypto';
|
||||
interface SCTOptions {
|
||||
version: number;
|
||||
logID: Buffer;
|
||||
timestamp: Buffer;
|
||||
extensions: Buffer;
|
||||
hashAlgorithm: number;
|
||||
signatureAlgorithm: number;
|
||||
signature: Buffer;
|
||||
}
|
||||
export declare class SignedCertificateTimestamp {
|
||||
readonly version: number;
|
||||
readonly logID: Buffer;
|
||||
readonly timestamp: Buffer;
|
||||
readonly extensions: Buffer;
|
||||
readonly hashAlgorithm: number;
|
||||
readonly signatureAlgorithm: number;
|
||||
readonly signature: Buffer;
|
||||
constructor(options: SCTOptions);
|
||||
get datetime(): Date;
|
||||
get algorithm(): string;
|
||||
verify(preCert: Buffer, key: crypto.KeyObject): boolean;
|
||||
static parse(buf: Buffer): SignedCertificateTimestamp;
|
||||
}
|
||||
export {};
|
||||
141
node_modules/@sigstore/core/dist/x509/sct.js
generated
vendored
Normal file
141
node_modules/@sigstore/core/dist/x509/sct.js
generated
vendored
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SignedCertificateTimestamp = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const crypto = __importStar(require("../crypto"));
|
||||
const stream_1 = require("../stream");
|
||||
class SignedCertificateTimestamp {
|
||||
constructor(options) {
|
||||
this.version = options.version;
|
||||
this.logID = options.logID;
|
||||
this.timestamp = options.timestamp;
|
||||
this.extensions = options.extensions;
|
||||
this.hashAlgorithm = options.hashAlgorithm;
|
||||
this.signatureAlgorithm = options.signatureAlgorithm;
|
||||
this.signature = options.signature;
|
||||
}
|
||||
get datetime() {
|
||||
return new Date(Number(this.timestamp.readBigInt64BE()));
|
||||
}
|
||||
// Returns the hash algorithm used to generate the SCT's signature.
|
||||
// https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
|
||||
get algorithm() {
|
||||
switch (this.hashAlgorithm) {
|
||||
/* istanbul ignore next */
|
||||
case 0:
|
||||
return 'none';
|
||||
/* istanbul ignore next */
|
||||
case 1:
|
||||
return 'md5';
|
||||
/* istanbul ignore next */
|
||||
case 2:
|
||||
return 'sha1';
|
||||
/* istanbul ignore next */
|
||||
case 3:
|
||||
return 'sha224';
|
||||
case 4:
|
||||
return 'sha256';
|
||||
/* istanbul ignore next */
|
||||
case 5:
|
||||
return 'sha384';
|
||||
/* istanbul ignore next */
|
||||
case 6:
|
||||
return 'sha512';
|
||||
/* istanbul ignore next */
|
||||
default:
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
verify(preCert, key) {
|
||||
// Assemble the digitally-signed struct (the data over which the signature
|
||||
// was generated).
|
||||
// https://www.rfc-editor.org/rfc/rfc6962#section-3.2
|
||||
const stream = new stream_1.ByteStream();
|
||||
stream.appendChar(this.version);
|
||||
stream.appendChar(0x00); // SignatureType = certificate_timestamp(0)
|
||||
stream.appendView(this.timestamp);
|
||||
stream.appendUint16(0x01); // LogEntryType = precert_entry(1)
|
||||
stream.appendView(preCert);
|
||||
stream.appendUint16(this.extensions.byteLength);
|
||||
/* istanbul ignore next - extensions are very uncommon */
|
||||
if (this.extensions.byteLength > 0) {
|
||||
stream.appendView(this.extensions);
|
||||
}
|
||||
return crypto.verify(stream.buffer, key, this.signature, this.algorithm);
|
||||
}
|
||||
// Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using
|
||||
// TLS encoding which means the fields and lengths of most fields are
|
||||
// specified as part of the SCT and TLS specs.
|
||||
// https://www.rfc-editor.org/rfc/rfc6962#section-3.2
|
||||
// https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
|
||||
static parse(buf) {
|
||||
const stream = new stream_1.ByteStream(buf);
|
||||
// Version - enum { v1(0), (255) }
|
||||
const version = stream.getUint8();
|
||||
// Log ID - struct { opaque key_id[32]; }
|
||||
const logID = stream.getBlock(32);
|
||||
// Timestamp - uint64
|
||||
const timestamp = stream.getBlock(8);
|
||||
// Extensions - opaque extensions<0..2^16-1>;
|
||||
const extenstionLength = stream.getUint16();
|
||||
const extensions = stream.getBlock(extenstionLength);
|
||||
// Hash algo - enum { sha256(4), . . . (255) }
|
||||
const hashAlgorithm = stream.getUint8();
|
||||
// Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) }
|
||||
const signatureAlgorithm = stream.getUint8();
|
||||
// Signature - opaque signature<0..2^16-1>;
|
||||
const sigLength = stream.getUint16();
|
||||
const signature = stream.getBlock(sigLength);
|
||||
// Check that we read the entire buffer
|
||||
if (stream.position !== buf.length) {
|
||||
throw new Error('SCT buffer length mismatch');
|
||||
}
|
||||
return new SignedCertificateTimestamp({
|
||||
version,
|
||||
logID,
|
||||
timestamp,
|
||||
extensions,
|
||||
hashAlgorithm,
|
||||
signatureAlgorithm,
|
||||
signature,
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.SignedCertificateTimestamp = SignedCertificateTimestamp;
|
||||
31
node_modules/@sigstore/core/package.json
generated
vendored
Normal file
31
node_modules/@sigstore/core/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
{
|
||||
"name": "@sigstore/core",
|
||||
"version": "1.1.0",
|
||||
"description": "Base library for Sigstore",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"clean": "shx rm -rf dist *.tsbuildinfo",
|
||||
"build": "tsc --build",
|
||||
"test": "jest"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"author": "bdehamer@github.com",
|
||||
"license": "Apache-2.0",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/sigstore/sigstore-js.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sigstore/sigstore-js/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme",
|
||||
"publishConfig": {
|
||||
"provenance": true
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.14.0 || >=18.0.0"
|
||||
}
|
||||
}
|
||||
202
node_modules/@sigstore/protobuf-specs/LICENSE
generated
vendored
Normal file
202
node_modules/@sigstore/protobuf-specs/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2023 The Sigstore Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
5
node_modules/@sigstore/protobuf-specs/README.md
generated
vendored
Normal file
5
node_modules/@sigstore/protobuf-specs/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# @sigstore/protobuf-specs
|
||||
|
||||
TypeScript language bindings for Sigstore's protobuf specs.
|
||||
|
||||
See the [repository's README](https://github.com/sigstore/protobuf-specs) for more information.
|
||||
46
node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts
generated
vendored
Normal file
46
node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
/// <reference types="node" />
|
||||
/** An authenticated message of arbitrary type. */
|
||||
export interface Envelope {
|
||||
/**
|
||||
* Message to be signed. (In JSON, this is encoded as base64.)
|
||||
* REQUIRED.
|
||||
*/
|
||||
payload: Buffer;
|
||||
/**
|
||||
* String unambiguously identifying how to interpret payload.
|
||||
* REQUIRED.
|
||||
*/
|
||||
payloadType: string;
|
||||
/**
|
||||
* Signature over:
|
||||
* PAE(type, payload)
|
||||
* Where PAE is defined as:
|
||||
* PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload
|
||||
* + = concatenation
|
||||
* SP = ASCII space [0x20]
|
||||
* "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31]
|
||||
* LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros
|
||||
* REQUIRED (length >= 1).
|
||||
*/
|
||||
signatures: Signature[];
|
||||
}
|
||||
export interface Signature {
|
||||
/**
|
||||
* Signature itself. (In JSON, this is encoded as base64.)
|
||||
* REQUIRED.
|
||||
*/
|
||||
sig: Buffer;
|
||||
/**
|
||||
* Unauthenticated* hint identifying which public key was used.
|
||||
* OPTIONAL.
|
||||
*/
|
||||
keyid: string;
|
||||
}
|
||||
export declare const Envelope: {
|
||||
fromJSON(object: any): Envelope;
|
||||
toJSON(message: Envelope): unknown;
|
||||
};
|
||||
export declare const Signature: {
|
||||
fromJSON(object: any): Signature;
|
||||
toJSON(message: Signature): unknown;
|
||||
};
|
||||
89
node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
generated
vendored
Normal file
89
node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
generated
vendored
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
"use strict";
|
||||
/* eslint-disable */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Signature = exports.Envelope = void 0;
|
||||
function createBaseEnvelope() {
|
||||
return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
|
||||
}
|
||||
exports.Envelope = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
|
||||
payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
|
||||
signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.payload !== undefined &&
|
||||
(obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
|
||||
message.payloadType !== undefined && (obj.payloadType = message.payloadType);
|
||||
if (message.signatures) {
|
||||
obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.signatures = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseSignature() {
|
||||
return { sig: Buffer.alloc(0), keyid: "" };
|
||||
}
|
||||
exports.Signature = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
|
||||
keyid: isSet(object.keyid) ? String(object.keyid) : "",
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
|
||||
message.keyid !== undefined && (obj.keyid = message.keyid);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
function bytesFromBase64(b64) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
|
||||
}
|
||||
else {
|
||||
const bin = tsProtoGlobalThis.atob(b64);
|
||||
const arr = new Uint8Array(bin.length);
|
||||
for (let i = 0; i < bin.length; ++i) {
|
||||
arr[i] = bin.charCodeAt(i);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
}
|
||||
function base64FromBytes(arr) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
|
||||
}
|
||||
else {
|
||||
const bin = [];
|
||||
arr.forEach((byte) => {
|
||||
bin.push(String.fromCharCode(byte));
|
||||
});
|
||||
return tsProtoGlobalThis.btoa(bin.join(""));
|
||||
}
|
||||
}
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
71
node_modules/@sigstore/protobuf-specs/dist/__generated__/events.d.ts
generated
vendored
Normal file
71
node_modules/@sigstore/protobuf-specs/dist/__generated__/events.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
/// <reference types="node" />
|
||||
import { Any } from "./google/protobuf/any";
|
||||
export interface CloudEvent {
|
||||
/** Required Attributes */
|
||||
id: string;
|
||||
/** URI-reference */
|
||||
source: string;
|
||||
specVersion: string;
|
||||
type: string;
|
||||
/** Optional & Extension Attributes */
|
||||
attributes: {
|
||||
[key: string]: CloudEvent_CloudEventAttributeValue;
|
||||
};
|
||||
data?: {
|
||||
$case: "binaryData";
|
||||
binaryData: Buffer;
|
||||
} | {
|
||||
$case: "textData";
|
||||
textData: string;
|
||||
} | {
|
||||
$case: "protoData";
|
||||
protoData: Any;
|
||||
};
|
||||
}
|
||||
export interface CloudEvent_AttributesEntry {
|
||||
key: string;
|
||||
value: CloudEvent_CloudEventAttributeValue | undefined;
|
||||
}
|
||||
export interface CloudEvent_CloudEventAttributeValue {
|
||||
attr?: {
|
||||
$case: "ceBoolean";
|
||||
ceBoolean: boolean;
|
||||
} | {
|
||||
$case: "ceInteger";
|
||||
ceInteger: number;
|
||||
} | {
|
||||
$case: "ceString";
|
||||
ceString: string;
|
||||
} | {
|
||||
$case: "ceBytes";
|
||||
ceBytes: Buffer;
|
||||
} | {
|
||||
$case: "ceUri";
|
||||
ceUri: string;
|
||||
} | {
|
||||
$case: "ceUriRef";
|
||||
ceUriRef: string;
|
||||
} | {
|
||||
$case: "ceTimestamp";
|
||||
ceTimestamp: Date;
|
||||
};
|
||||
}
|
||||
export interface CloudEventBatch {
|
||||
events: CloudEvent[];
|
||||
}
|
||||
export declare const CloudEvent: {
|
||||
fromJSON(object: any): CloudEvent;
|
||||
toJSON(message: CloudEvent): unknown;
|
||||
};
|
||||
export declare const CloudEvent_AttributesEntry: {
|
||||
fromJSON(object: any): CloudEvent_AttributesEntry;
|
||||
toJSON(message: CloudEvent_AttributesEntry): unknown;
|
||||
};
|
||||
export declare const CloudEvent_CloudEventAttributeValue: {
|
||||
fromJSON(object: any): CloudEvent_CloudEventAttributeValue;
|
||||
toJSON(message: CloudEvent_CloudEventAttributeValue): unknown;
|
||||
};
|
||||
export declare const CloudEventBatch: {
|
||||
fromJSON(object: any): CloudEventBatch;
|
||||
toJSON(message: CloudEventBatch): unknown;
|
||||
};
|
||||
185
node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
generated
vendored
Normal file
185
node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
generated
vendored
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
|
||||
/* eslint-disable */
|
||||
const any_1 = require("./google/protobuf/any");
|
||||
const timestamp_1 = require("./google/protobuf/timestamp");
|
||||
function createBaseCloudEvent() {
|
||||
return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
|
||||
}
|
||||
exports.CloudEvent = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
id: isSet(object.id) ? String(object.id) : "",
|
||||
source: isSet(object.source) ? String(object.source) : "",
|
||||
specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
|
||||
type: isSet(object.type) ? String(object.type) : "",
|
||||
attributes: isObject(object.attributes)
|
||||
? Object.entries(object.attributes).reduce((acc, [key, value]) => {
|
||||
acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
|
||||
return acc;
|
||||
}, {})
|
||||
: {},
|
||||
data: isSet(object.binaryData)
|
||||
? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
|
||||
: isSet(object.textData)
|
||||
? { $case: "textData", textData: String(object.textData) }
|
||||
: isSet(object.protoData)
|
||||
? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.id !== undefined && (obj.id = message.id);
|
||||
message.source !== undefined && (obj.source = message.source);
|
||||
message.specVersion !== undefined && (obj.specVersion = message.specVersion);
|
||||
message.type !== undefined && (obj.type = message.type);
|
||||
obj.attributes = {};
|
||||
if (message.attributes) {
|
||||
Object.entries(message.attributes).forEach(([k, v]) => {
|
||||
obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
|
||||
});
|
||||
}
|
||||
message.data?.$case === "binaryData" &&
|
||||
(obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
|
||||
message.data?.$case === "textData" && (obj.textData = message.data?.textData);
|
||||
message.data?.$case === "protoData" &&
|
||||
(obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseCloudEvent_AttributesEntry() {
|
||||
return { key: "", value: undefined };
|
||||
}
|
||||
exports.CloudEvent_AttributesEntry = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
key: isSet(object.key) ? String(object.key) : "",
|
||||
value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.key !== undefined && (obj.key = message.key);
|
||||
message.value !== undefined &&
|
||||
(obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseCloudEvent_CloudEventAttributeValue() {
|
||||
return { attr: undefined };
|
||||
}
|
||||
exports.CloudEvent_CloudEventAttributeValue = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
attr: isSet(object.ceBoolean)
|
||||
? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
|
||||
: isSet(object.ceInteger)
|
||||
? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
|
||||
: isSet(object.ceString)
|
||||
? { $case: "ceString", ceString: String(object.ceString) }
|
||||
: isSet(object.ceBytes)
|
||||
? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
|
||||
: isSet(object.ceUri)
|
||||
? { $case: "ceUri", ceUri: String(object.ceUri) }
|
||||
: isSet(object.ceUriRef)
|
||||
? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
|
||||
: isSet(object.ceTimestamp)
|
||||
? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
|
||||
message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
|
||||
message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
|
||||
message.attr?.$case === "ceBytes" &&
|
||||
(obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
|
||||
message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
|
||||
message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
|
||||
message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseCloudEventBatch() {
|
||||
return { events: [] };
|
||||
}
|
||||
exports.CloudEventBatch = {
|
||||
fromJSON(object) {
|
||||
return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
if (message.events) {
|
||||
obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.events = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
function bytesFromBase64(b64) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
|
||||
}
|
||||
else {
|
||||
const bin = tsProtoGlobalThis.atob(b64);
|
||||
const arr = new Uint8Array(bin.length);
|
||||
for (let i = 0; i < bin.length; ++i) {
|
||||
arr[i] = bin.charCodeAt(i);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
}
|
||||
function base64FromBytes(arr) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
|
||||
}
|
||||
else {
|
||||
const bin = [];
|
||||
arr.forEach((byte) => {
|
||||
bin.push(String.fromCharCode(byte));
|
||||
});
|
||||
return tsProtoGlobalThis.btoa(bin.join(""));
|
||||
}
|
||||
}
|
||||
function fromTimestamp(t) {
|
||||
let millis = Number(t.seconds) * 1000;
|
||||
millis += t.nanos / 1000000;
|
||||
return new Date(millis);
|
||||
}
|
||||
function fromJsonTimestamp(o) {
|
||||
if (o instanceof Date) {
|
||||
return o;
|
||||
}
|
||||
else if (typeof o === "string") {
|
||||
return new Date(o);
|
||||
}
|
||||
else {
|
||||
return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
|
||||
}
|
||||
}
|
||||
function isObject(value) {
|
||||
return typeof value === "object" && value !== null;
|
||||
}
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
52
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts
generated
vendored
Normal file
52
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
/**
|
||||
* An indicator of the behavior of a given field (for example, that a field
|
||||
* is required in requests, or given as output but ignored as input).
|
||||
* This **does not** change the behavior in protocol buffers itself; it only
|
||||
* denotes the behavior and may affect how API tooling handles the field.
|
||||
*
|
||||
* Note: This enum **may** receive new values in the future.
|
||||
*/
|
||||
export declare enum FieldBehavior {
|
||||
/** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
|
||||
FIELD_BEHAVIOR_UNSPECIFIED = 0,
|
||||
/**
|
||||
* OPTIONAL - Specifically denotes a field as optional.
|
||||
* While all fields in protocol buffers are optional, this may be specified
|
||||
* for emphasis if appropriate.
|
||||
*/
|
||||
OPTIONAL = 1,
|
||||
/**
|
||||
* REQUIRED - Denotes a field as required.
|
||||
* This indicates that the field **must** be provided as part of the request,
|
||||
* and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
|
||||
*/
|
||||
REQUIRED = 2,
|
||||
/**
|
||||
* OUTPUT_ONLY - Denotes a field as output only.
|
||||
* This indicates that the field is provided in responses, but including the
|
||||
* field in a request does nothing (the server *must* ignore it and
|
||||
* *must not* throw an error as a result of the field's presence).
|
||||
*/
|
||||
OUTPUT_ONLY = 3,
|
||||
/**
|
||||
* INPUT_ONLY - Denotes a field as input only.
|
||||
* This indicates that the field is provided in requests, and the
|
||||
* corresponding field is not included in output.
|
||||
*/
|
||||
INPUT_ONLY = 4,
|
||||
/**
|
||||
* IMMUTABLE - Denotes a field as immutable.
|
||||
* This indicates that the field may be set once in a request to create a
|
||||
* resource, but may not be changed thereafter.
|
||||
*/
|
||||
IMMUTABLE = 5,
|
||||
/**
|
||||
* UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
|
||||
* This indicates that the service may provide the elements of the list
|
||||
* in any arbitrary order, rather than the order the user originally
|
||||
* provided. Additionally, the list's order may or may not be stable.
|
||||
*/
|
||||
UNORDERED_LIST = 6
|
||||
}
|
||||
export declare function fieldBehaviorFromJSON(object: any): FieldBehavior;
|
||||
export declare function fieldBehaviorToJSON(object: FieldBehavior): string;
|
||||
119
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
generated
vendored
Normal file
119
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
generated
vendored
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
"use strict";
|
||||
/* eslint-disable */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
|
||||
/**
|
||||
* An indicator of the behavior of a given field (for example, that a field
|
||||
* is required in requests, or given as output but ignored as input).
|
||||
* This **does not** change the behavior in protocol buffers itself; it only
|
||||
* denotes the behavior and may affect how API tooling handles the field.
|
||||
*
|
||||
* Note: This enum **may** receive new values in the future.
|
||||
*/
|
||||
var FieldBehavior;
|
||||
(function (FieldBehavior) {
|
||||
/** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
|
||||
FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
|
||||
/**
|
||||
* OPTIONAL - Specifically denotes a field as optional.
|
||||
* While all fields in protocol buffers are optional, this may be specified
|
||||
* for emphasis if appropriate.
|
||||
*/
|
||||
FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
|
||||
/**
|
||||
* REQUIRED - Denotes a field as required.
|
||||
* This indicates that the field **must** be provided as part of the request,
|
||||
* and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
|
||||
*/
|
||||
FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
|
||||
/**
|
||||
* OUTPUT_ONLY - Denotes a field as output only.
|
||||
* This indicates that the field is provided in responses, but including the
|
||||
* field in a request does nothing (the server *must* ignore it and
|
||||
* *must not* throw an error as a result of the field's presence).
|
||||
*/
|
||||
FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
|
||||
/**
|
||||
* INPUT_ONLY - Denotes a field as input only.
|
||||
* This indicates that the field is provided in requests, and the
|
||||
* corresponding field is not included in output.
|
||||
*/
|
||||
FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
|
||||
/**
|
||||
* IMMUTABLE - Denotes a field as immutable.
|
||||
* This indicates that the field may be set once in a request to create a
|
||||
* resource, but may not be changed thereafter.
|
||||
*/
|
||||
FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
|
||||
/**
|
||||
* UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
|
||||
* This indicates that the service may provide the elements of the list
|
||||
* in any arbitrary order, rather than the order the user originally
|
||||
* provided. Additionally, the list's order may or may not be stable.
|
||||
*/
|
||||
FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
|
||||
})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
|
||||
function fieldBehaviorFromJSON(object) {
|
||||
switch (object) {
|
||||
case 0:
|
||||
case "FIELD_BEHAVIOR_UNSPECIFIED":
|
||||
return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
|
||||
case 1:
|
||||
case "OPTIONAL":
|
||||
return FieldBehavior.OPTIONAL;
|
||||
case 2:
|
||||
case "REQUIRED":
|
||||
return FieldBehavior.REQUIRED;
|
||||
case 3:
|
||||
case "OUTPUT_ONLY":
|
||||
return FieldBehavior.OUTPUT_ONLY;
|
||||
case 4:
|
||||
case "INPUT_ONLY":
|
||||
return FieldBehavior.INPUT_ONLY;
|
||||
case 5:
|
||||
case "IMMUTABLE":
|
||||
return FieldBehavior.IMMUTABLE;
|
||||
case 6:
|
||||
case "UNORDERED_LIST":
|
||||
return FieldBehavior.UNORDERED_LIST;
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
|
||||
}
|
||||
}
|
||||
exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
|
||||
function fieldBehaviorToJSON(object) {
|
||||
switch (object) {
|
||||
case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
|
||||
return "FIELD_BEHAVIOR_UNSPECIFIED";
|
||||
case FieldBehavior.OPTIONAL:
|
||||
return "OPTIONAL";
|
||||
case FieldBehavior.REQUIRED:
|
||||
return "REQUIRED";
|
||||
case FieldBehavior.OUTPUT_ONLY:
|
||||
return "OUTPUT_ONLY";
|
||||
case FieldBehavior.INPUT_ONLY:
|
||||
return "INPUT_ONLY";
|
||||
case FieldBehavior.IMMUTABLE:
|
||||
return "IMMUTABLE";
|
||||
case FieldBehavior.UNORDERED_LIST:
|
||||
return "UNORDERED_LIST";
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
|
||||
}
|
||||
}
|
||||
exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
122
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.d.ts
generated
vendored
Normal file
122
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
/// <reference types="node" />
|
||||
/**
|
||||
* `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||
* URL that describes the type of the serialized message.
|
||||
*
|
||||
* Protobuf library provides support to pack/unpack Any values in the form
|
||||
* of utility functions or additional generated methods of the Any type.
|
||||
*
|
||||
* Example 1: Pack and unpack a message in C++.
|
||||
*
|
||||
* Foo foo = ...;
|
||||
* Any any;
|
||||
* any.PackFrom(foo);
|
||||
* ...
|
||||
* if (any.UnpackTo(&foo)) {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* Example 2: Pack and unpack a message in Java.
|
||||
*
|
||||
* Foo foo = ...;
|
||||
* Any any = Any.pack(foo);
|
||||
* ...
|
||||
* if (any.is(Foo.class)) {
|
||||
* foo = any.unpack(Foo.class);
|
||||
* }
|
||||
*
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
*
|
||||
* foo = Foo(...)
|
||||
* any = Any()
|
||||
* any.Pack(foo)
|
||||
* ...
|
||||
* if any.Is(Foo.DESCRIPTOR):
|
||||
* any.Unpack(foo)
|
||||
* ...
|
||||
*
|
||||
* Example 4: Pack and unpack a message in Go
|
||||
*
|
||||
* foo := &pb.Foo{...}
|
||||
* any, err := anypb.New(foo)
|
||||
* if err != nil {
|
||||
* ...
|
||||
* }
|
||||
* ...
|
||||
* foo := &pb.Foo{}
|
||||
* if err := any.UnmarshalTo(foo); err != nil {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* The pack methods provided by protobuf library will by default use
|
||||
* 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
* methods only use the fully qualified type name after the last '/'
|
||||
* in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
* name "y.z".
|
||||
*
|
||||
* JSON
|
||||
*
|
||||
* The JSON representation of an `Any` value uses the regular
|
||||
* representation of the deserialized, embedded message, with an
|
||||
* additional field `@type` which contains the type URL. Example:
|
||||
*
|
||||
* package google.profile;
|
||||
* message Person {
|
||||
* string first_name = 1;
|
||||
* string last_name = 2;
|
||||
* }
|
||||
*
|
||||
* {
|
||||
* "@type": "type.googleapis.com/google.profile.Person",
|
||||
* "firstName": <string>,
|
||||
* "lastName": <string>
|
||||
* }
|
||||
*
|
||||
* If the embedded message type is well-known and has a custom JSON
|
||||
* representation, that representation will be embedded adding a field
|
||||
* `value` which holds the custom JSON in addition to the `@type`
|
||||
* field. Example (for message [google.protobuf.Duration][]):
|
||||
*
|
||||
* {
|
||||
* "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||
* "value": "1.212s"
|
||||
* }
|
||||
*/
|
||||
export interface Any {
|
||||
/**
|
||||
* A URL/resource name that uniquely identifies the type of the serialized
|
||||
* protocol buffer message. This string must contain at least
|
||||
* one "/" character. The last segment of the URL's path must represent
|
||||
* the fully qualified name of the type (as in
|
||||
* `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
* (e.g., leading "." is not accepted).
|
||||
*
|
||||
* In practice, teams usually precompile into the binary all types that they
|
||||
* expect it to use in the context of Any. However, for URLs which use the
|
||||
* scheme `http`, `https`, or no scheme, one can optionally set up a type
|
||||
* server that maps type URLs to message definitions as follows:
|
||||
*
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
*
|
||||
* Note: this functionality is not currently available in the official
|
||||
* protobuf release, and it is not used for type URLs beginning with
|
||||
* type.googleapis.com.
|
||||
*
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
*/
|
||||
typeUrl: string;
|
||||
/** Must be a valid serialized protocol buffer of the above specified type. */
|
||||
value: Buffer;
|
||||
}
|
||||
export declare const Any: {
|
||||
fromJSON(object: any): Any;
|
||||
toJSON(message: Any): unknown;
|
||||
};
|
||||
65
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
generated
vendored
Normal file
65
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
generated
vendored
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
"use strict";
|
||||
/* eslint-disable */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Any = void 0;
|
||||
function createBaseAny() {
|
||||
return { typeUrl: "", value: Buffer.alloc(0) };
|
||||
}
|
||||
exports.Any = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
|
||||
value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
|
||||
message.value !== undefined &&
|
||||
(obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
function bytesFromBase64(b64) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
|
||||
}
|
||||
else {
|
||||
const bin = tsProtoGlobalThis.atob(b64);
|
||||
const arr = new Uint8Array(bin.length);
|
||||
for (let i = 0; i < bin.length; ++i) {
|
||||
arr[i] = bin.charCodeAt(i);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
}
|
||||
function base64FromBytes(arr) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
|
||||
}
|
||||
else {
|
||||
const bin = [];
|
||||
arr.forEach((byte) => {
|
||||
bin.push(String.fromCharCode(byte));
|
||||
});
|
||||
return tsProtoGlobalThis.btoa(bin.join(""));
|
||||
}
|
||||
}
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
939
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts
generated
vendored
Normal file
939
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,939 @@
|
|||
/// <reference types="node" />
|
||||
/**
|
||||
* The protocol compiler can output a FileDescriptorSet containing the .proto
|
||||
* files it parses.
|
||||
*/
|
||||
export interface FileDescriptorSet {
|
||||
file: FileDescriptorProto[];
|
||||
}
|
||||
/** Describes a complete .proto file. */
|
||||
export interface FileDescriptorProto {
|
||||
/** file name, relative to root of source tree */
|
||||
name: string;
|
||||
/** e.g. "foo", "foo.bar", etc. */
|
||||
package: string;
|
||||
/** Names of files imported by this file. */
|
||||
dependency: string[];
|
||||
/** Indexes of the public imported files in the dependency list above. */
|
||||
publicDependency: number[];
|
||||
/**
|
||||
* Indexes of the weak imported files in the dependency list.
|
||||
* For Google-internal migration only. Do not use.
|
||||
*/
|
||||
weakDependency: number[];
|
||||
/** All top-level definitions in this file. */
|
||||
messageType: DescriptorProto[];
|
||||
enumType: EnumDescriptorProto[];
|
||||
service: ServiceDescriptorProto[];
|
||||
extension: FieldDescriptorProto[];
|
||||
options: FileOptions | undefined;
|
||||
/**
|
||||
* This field contains optional information about the original source code.
|
||||
* You may safely remove this entire field without harming runtime
|
||||
* functionality of the descriptors -- the information is needed only by
|
||||
* development tools.
|
||||
*/
|
||||
sourceCodeInfo: SourceCodeInfo | undefined;
|
||||
/**
|
||||
* The syntax of the proto file.
|
||||
* The supported values are "proto2" and "proto3".
|
||||
*/
|
||||
syntax: string;
|
||||
}
|
||||
/** Describes a message type. */
|
||||
export interface DescriptorProto {
|
||||
name: string;
|
||||
field: FieldDescriptorProto[];
|
||||
extension: FieldDescriptorProto[];
|
||||
nestedType: DescriptorProto[];
|
||||
enumType: EnumDescriptorProto[];
|
||||
extensionRange: DescriptorProto_ExtensionRange[];
|
||||
oneofDecl: OneofDescriptorProto[];
|
||||
options: MessageOptions | undefined;
|
||||
reservedRange: DescriptorProto_ReservedRange[];
|
||||
/**
|
||||
* Reserved field names, which may not be used by fields in the same message.
|
||||
* A given name may only be reserved once.
|
||||
*/
|
||||
reservedName: string[];
|
||||
}
|
||||
export interface DescriptorProto_ExtensionRange {
|
||||
/** Inclusive. */
|
||||
start: number;
|
||||
/** Exclusive. */
|
||||
end: number;
|
||||
options: ExtensionRangeOptions | undefined;
|
||||
}
|
||||
/**
|
||||
* Range of reserved tag numbers. Reserved tag numbers may not be used by
|
||||
* fields or extension ranges in the same message. Reserved ranges may
|
||||
* not overlap.
|
||||
*/
|
||||
export interface DescriptorProto_ReservedRange {
|
||||
/** Inclusive. */
|
||||
start: number;
|
||||
/** Exclusive. */
|
||||
end: number;
|
||||
}
|
||||
export interface ExtensionRangeOptions {
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
/** Describes a field within a message. */
|
||||
export interface FieldDescriptorProto {
|
||||
name: string;
|
||||
number: number;
|
||||
label: FieldDescriptorProto_Label;
|
||||
/**
|
||||
* If type_name is set, this need not be set. If both this and type_name
|
||||
* are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
|
||||
*/
|
||||
type: FieldDescriptorProto_Type;
|
||||
/**
|
||||
* For message and enum types, this is the name of the type. If the name
|
||||
* starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
|
||||
* rules are used to find the type (i.e. first the nested types within this
|
||||
* message are searched, then within the parent, on up to the root
|
||||
* namespace).
|
||||
*/
|
||||
typeName: string;
|
||||
/**
|
||||
* For extensions, this is the name of the type being extended. It is
|
||||
* resolved in the same manner as type_name.
|
||||
*/
|
||||
extendee: string;
|
||||
/**
|
||||
* For numeric types, contains the original text representation of the value.
|
||||
* For booleans, "true" or "false".
|
||||
* For strings, contains the default text contents (not escaped in any way).
|
||||
* For bytes, contains the C escaped value. All bytes >= 128 are escaped.
|
||||
*/
|
||||
defaultValue: string;
|
||||
/**
|
||||
* If set, gives the index of a oneof in the containing type's oneof_decl
|
||||
* list. This field is a member of that oneof.
|
||||
*/
|
||||
oneofIndex: number;
|
||||
/**
|
||||
* JSON name of this field. The value is set by protocol compiler. If the
|
||||
* user has set a "json_name" option on this field, that option's value
|
||||
* will be used. Otherwise, it's deduced from the field's name by converting
|
||||
* it to camelCase.
|
||||
*/
|
||||
jsonName: string;
|
||||
options: FieldOptions | undefined;
|
||||
/**
|
||||
* If true, this is a proto3 "optional". When a proto3 field is optional, it
|
||||
* tracks presence regardless of field type.
|
||||
*
|
||||
* When proto3_optional is true, this field must be belong to a oneof to
|
||||
* signal to old proto3 clients that presence is tracked for this field. This
|
||||
* oneof is known as a "synthetic" oneof, and this field must be its sole
|
||||
* member (each proto3 optional field gets its own synthetic oneof). Synthetic
|
||||
* oneofs exist in the descriptor only, and do not generate any API. Synthetic
|
||||
* oneofs must be ordered after all "real" oneofs.
|
||||
*
|
||||
* For message fields, proto3_optional doesn't create any semantic change,
|
||||
* since non-repeated message fields always track presence. However it still
|
||||
* indicates the semantic detail of whether the user wrote "optional" or not.
|
||||
* This can be useful for round-tripping the .proto file. For consistency we
|
||||
* give message fields a synthetic oneof also, even though it is not required
|
||||
* to track presence. This is especially important because the parser can't
|
||||
* tell if a field is a message or an enum, so it must always create a
|
||||
* synthetic oneof.
|
||||
*
|
||||
* Proto2 optional fields do not set this flag, because they already indicate
|
||||
* optional with `LABEL_OPTIONAL`.
|
||||
*/
|
||||
proto3Optional: boolean;
|
||||
}
|
||||
export declare enum FieldDescriptorProto_Type {
|
||||
/**
|
||||
* TYPE_DOUBLE - 0 is reserved for errors.
|
||||
* Order is weird for historical reasons.
|
||||
*/
|
||||
TYPE_DOUBLE = 1,
|
||||
TYPE_FLOAT = 2,
|
||||
/**
|
||||
* TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
|
||||
* negative values are likely.
|
||||
*/
|
||||
TYPE_INT64 = 3,
|
||||
TYPE_UINT64 = 4,
|
||||
/**
|
||||
* TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
|
||||
* negative values are likely.
|
||||
*/
|
||||
TYPE_INT32 = 5,
|
||||
TYPE_FIXED64 = 6,
|
||||
TYPE_FIXED32 = 7,
|
||||
TYPE_BOOL = 8,
|
||||
TYPE_STRING = 9,
|
||||
/**
|
||||
* TYPE_GROUP - Tag-delimited aggregate.
|
||||
* Group type is deprecated and not supported in proto3. However, Proto3
|
||||
* implementations should still be able to parse the group wire format and
|
||||
* treat group fields as unknown fields.
|
||||
*/
|
||||
TYPE_GROUP = 10,
|
||||
/** TYPE_MESSAGE - Length-delimited aggregate. */
|
||||
TYPE_MESSAGE = 11,
|
||||
/** TYPE_BYTES - New in version 2. */
|
||||
TYPE_BYTES = 12,
|
||||
TYPE_UINT32 = 13,
|
||||
TYPE_ENUM = 14,
|
||||
TYPE_SFIXED32 = 15,
|
||||
TYPE_SFIXED64 = 16,
|
||||
/** TYPE_SINT32 - Uses ZigZag encoding. */
|
||||
TYPE_SINT32 = 17,
|
||||
/** TYPE_SINT64 - Uses ZigZag encoding. */
|
||||
TYPE_SINT64 = 18
|
||||
}
|
||||
export declare function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type;
|
||||
export declare function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string;
|
||||
export declare enum FieldDescriptorProto_Label {
|
||||
/** LABEL_OPTIONAL - 0 is reserved for errors */
|
||||
LABEL_OPTIONAL = 1,
|
||||
LABEL_REQUIRED = 2,
|
||||
LABEL_REPEATED = 3
|
||||
}
|
||||
export declare function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label;
|
||||
export declare function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string;
|
||||
/** Describes a oneof. */
|
||||
export interface OneofDescriptorProto {
|
||||
name: string;
|
||||
options: OneofOptions | undefined;
|
||||
}
|
||||
/** Describes an enum type. */
|
||||
export interface EnumDescriptorProto {
|
||||
name: string;
|
||||
value: EnumValueDescriptorProto[];
|
||||
options: EnumOptions | undefined;
|
||||
/**
|
||||
* Range of reserved numeric values. Reserved numeric values may not be used
|
||||
* by enum values in the same enum declaration. Reserved ranges may not
|
||||
* overlap.
|
||||
*/
|
||||
reservedRange: EnumDescriptorProto_EnumReservedRange[];
|
||||
/**
|
||||
* Reserved enum value names, which may not be reused. A given name may only
|
||||
* be reserved once.
|
||||
*/
|
||||
reservedName: string[];
|
||||
}
|
||||
/**
|
||||
* Range of reserved numeric values. Reserved values may not be used by
|
||||
* entries in the same enum. Reserved ranges may not overlap.
|
||||
*
|
||||
* Note that this is distinct from DescriptorProto.ReservedRange in that it
|
||||
* is inclusive such that it can appropriately represent the entire int32
|
||||
* domain.
|
||||
*/
|
||||
export interface EnumDescriptorProto_EnumReservedRange {
|
||||
/** Inclusive. */
|
||||
start: number;
|
||||
/** Inclusive. */
|
||||
end: number;
|
||||
}
|
||||
/** Describes a value within an enum. */
|
||||
export interface EnumValueDescriptorProto {
|
||||
name: string;
|
||||
number: number;
|
||||
options: EnumValueOptions | undefined;
|
||||
}
|
||||
/** Describes a service. */
|
||||
export interface ServiceDescriptorProto {
|
||||
name: string;
|
||||
method: MethodDescriptorProto[];
|
||||
options: ServiceOptions | undefined;
|
||||
}
|
||||
/** Describes a method of a service. */
|
||||
export interface MethodDescriptorProto {
|
||||
name: string;
|
||||
/**
|
||||
* Input and output type names. These are resolved in the same way as
|
||||
* FieldDescriptorProto.type_name, but must refer to a message type.
|
||||
*/
|
||||
inputType: string;
|
||||
outputType: string;
|
||||
options: MethodOptions | undefined;
|
||||
/** Identifies if client streams multiple client messages */
|
||||
clientStreaming: boolean;
|
||||
/** Identifies if server streams multiple server messages */
|
||||
serverStreaming: boolean;
|
||||
}
|
||||
export interface FileOptions {
|
||||
/**
|
||||
* Sets the Java package where classes generated from this .proto will be
|
||||
* placed. By default, the proto package is used, but this is often
|
||||
* inappropriate because proto packages do not normally start with backwards
|
||||
* domain names.
|
||||
*/
|
||||
javaPackage: string;
|
||||
/**
|
||||
* Controls the name of the wrapper Java class generated for the .proto file.
|
||||
* That class will always contain the .proto file's getDescriptor() method as
|
||||
* well as any top-level extensions defined in the .proto file.
|
||||
* If java_multiple_files is disabled, then all the other classes from the
|
||||
* .proto file will be nested inside the single wrapper outer class.
|
||||
*/
|
||||
javaOuterClassname: string;
|
||||
/**
|
||||
* If enabled, then the Java code generator will generate a separate .java
|
||||
* file for each top-level message, enum, and service defined in the .proto
|
||||
* file. Thus, these types will *not* be nested inside the wrapper class
|
||||
* named by java_outer_classname. However, the wrapper class will still be
|
||||
* generated to contain the file's getDescriptor() method as well as any
|
||||
* top-level extensions defined in the file.
|
||||
*/
|
||||
javaMultipleFiles: boolean;
|
||||
/**
|
||||
* This option does nothing.
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
javaGenerateEqualsAndHash: boolean;
|
||||
/**
|
||||
* If set true, then the Java2 code generator will generate code that
|
||||
* throws an exception whenever an attempt is made to assign a non-UTF-8
|
||||
* byte sequence to a string field.
|
||||
* Message reflection will do the same.
|
||||
* However, an extension field still accepts non-UTF-8 byte sequences.
|
||||
* This option has no effect on when used with the lite runtime.
|
||||
*/
|
||||
javaStringCheckUtf8: boolean;
|
||||
optimizeFor: FileOptions_OptimizeMode;
|
||||
/**
|
||||
* Sets the Go package where structs generated from this .proto will be
|
||||
* placed. If omitted, the Go package will be derived from the following:
|
||||
* - The basename of the package import path, if provided.
|
||||
* - Otherwise, the package statement in the .proto file, if present.
|
||||
* - Otherwise, the basename of the .proto file, without extension.
|
||||
*/
|
||||
goPackage: string;
|
||||
/**
|
||||
* Should generic services be generated in each language? "Generic" services
|
||||
* are not specific to any particular RPC system. They are generated by the
|
||||
* main code generators in each language (without additional plugins).
|
||||
* Generic services were the only kind of service generation supported by
|
||||
* early versions of google.protobuf.
|
||||
*
|
||||
* Generic services are now considered deprecated in favor of using plugins
|
||||
* that generate code specific to your particular RPC system. Therefore,
|
||||
* these default to false. Old code which depends on generic services should
|
||||
* explicitly set them to true.
|
||||
*/
|
||||
ccGenericServices: boolean;
|
||||
javaGenericServices: boolean;
|
||||
pyGenericServices: boolean;
|
||||
phpGenericServices: boolean;
|
||||
/**
|
||||
* Is this file deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for everything in the file, or it will be completely ignored; in the very
|
||||
* least, this is a formalization for deprecating files.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
/**
|
||||
* Enables the use of arenas for the proto messages in this file. This applies
|
||||
* only to generated classes for C++.
|
||||
*/
|
||||
ccEnableArenas: boolean;
|
||||
/**
|
||||
* Sets the objective c class prefix which is prepended to all objective c
|
||||
* generated classes from this .proto. There is no default.
|
||||
*/
|
||||
objcClassPrefix: string;
|
||||
/** Namespace for generated classes; defaults to the package. */
|
||||
csharpNamespace: string;
|
||||
/**
|
||||
* By default Swift generators will take the proto package and CamelCase it
|
||||
* replacing '.' with underscore and use that to prefix the types/symbols
|
||||
* defined. When this options is provided, they will use this value instead
|
||||
* to prefix the types/symbols defined.
|
||||
*/
|
||||
swiftPrefix: string;
|
||||
/**
|
||||
* Sets the php class prefix which is prepended to all php generated classes
|
||||
* from this .proto. Default is empty.
|
||||
*/
|
||||
phpClassPrefix: string;
|
||||
/**
|
||||
* Use this option to change the namespace of php generated classes. Default
|
||||
* is empty. When this option is empty, the package name will be used for
|
||||
* determining the namespace.
|
||||
*/
|
||||
phpNamespace: string;
|
||||
/**
|
||||
* Use this option to change the namespace of php generated metadata classes.
|
||||
* Default is empty. When this option is empty, the proto file name will be
|
||||
* used for determining the namespace.
|
||||
*/
|
||||
phpMetadataNamespace: string;
|
||||
/**
|
||||
* Use this option to change the package of ruby generated classes. Default
|
||||
* is empty. When this option is not set, the package name will be used for
|
||||
* determining the ruby package.
|
||||
*/
|
||||
rubyPackage: string;
|
||||
/**
|
||||
* The parser stores options it doesn't recognize here.
|
||||
* See the documentation for the "Options" section above.
|
||||
*/
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
/** Generated classes can be optimized for speed or code size. */
|
||||
export declare enum FileOptions_OptimizeMode {
|
||||
/** SPEED - Generate complete code for parsing, serialization, */
|
||||
SPEED = 1,
|
||||
/** CODE_SIZE - etc. */
|
||||
CODE_SIZE = 2,
|
||||
/** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
|
||||
LITE_RUNTIME = 3
|
||||
}
|
||||
export declare function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode;
|
||||
export declare function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string;
|
||||
export interface MessageOptions {
|
||||
/**
|
||||
* Set true to use the old proto1 MessageSet wire format for extensions.
|
||||
* This is provided for backwards-compatibility with the MessageSet wire
|
||||
* format. You should not use this for any other reason: It's less
|
||||
* efficient, has fewer features, and is more complicated.
|
||||
*
|
||||
* The message must be defined exactly as follows:
|
||||
* message Foo {
|
||||
* option message_set_wire_format = true;
|
||||
* extensions 4 to max;
|
||||
* }
|
||||
* Note that the message cannot have any defined fields; MessageSets only
|
||||
* have extensions.
|
||||
*
|
||||
* All extensions of your type must be singular messages; e.g. they cannot
|
||||
* be int32s, enums, or repeated messages.
|
||||
*
|
||||
* Because this is an option, the above two restrictions are not enforced by
|
||||
* the protocol compiler.
|
||||
*/
|
||||
messageSetWireFormat: boolean;
|
||||
/**
|
||||
* Disables the generation of the standard "descriptor()" accessor, which can
|
||||
* conflict with a field of the same name. This is meant to make migration
|
||||
* from proto1 easier; new code should avoid fields named "descriptor".
|
||||
*/
|
||||
noStandardDescriptorAccessor: boolean;
|
||||
/**
|
||||
* Is this message deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for the message, or it will be completely ignored; in the very least,
|
||||
* this is a formalization for deprecating messages.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
/**
|
||||
* Whether the message is an automatically generated map entry type for the
|
||||
* maps field.
|
||||
*
|
||||
* For maps fields:
|
||||
* map<KeyType, ValueType> map_field = 1;
|
||||
* The parsed descriptor looks like:
|
||||
* message MapFieldEntry {
|
||||
* option map_entry = true;
|
||||
* optional KeyType key = 1;
|
||||
* optional ValueType value = 2;
|
||||
* }
|
||||
* repeated MapFieldEntry map_field = 1;
|
||||
*
|
||||
* Implementations may choose not to generate the map_entry=true message, but
|
||||
* use a native map in the target language to hold the keys and values.
|
||||
* The reflection APIs in such implementations still need to work as
|
||||
* if the field is a repeated message field.
|
||||
*
|
||||
* NOTE: Do not set the option in .proto files. Always use the maps syntax
|
||||
* instead. The option should only be implicitly set by the proto compiler
|
||||
* parser.
|
||||
*/
|
||||
mapEntry: boolean;
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
export interface FieldOptions {
|
||||
/**
|
||||
* The ctype option instructs the C++ code generator to use a different
|
||||
* representation of the field than it normally would. See the specific
|
||||
* options below. This option is not yet implemented in the open source
|
||||
* release -- sorry, we'll try to include it in a future version!
|
||||
*/
|
||||
ctype: FieldOptions_CType;
|
||||
/**
|
||||
* The packed option can be enabled for repeated primitive fields to enable
|
||||
* a more efficient representation on the wire. Rather than repeatedly
|
||||
* writing the tag and type for each element, the entire array is encoded as
|
||||
* a single length-delimited blob. In proto3, only explicit setting it to
|
||||
* false will avoid using packed encoding.
|
||||
*/
|
||||
packed: boolean;
|
||||
/**
|
||||
* The jstype option determines the JavaScript type used for values of the
|
||||
* field. The option is permitted only for 64 bit integral and fixed types
|
||||
* (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
|
||||
* is represented as JavaScript string, which avoids loss of precision that
|
||||
* can happen when a large value is converted to a floating point JavaScript.
|
||||
* Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
|
||||
* use the JavaScript "number" type. The behavior of the default option
|
||||
* JS_NORMAL is implementation dependent.
|
||||
*
|
||||
* This option is an enum to permit additional types to be added, e.g.
|
||||
* goog.math.Integer.
|
||||
*/
|
||||
jstype: FieldOptions_JSType;
|
||||
/**
|
||||
* Should this field be parsed lazily? Lazy applies only to message-type
|
||||
* fields. It means that when the outer message is initially parsed, the
|
||||
* inner message's contents will not be parsed but instead stored in encoded
|
||||
* form. The inner message will actually be parsed when it is first accessed.
|
||||
*
|
||||
* This is only a hint. Implementations are free to choose whether to use
|
||||
* eager or lazy parsing regardless of the value of this option. However,
|
||||
* setting this option true suggests that the protocol author believes that
|
||||
* using lazy parsing on this field is worth the additional bookkeeping
|
||||
* overhead typically needed to implement it.
|
||||
*
|
||||
* This option does not affect the public interface of any generated code;
|
||||
* all method signatures remain the same. Furthermore, thread-safety of the
|
||||
* interface is not affected by this option; const methods remain safe to
|
||||
* call from multiple threads concurrently, while non-const methods continue
|
||||
* to require exclusive access.
|
||||
*
|
||||
* Note that implementations may choose not to check required fields within
|
||||
* a lazy sub-message. That is, calling IsInitialized() on the outer message
|
||||
* may return true even if the inner message has missing required fields.
|
||||
* This is necessary because otherwise the inner message would have to be
|
||||
* parsed in order to perform the check, defeating the purpose of lazy
|
||||
* parsing. An implementation which chooses not to check required fields
|
||||
* must be consistent about it. That is, for any particular sub-message, the
|
||||
* implementation must either *always* check its required fields, or *never*
|
||||
* check its required fields, regardless of whether or not the message has
|
||||
* been parsed.
|
||||
*
|
||||
* As of 2021, lazy does no correctness checks on the byte stream during
|
||||
* parsing. This may lead to crashes if and when an invalid byte stream is
|
||||
* finally parsed upon access.
|
||||
*
|
||||
* TODO(b/211906113): Enable validation on lazy fields.
|
||||
*/
|
||||
lazy: boolean;
|
||||
/**
|
||||
* unverified_lazy does no correctness checks on the byte stream. This should
|
||||
* only be used where lazy with verification is prohibitive for performance
|
||||
* reasons.
|
||||
*/
|
||||
unverifiedLazy: boolean;
|
||||
/**
|
||||
* Is this field deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for accessors, or it will be completely ignored; in the very least, this
|
||||
* is a formalization for deprecating fields.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
/** For Google-internal migration only. Do not use. */
|
||||
weak: boolean;
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
export declare enum FieldOptions_CType {
|
||||
/** STRING - Default mode. */
|
||||
STRING = 0,
|
||||
CORD = 1,
|
||||
STRING_PIECE = 2
|
||||
}
|
||||
export declare function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType;
|
||||
export declare function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string;
|
||||
export declare enum FieldOptions_JSType {
|
||||
/** JS_NORMAL - Use the default type. */
|
||||
JS_NORMAL = 0,
|
||||
/** JS_STRING - Use JavaScript strings. */
|
||||
JS_STRING = 1,
|
||||
/** JS_NUMBER - Use JavaScript numbers. */
|
||||
JS_NUMBER = 2
|
||||
}
|
||||
export declare function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType;
|
||||
export declare function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string;
|
||||
export interface OneofOptions {
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
export interface EnumOptions {
|
||||
/**
|
||||
* Set this option to true to allow mapping different tag names to the same
|
||||
* value.
|
||||
*/
|
||||
allowAlias: boolean;
|
||||
/**
|
||||
* Is this enum deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for the enum, or it will be completely ignored; in the very least, this
|
||||
* is a formalization for deprecating enums.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
export interface EnumValueOptions {
|
||||
/**
|
||||
* Is this enum value deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for the enum value, or it will be completely ignored; in the very least,
|
||||
* this is a formalization for deprecating enum values.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
export interface ServiceOptions {
|
||||
/**
|
||||
* Is this service deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for the service, or it will be completely ignored; in the very least,
|
||||
* this is a formalization for deprecating services.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
export interface MethodOptions {
|
||||
/**
|
||||
* Is this method deprecated?
|
||||
* Depending on the target platform, this can emit Deprecated annotations
|
||||
* for the method, or it will be completely ignored; in the very least,
|
||||
* this is a formalization for deprecating methods.
|
||||
*/
|
||||
deprecated: boolean;
|
||||
idempotencyLevel: MethodOptions_IdempotencyLevel;
|
||||
/** The parser stores options it doesn't recognize here. See above. */
|
||||
uninterpretedOption: UninterpretedOption[];
|
||||
}
|
||||
/**
|
||||
* Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
|
||||
* or neither? HTTP based RPC implementation may choose GET verb for safe
|
||||
* methods, and PUT verb for idempotent methods instead of the default POST.
|
||||
*/
|
||||
export declare enum MethodOptions_IdempotencyLevel {
|
||||
IDEMPOTENCY_UNKNOWN = 0,
|
||||
/** NO_SIDE_EFFECTS - implies idempotent */
|
||||
NO_SIDE_EFFECTS = 1,
|
||||
/** IDEMPOTENT - idempotent, but may have side effects */
|
||||
IDEMPOTENT = 2
|
||||
}
|
||||
export declare function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel;
|
||||
export declare function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string;
|
||||
/**
|
||||
* A message representing a option the parser does not recognize. This only
|
||||
* appears in options protos created by the compiler::Parser class.
|
||||
* DescriptorPool resolves these when building Descriptor objects. Therefore,
|
||||
* options protos in descriptor objects (e.g. returned by Descriptor::options(),
|
||||
* or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
|
||||
* in them.
|
||||
*/
|
||||
export interface UninterpretedOption {
|
||||
name: UninterpretedOption_NamePart[];
|
||||
/**
|
||||
* The value of the uninterpreted option, in whatever type the tokenizer
|
||||
* identified it as during parsing. Exactly one of these should be set.
|
||||
*/
|
||||
identifierValue: string;
|
||||
positiveIntValue: string;
|
||||
negativeIntValue: string;
|
||||
doubleValue: number;
|
||||
stringValue: Buffer;
|
||||
aggregateValue: string;
|
||||
}
|
||||
/**
|
||||
* The name of the uninterpreted option. Each string represents a segment in
|
||||
* a dot-separated name. is_extension is true iff a segment represents an
|
||||
* extension (denoted with parentheses in options specs in .proto files).
|
||||
* E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents
|
||||
* "foo.(bar.baz).moo".
|
||||
*/
|
||||
export interface UninterpretedOption_NamePart {
|
||||
namePart: string;
|
||||
isExtension: boolean;
|
||||
}
|
||||
/**
|
||||
* Encapsulates information about the original source file from which a
|
||||
* FileDescriptorProto was generated.
|
||||
*/
|
||||
export interface SourceCodeInfo {
|
||||
/**
|
||||
* A Location identifies a piece of source code in a .proto file which
|
||||
* corresponds to a particular definition. This information is intended
|
||||
* to be useful to IDEs, code indexers, documentation generators, and similar
|
||||
* tools.
|
||||
*
|
||||
* For example, say we have a file like:
|
||||
* message Foo {
|
||||
* optional string foo = 1;
|
||||
* }
|
||||
* Let's look at just the field definition:
|
||||
* optional string foo = 1;
|
||||
* ^ ^^ ^^ ^ ^^^
|
||||
* a bc de f ghi
|
||||
* We have the following locations:
|
||||
* span path represents
|
||||
* [a,i) [ 4, 0, 2, 0 ] The whole field definition.
|
||||
* [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
|
||||
* [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
|
||||
* [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
|
||||
* [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
|
||||
*
|
||||
* Notes:
|
||||
* - A location may refer to a repeated field itself (i.e. not to any
|
||||
* particular index within it). This is used whenever a set of elements are
|
||||
* logically enclosed in a single code segment. For example, an entire
|
||||
* extend block (possibly containing multiple extension definitions) will
|
||||
* have an outer location whose path refers to the "extensions" repeated
|
||||
* field without an index.
|
||||
* - Multiple locations may have the same path. This happens when a single
|
||||
* logical declaration is spread out across multiple places. The most
|
||||
* obvious example is the "extend" block again -- there may be multiple
|
||||
* extend blocks in the same scope, each of which will have the same path.
|
||||
* - A location's span is not always a subset of its parent's span. For
|
||||
* example, the "extendee" of an extension declaration appears at the
|
||||
* beginning of the "extend" block and is shared by all extensions within
|
||||
* the block.
|
||||
* - Just because a location's span is a subset of some other location's span
|
||||
* does not mean that it is a descendant. For example, a "group" defines
|
||||
* both a type and a field in a single declaration. Thus, the locations
|
||||
* corresponding to the type and field and their components will overlap.
|
||||
* - Code which tries to interpret locations should probably be designed to
|
||||
* ignore those that it doesn't understand, as more types of locations could
|
||||
* be recorded in the future.
|
||||
*/
|
||||
location: SourceCodeInfo_Location[];
|
||||
}
|
||||
export interface SourceCodeInfo_Location {
|
||||
/**
|
||||
* Identifies which part of the FileDescriptorProto was defined at this
|
||||
* location.
|
||||
*
|
||||
* Each element is a field number or an index. They form a path from
|
||||
* the root FileDescriptorProto to the place where the definition occurs.
|
||||
* For example, this path:
|
||||
* [ 4, 3, 2, 7, 1 ]
|
||||
* refers to:
|
||||
* file.message_type(3) // 4, 3
|
||||
* .field(7) // 2, 7
|
||||
* .name() // 1
|
||||
* This is because FileDescriptorProto.message_type has field number 4:
|
||||
* repeated DescriptorProto message_type = 4;
|
||||
* and DescriptorProto.field has field number 2:
|
||||
* repeated FieldDescriptorProto field = 2;
|
||||
* and FieldDescriptorProto.name has field number 1:
|
||||
* optional string name = 1;
|
||||
*
|
||||
* Thus, the above path gives the location of a field name. If we removed
|
||||
* the last element:
|
||||
* [ 4, 3, 2, 7 ]
|
||||
* this path refers to the whole field declaration (from the beginning
|
||||
* of the label to the terminating semicolon).
|
||||
*/
|
||||
path: number[];
|
||||
/**
|
||||
* Always has exactly three or four elements: start line, start column,
|
||||
* end line (optional, otherwise assumed same as start line), end column.
|
||||
* These are packed into a single field for efficiency. Note that line
|
||||
* and column numbers are zero-based -- typically you will want to add
|
||||
* 1 to each before displaying to a user.
|
||||
*/
|
||||
span: number[];
|
||||
/**
|
||||
* If this SourceCodeInfo represents a complete declaration, these are any
|
||||
* comments appearing before and after the declaration which appear to be
|
||||
* attached to the declaration.
|
||||
*
|
||||
* A series of line comments appearing on consecutive lines, with no other
|
||||
* tokens appearing on those lines, will be treated as a single comment.
|
||||
*
|
||||
* leading_detached_comments will keep paragraphs of comments that appear
|
||||
* before (but not connected to) the current element. Each paragraph,
|
||||
* separated by empty lines, will be one comment element in the repeated
|
||||
* field.
|
||||
*
|
||||
* Only the comment content is provided; comment markers (e.g. //) are
|
||||
* stripped out. For block comments, leading whitespace and an asterisk
|
||||
* will be stripped from the beginning of each line other than the first.
|
||||
* Newlines are included in the output.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* optional int32 foo = 1; // Comment attached to foo.
|
||||
* // Comment attached to bar.
|
||||
* optional int32 bar = 2;
|
||||
*
|
||||
* optional string baz = 3;
|
||||
* // Comment attached to baz.
|
||||
* // Another line attached to baz.
|
||||
*
|
||||
* // Comment attached to moo.
|
||||
* //
|
||||
* // Another line attached to moo.
|
||||
* optional double moo = 4;
|
||||
*
|
||||
* // Detached comment for corge. This is not leading or trailing comments
|
||||
* // to moo or corge because there are blank lines separating it from
|
||||
* // both.
|
||||
*
|
||||
* // Detached comment for corge paragraph 2.
|
||||
*
|
||||
* optional string corge = 5;
|
||||
* /* Block comment attached
|
||||
* * to corge. Leading asterisks
|
||||
* * will be removed. * /
|
||||
* /* Block comment attached to
|
||||
* * grault. * /
|
||||
* optional int32 grault = 6;
|
||||
*
|
||||
* // ignored detached comments.
|
||||
*/
|
||||
leadingComments: string;
|
||||
trailingComments: string;
|
||||
leadingDetachedComments: string[];
|
||||
}
|
||||
/**
|
||||
* Describes the relationship between generated code and its original source
|
||||
* file. A GeneratedCodeInfo message is associated with only one generated
|
||||
* source file, but may contain references to different source .proto files.
|
||||
*/
|
||||
export interface GeneratedCodeInfo {
|
||||
/**
|
||||
* An Annotation connects some span of text in generated code to an element
|
||||
* of its generating .proto file.
|
||||
*/
|
||||
annotation: GeneratedCodeInfo_Annotation[];
|
||||
}
|
||||
export interface GeneratedCodeInfo_Annotation {
|
||||
/**
|
||||
* Identifies the element in the original source .proto file. This field
|
||||
* is formatted the same as SourceCodeInfo.Location.path.
|
||||
*/
|
||||
path: number[];
|
||||
/** Identifies the filesystem path to the original source .proto. */
|
||||
sourceFile: string;
|
||||
/**
|
||||
* Identifies the starting offset in bytes in the generated code
|
||||
* that relates to the identified object.
|
||||
*/
|
||||
begin: number;
|
||||
/**
|
||||
* Identifies the ending offset in bytes in the generated code that
|
||||
* relates to the identified offset. The end offset should be one past
|
||||
* the last relevant byte (so the length of the text = end - begin).
|
||||
*/
|
||||
end: number;
|
||||
}
|
||||
export declare const FileDescriptorSet: {
|
||||
fromJSON(object: any): FileDescriptorSet;
|
||||
toJSON(message: FileDescriptorSet): unknown;
|
||||
};
|
||||
export declare const FileDescriptorProto: {
|
||||
fromJSON(object: any): FileDescriptorProto;
|
||||
toJSON(message: FileDescriptorProto): unknown;
|
||||
};
|
||||
export declare const DescriptorProto: {
|
||||
fromJSON(object: any): DescriptorProto;
|
||||
toJSON(message: DescriptorProto): unknown;
|
||||
};
|
||||
export declare const DescriptorProto_ExtensionRange: {
|
||||
fromJSON(object: any): DescriptorProto_ExtensionRange;
|
||||
toJSON(message: DescriptorProto_ExtensionRange): unknown;
|
||||
};
|
||||
export declare const DescriptorProto_ReservedRange: {
|
||||
fromJSON(object: any): DescriptorProto_ReservedRange;
|
||||
toJSON(message: DescriptorProto_ReservedRange): unknown;
|
||||
};
|
||||
export declare const ExtensionRangeOptions: {
|
||||
fromJSON(object: any): ExtensionRangeOptions;
|
||||
toJSON(message: ExtensionRangeOptions): unknown;
|
||||
};
|
||||
export declare const FieldDescriptorProto: {
|
||||
fromJSON(object: any): FieldDescriptorProto;
|
||||
toJSON(message: FieldDescriptorProto): unknown;
|
||||
};
|
||||
export declare const OneofDescriptorProto: {
|
||||
fromJSON(object: any): OneofDescriptorProto;
|
||||
toJSON(message: OneofDescriptorProto): unknown;
|
||||
};
|
||||
export declare const EnumDescriptorProto: {
|
||||
fromJSON(object: any): EnumDescriptorProto;
|
||||
toJSON(message: EnumDescriptorProto): unknown;
|
||||
};
|
||||
export declare const EnumDescriptorProto_EnumReservedRange: {
|
||||
fromJSON(object: any): EnumDescriptorProto_EnumReservedRange;
|
||||
toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown;
|
||||
};
|
||||
export declare const EnumValueDescriptorProto: {
|
||||
fromJSON(object: any): EnumValueDescriptorProto;
|
||||
toJSON(message: EnumValueDescriptorProto): unknown;
|
||||
};
|
||||
export declare const ServiceDescriptorProto: {
|
||||
fromJSON(object: any): ServiceDescriptorProto;
|
||||
toJSON(message: ServiceDescriptorProto): unknown;
|
||||
};
|
||||
export declare const MethodDescriptorProto: {
|
||||
fromJSON(object: any): MethodDescriptorProto;
|
||||
toJSON(message: MethodDescriptorProto): unknown;
|
||||
};
|
||||
export declare const FileOptions: {
|
||||
fromJSON(object: any): FileOptions;
|
||||
toJSON(message: FileOptions): unknown;
|
||||
};
|
||||
export declare const MessageOptions: {
|
||||
fromJSON(object: any): MessageOptions;
|
||||
toJSON(message: MessageOptions): unknown;
|
||||
};
|
||||
export declare const FieldOptions: {
|
||||
fromJSON(object: any): FieldOptions;
|
||||
toJSON(message: FieldOptions): unknown;
|
||||
};
|
||||
export declare const OneofOptions: {
|
||||
fromJSON(object: any): OneofOptions;
|
||||
toJSON(message: OneofOptions): unknown;
|
||||
};
|
||||
export declare const EnumOptions: {
|
||||
fromJSON(object: any): EnumOptions;
|
||||
toJSON(message: EnumOptions): unknown;
|
||||
};
|
||||
export declare const EnumValueOptions: {
|
||||
fromJSON(object: any): EnumValueOptions;
|
||||
toJSON(message: EnumValueOptions): unknown;
|
||||
};
|
||||
export declare const ServiceOptions: {
|
||||
fromJSON(object: any): ServiceOptions;
|
||||
toJSON(message: ServiceOptions): unknown;
|
||||
};
|
||||
export declare const MethodOptions: {
|
||||
fromJSON(object: any): MethodOptions;
|
||||
toJSON(message: MethodOptions): unknown;
|
||||
};
|
||||
export declare const UninterpretedOption: {
|
||||
fromJSON(object: any): UninterpretedOption;
|
||||
toJSON(message: UninterpretedOption): unknown;
|
||||
};
|
||||
export declare const UninterpretedOption_NamePart: {
|
||||
fromJSON(object: any): UninterpretedOption_NamePart;
|
||||
toJSON(message: UninterpretedOption_NamePart): unknown;
|
||||
};
|
||||
export declare const SourceCodeInfo: {
|
||||
fromJSON(object: any): SourceCodeInfo;
|
||||
toJSON(message: SourceCodeInfo): unknown;
|
||||
};
|
||||
export declare const SourceCodeInfo_Location: {
|
||||
fromJSON(object: any): SourceCodeInfo_Location;
|
||||
toJSON(message: SourceCodeInfo_Location): unknown;
|
||||
};
|
||||
export declare const GeneratedCodeInfo: {
|
||||
fromJSON(object: any): GeneratedCodeInfo;
|
||||
toJSON(message: GeneratedCodeInfo): unknown;
|
||||
};
|
||||
export declare const GeneratedCodeInfo_Annotation: {
|
||||
fromJSON(object: any): GeneratedCodeInfo_Annotation;
|
||||
toJSON(message: GeneratedCodeInfo_Annotation): unknown;
|
||||
};
|
||||
1308
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
generated
vendored
Normal file
1308
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
110
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts
generated
vendored
Normal file
110
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,110 @@
|
|||
/**
|
||||
* A Timestamp represents a point in time independent of any time zone or local
|
||||
* calendar, encoded as a count of seconds and fractions of seconds at
|
||||
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||
* January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||
* Gregorian calendar backwards to year one.
|
||||
*
|
||||
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||
* second table is needed for interpretation, using a [24-hour linear
|
||||
* smear](https://developers.google.com/time/smear).
|
||||
*
|
||||
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||
* restricting to that range, we ensure that we can convert to and from [RFC
|
||||
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||
*
|
||||
* # Examples
|
||||
*
|
||||
* Example 1: Compute Timestamp from POSIX `time()`.
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(time(NULL));
|
||||
* timestamp.set_nanos(0);
|
||||
*
|
||||
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||
*
|
||||
* struct timeval tv;
|
||||
* gettimeofday(&tv, NULL);
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(tv.tv_sec);
|
||||
* timestamp.set_nanos(tv.tv_usec * 1000);
|
||||
*
|
||||
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||
*
|
||||
* FILETIME ft;
|
||||
* GetSystemTimeAsFileTime(&ft);
|
||||
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||
*
|
||||
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||
*
|
||||
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||
*
|
||||
* long millis = System.currentTimeMillis();
|
||||
*
|
||||
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||
* .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||
*
|
||||
* Example 5: Compute Timestamp from Java `Instant.now()`.
|
||||
*
|
||||
* Instant now = Instant.now();
|
||||
*
|
||||
* Timestamp timestamp =
|
||||
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
|
||||
* .setNanos(now.getNano()).build();
|
||||
*
|
||||
* Example 6: Compute Timestamp from current time in Python.
|
||||
*
|
||||
* timestamp = Timestamp()
|
||||
* timestamp.GetCurrentTime()
|
||||
*
|
||||
* # JSON Mapping
|
||||
*
|
||||
* In JSON format, the Timestamp type is encoded as a string in the
|
||||
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||
* where {year} is always expressed using four digits while {month}, {day},
|
||||
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||
* is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||
* able to accept both UTC and other timezones (as indicated by an offset).
|
||||
*
|
||||
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||
* 01:30 UTC on January 15, 2017.
|
||||
*
|
||||
* In JavaScript, one can convert a Date object to this format using the
|
||||
* standard
|
||||
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||
* method. In Python, a standard `datetime.datetime` object can be converted
|
||||
* to this format using
|
||||
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
|
||||
* ) to obtain a formatter capable of generating timestamps in this format.
|
||||
*/
|
||||
export interface Timestamp {
|
||||
/**
|
||||
* Represents seconds of UTC time since Unix epoch
|
||||
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||
* 9999-12-31T23:59:59Z inclusive.
|
||||
*/
|
||||
seconds: string;
|
||||
/**
|
||||
* Non-negative fractions of a second at nanosecond resolution. Negative
|
||||
* second values with fractions must still have non-negative nanos values
|
||||
* that count forward in time. Must be from 0 to 999,999,999
|
||||
* inclusive.
|
||||
*/
|
||||
nanos: number;
|
||||
}
|
||||
export declare const Timestamp: {
|
||||
fromJSON(object: any): Timestamp;
|
||||
toJSON(message: Timestamp): unknown;
|
||||
};
|
||||
24
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
generated
vendored
Normal file
24
node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
generated
vendored
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
"use strict";
|
||||
/* eslint-disable */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Timestamp = void 0;
|
||||
function createBaseTimestamp() {
|
||||
return { seconds: "0", nanos: 0 };
|
||||
}
|
||||
exports.Timestamp = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
seconds: isSet(object.seconds) ? String(object.seconds) : "0",
|
||||
nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.seconds !== undefined && (obj.seconds = message.seconds);
|
||||
message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
99
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts
generated
vendored
Normal file
99
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
import { Envelope } from "./envelope";
|
||||
import { MessageSignature, PublicKeyIdentifier, RFC3161SignedTimestamp, X509Certificate, X509CertificateChain } from "./sigstore_common";
|
||||
import { TransparencyLogEntry } from "./sigstore_rekor";
|
||||
/**
|
||||
* Various timestamped counter signatures over the artifacts signature.
|
||||
* Currently only RFC3161 signatures are provided. More formats may be added
|
||||
* in the future.
|
||||
*/
|
||||
export interface TimestampVerificationData {
|
||||
/**
|
||||
* A list of RFC3161 signed timestamps provided by the user.
|
||||
* This can be used when the entry has not been stored on a
|
||||
* transparency log, or in conjunction for a stronger trust model.
|
||||
* Clients MUST verify the hashed message in the message imprint
|
||||
* against the signature in the bundle.
|
||||
*/
|
||||
rfc3161Timestamps: RFC3161SignedTimestamp[];
|
||||
}
|
||||
/**
|
||||
* VerificationMaterial captures details on the materials used to verify
|
||||
* signatures. This message may be embedded in a DSSE envelope as a signature
|
||||
* extension. Specifically, the `ext` field of the extension will expect this
|
||||
* message when the signature extension is for Sigstore. This is identified by
|
||||
* the `kind` field in the extension, which must be set to
|
||||
* application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore.
|
||||
* When used as a DSSE extension, if the `public_key` field is used to indicate
|
||||
* the key identifier, it MUST match the `keyid` field of the signature the
|
||||
* extension is attached to.
|
||||
*/
|
||||
export interface VerificationMaterial {
|
||||
content?: {
|
||||
$case: "publicKey";
|
||||
publicKey: PublicKeyIdentifier;
|
||||
} | {
|
||||
$case: "x509CertificateChain";
|
||||
x509CertificateChain: X509CertificateChain;
|
||||
} | {
|
||||
$case: "certificate";
|
||||
certificate: X509Certificate;
|
||||
};
|
||||
/**
|
||||
* An inclusion proof and an optional signed timestamp from the log.
|
||||
* Client verification libraries MAY provide an option to support v0.1
|
||||
* bundles for backwards compatibility, which may contain an inclusion
|
||||
* promise and not an inclusion proof. In this case, the client MUST
|
||||
* validate the promise.
|
||||
* Verifiers SHOULD NOT allow v0.1 bundles if they're used in an
|
||||
* ecosystem which never produced them.
|
||||
*/
|
||||
tlogEntries: TransparencyLogEntry[];
|
||||
/**
|
||||
* Timestamp may also come from
|
||||
* tlog_entries.inclusion_promise.signed_entry_timestamp.
|
||||
*/
|
||||
timestampVerificationData: TimestampVerificationData | undefined;
|
||||
}
|
||||
export interface Bundle {
|
||||
/**
|
||||
* MUST be application/vnd.dev.sigstore.bundle.v0.3+json when
|
||||
* when encoded as JSON.
|
||||
* Clients must to be able to accept media type using the previously
|
||||
* defined formats:
|
||||
* * application/vnd.dev.sigstore.bundle+json;version=0.1
|
||||
* * application/vnd.dev.sigstore.bundle+json;version=0.2
|
||||
* * application/vnd.dev.sigstore.bundle+json;version=0.3
|
||||
*/
|
||||
mediaType: string;
|
||||
/**
|
||||
* When a signer is identified by a X.509 certificate, a verifier MUST
|
||||
* verify that the signature was computed at the time the certificate
|
||||
* was valid as described in the Sigstore client spec: "Verification
|
||||
* using a Bundle".
|
||||
* <https://docs.google.com/document/d/1kbhK2qyPPk8SLavHzYSDM8-Ueul9_oxIMVFuWMWKz0E/edit#heading=h.x8bduppe89ln>
|
||||
* If the verification material contains a public key identifier
|
||||
* (key hint) and the `content` is a DSSE envelope, the key hints
|
||||
* MUST be exactly the same in the verification material and in the
|
||||
* DSSE envelope.
|
||||
*/
|
||||
verificationMaterial: VerificationMaterial | undefined;
|
||||
content?: {
|
||||
$case: "messageSignature";
|
||||
messageSignature: MessageSignature;
|
||||
} | {
|
||||
$case: "dsseEnvelope";
|
||||
dsseEnvelope: Envelope;
|
||||
};
|
||||
}
|
||||
export declare const TimestampVerificationData: {
|
||||
fromJSON(object: any): TimestampVerificationData;
|
||||
toJSON(message: TimestampVerificationData): unknown;
|
||||
};
|
||||
export declare const VerificationMaterial: {
|
||||
fromJSON(object: any): VerificationMaterial;
|
||||
toJSON(message: VerificationMaterial): unknown;
|
||||
};
|
||||
export declare const Bundle: {
|
||||
fromJSON(object: any): Bundle;
|
||||
toJSON(message: Bundle): unknown;
|
||||
};
|
||||
112
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
generated
vendored
Normal file
112
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
generated
vendored
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
|
||||
/* eslint-disable */
|
||||
const envelope_1 = require("./envelope");
|
||||
const sigstore_common_1 = require("./sigstore_common");
|
||||
const sigstore_rekor_1 = require("./sigstore_rekor");
|
||||
function createBaseTimestampVerificationData() {
|
||||
return { rfc3161Timestamps: [] };
|
||||
}
|
||||
exports.TimestampVerificationData = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
|
||||
? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
|
||||
: [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
if (message.rfc3161Timestamps) {
|
||||
obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.rfc3161Timestamps = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseVerificationMaterial() {
|
||||
return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
|
||||
}
|
||||
exports.VerificationMaterial = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
content: isSet(object.publicKey)
|
||||
? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
|
||||
: isSet(object.x509CertificateChain)
|
||||
? {
|
||||
$case: "x509CertificateChain",
|
||||
x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
|
||||
}
|
||||
: isSet(object.certificate)
|
||||
? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
|
||||
: undefined,
|
||||
tlogEntries: Array.isArray(object?.tlogEntries)
|
||||
? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
|
||||
: [],
|
||||
timestampVerificationData: isSet(object.timestampVerificationData)
|
||||
? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.content?.$case === "publicKey" &&
|
||||
(obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
|
||||
message.content?.$case === "x509CertificateChain" &&
|
||||
(obj.x509CertificateChain = message.content?.x509CertificateChain
|
||||
? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
|
||||
: undefined);
|
||||
message.content?.$case === "certificate" &&
|
||||
(obj.certificate = message.content?.certificate
|
||||
? sigstore_common_1.X509Certificate.toJSON(message.content?.certificate)
|
||||
: undefined);
|
||||
if (message.tlogEntries) {
|
||||
obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.tlogEntries = [];
|
||||
}
|
||||
message.timestampVerificationData !== undefined &&
|
||||
(obj.timestampVerificationData = message.timestampVerificationData
|
||||
? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
|
||||
: undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseBundle() {
|
||||
return { mediaType: "", verificationMaterial: undefined, content: undefined };
|
||||
}
|
||||
exports.Bundle = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
|
||||
verificationMaterial: isSet(object.verificationMaterial)
|
||||
? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
|
||||
: undefined,
|
||||
content: isSet(object.messageSignature)
|
||||
? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
|
||||
: isSet(object.dsseEnvelope)
|
||||
? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.mediaType !== undefined && (obj.mediaType = message.mediaType);
|
||||
message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
|
||||
? exports.VerificationMaterial.toJSON(message.verificationMaterial)
|
||||
: undefined);
|
||||
message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
|
||||
? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
|
||||
: undefined);
|
||||
message.content?.$case === "dsseEnvelope" &&
|
||||
(obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
290
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts
generated
vendored
Normal file
290
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,290 @@
|
|||
/// <reference types="node" />
|
||||
/**
|
||||
* Only a subset of the secure hash standard algorithms are supported.
|
||||
* See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
|
||||
* details.
|
||||
* UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
|
||||
* any proto JSON serialization to emit the used hash algorithm, as default
|
||||
* option is to *omit* the default value of an enum (which is the first
|
||||
* value, represented by '0'.
|
||||
*/
|
||||
export declare enum HashAlgorithm {
|
||||
HASH_ALGORITHM_UNSPECIFIED = 0,
|
||||
SHA2_256 = 1,
|
||||
SHA2_384 = 2,
|
||||
SHA2_512 = 3,
|
||||
SHA3_256 = 4,
|
||||
SHA3_384 = 5
|
||||
}
|
||||
export declare function hashAlgorithmFromJSON(object: any): HashAlgorithm;
|
||||
export declare function hashAlgorithmToJSON(object: HashAlgorithm): string;
|
||||
/**
|
||||
* Details of a specific public key, capturing the the key encoding method,
|
||||
* and signature algorithm.
|
||||
*
|
||||
* PublicKeyDetails captures the public key/hash algorithm combinations
|
||||
* recommended in the Sigstore ecosystem.
|
||||
*
|
||||
* This is modelled as a linear set as we want to provide a small number of
|
||||
* opinionated options instead of allowing every possible permutation.
|
||||
*
|
||||
* Any changes to this enum MUST be reflected in the algorithm registry.
|
||||
* See: docs/algorithm-registry.md
|
||||
*
|
||||
* To avoid the possibility of contradicting formats such as PKCS1 with
|
||||
* ED25519 the valid permutations are listed as a linear set instead of a
|
||||
* cartesian set (i.e one combined variable instead of two, one for encoding
|
||||
* and one for the signature algorithm).
|
||||
*/
|
||||
export declare enum PublicKeyDetails {
|
||||
PUBLIC_KEY_DETAILS_UNSPECIFIED = 0,
|
||||
/**
|
||||
* PKCS1_RSA_PKCS1V5 - RSA
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
PKCS1_RSA_PKCS1V5 = 1,
|
||||
/**
|
||||
* PKCS1_RSA_PSS - See RFC8017
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
PKCS1_RSA_PSS = 2,
|
||||
/** @deprecated */
|
||||
PKIX_RSA_PKCS1V5 = 3,
|
||||
/** @deprecated */
|
||||
PKIX_RSA_PSS = 4,
|
||||
/** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
|
||||
PKIX_RSA_PKCS1V15_2048_SHA256 = 9,
|
||||
PKIX_RSA_PKCS1V15_3072_SHA256 = 10,
|
||||
PKIX_RSA_PKCS1V15_4096_SHA256 = 11,
|
||||
/** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
|
||||
PKIX_RSA_PSS_2048_SHA256 = 16,
|
||||
PKIX_RSA_PSS_3072_SHA256 = 17,
|
||||
PKIX_RSA_PSS_4096_SHA256 = 18,
|
||||
/**
|
||||
* PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
PKIX_ECDSA_P256_HMAC_SHA_256 = 6,
|
||||
/** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
|
||||
PKIX_ECDSA_P256_SHA_256 = 5,
|
||||
PKIX_ECDSA_P384_SHA_384 = 12,
|
||||
PKIX_ECDSA_P521_SHA_512 = 13,
|
||||
/** PKIX_ED25519 - Ed 25519 */
|
||||
PKIX_ED25519 = 7,
|
||||
PKIX_ED25519_PH = 8,
|
||||
/**
|
||||
* LMS_SHA256 - LMS and LM-OTS
|
||||
*
|
||||
* These keys and signatures may be used by private Sigstore
|
||||
* deployments, but are not currently supported by the public
|
||||
* good instance.
|
||||
*
|
||||
* USER WARNING: LMS and LM-OTS are both stateful signature schemes.
|
||||
* Using them correctly requires discretion and careful consideration
|
||||
* to ensure that individual secret keys are not used more than once.
|
||||
* In addition, LM-OTS is a single-use scheme, meaning that it
|
||||
* MUST NOT be used for more than one signature per LM-OTS key.
|
||||
* If you cannot maintain these invariants, you MUST NOT use these
|
||||
* schemes.
|
||||
*/
|
||||
LMS_SHA256 = 14,
|
||||
LMOTS_SHA256 = 15
|
||||
}
|
||||
export declare function publicKeyDetailsFromJSON(object: any): PublicKeyDetails;
|
||||
export declare function publicKeyDetailsToJSON(object: PublicKeyDetails): string;
|
||||
export declare enum SubjectAlternativeNameType {
|
||||
SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED = 0,
|
||||
EMAIL = 1,
|
||||
URI = 2,
|
||||
/**
|
||||
* OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
|
||||
* See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
|
||||
* for more details.
|
||||
*/
|
||||
OTHER_NAME = 3
|
||||
}
|
||||
export declare function subjectAlternativeNameTypeFromJSON(object: any): SubjectAlternativeNameType;
|
||||
export declare function subjectAlternativeNameTypeToJSON(object: SubjectAlternativeNameType): string;
|
||||
/**
|
||||
* HashOutput captures a digest of a 'message' (generic octet sequence)
|
||||
* and the corresponding hash algorithm used.
|
||||
*/
|
||||
export interface HashOutput {
|
||||
algorithm: HashAlgorithm;
|
||||
/**
|
||||
* This is the raw octets of the message digest as computed by
|
||||
* the hash algorithm.
|
||||
*/
|
||||
digest: Buffer;
|
||||
}
|
||||
/** MessageSignature stores the computed signature over a message. */
|
||||
export interface MessageSignature {
|
||||
/**
|
||||
* Message digest can be used to identify the artifact.
|
||||
* Clients MUST NOT attempt to use this digest to verify the associated
|
||||
* signature; it is intended solely for identification.
|
||||
*/
|
||||
messageDigest: HashOutput | undefined;
|
||||
/**
|
||||
* The raw bytes as returned from the signature algorithm.
|
||||
* The signature algorithm (and so the format of the signature bytes)
|
||||
* are determined by the contents of the 'verification_material',
|
||||
* either a key-pair or a certificate. If using a certificate, the
|
||||
* certificate contains the required information on the signature
|
||||
* algorithm.
|
||||
* When using a key pair, the algorithm MUST be part of the public
|
||||
* key, which MUST be communicated out-of-band.
|
||||
*/
|
||||
signature: Buffer;
|
||||
}
|
||||
/** LogId captures the identity of a transparency log. */
|
||||
export interface LogId {
|
||||
/** The unique identity of the log, represented by its public key. */
|
||||
keyId: Buffer;
|
||||
}
|
||||
/** This message holds a RFC 3161 timestamp. */
|
||||
export interface RFC3161SignedTimestamp {
|
||||
/**
|
||||
* Signed timestamp is the DER encoded TimeStampResponse.
|
||||
* See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2
|
||||
*/
|
||||
signedTimestamp: Buffer;
|
||||
}
|
||||
export interface PublicKey {
|
||||
/**
|
||||
* DER-encoded public key, encoding method is specified by the
|
||||
* key_details attribute.
|
||||
*/
|
||||
rawBytes?: Buffer | undefined;
|
||||
/** Key encoding and signature algorithm to use for this key. */
|
||||
keyDetails: PublicKeyDetails;
|
||||
/** Optional validity period for this key, *inclusive* of the endpoints. */
|
||||
validFor?: TimeRange | undefined;
|
||||
}
|
||||
/**
|
||||
* PublicKeyIdentifier can be used to identify an (out of band) delivered
|
||||
* key, to verify a signature.
|
||||
*/
|
||||
export interface PublicKeyIdentifier {
|
||||
/**
|
||||
* Optional unauthenticated hint on which key to use.
|
||||
* The format of the hint must be agreed upon out of band by the
|
||||
* signer and the verifiers, and so is not subject to this
|
||||
* specification.
|
||||
* Example use-case is to specify the public key to use, from a
|
||||
* trusted key-ring.
|
||||
* Implementors are RECOMMENDED to derive the value from the public
|
||||
* key as described in RFC 6962.
|
||||
* See: <https://www.rfc-editor.org/rfc/rfc6962#section-3.2>
|
||||
*/
|
||||
hint: string;
|
||||
}
|
||||
/** An ASN.1 OBJECT IDENTIFIER */
|
||||
export interface ObjectIdentifier {
|
||||
id: number[];
|
||||
}
|
||||
/** An OID and the corresponding (byte) value. */
|
||||
export interface ObjectIdentifierValuePair {
|
||||
oid: ObjectIdentifier | undefined;
|
||||
value: Buffer;
|
||||
}
|
||||
export interface DistinguishedName {
|
||||
organization: string;
|
||||
commonName: string;
|
||||
}
|
||||
export interface X509Certificate {
|
||||
/** DER-encoded X.509 certificate. */
|
||||
rawBytes: Buffer;
|
||||
}
|
||||
export interface SubjectAlternativeName {
|
||||
type: SubjectAlternativeNameType;
|
||||
identity?: {
|
||||
$case: "regexp";
|
||||
regexp: string;
|
||||
} | {
|
||||
$case: "value";
|
||||
value: string;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* A collection of X.509 certificates.
|
||||
*
|
||||
* This "chain" can be used in multiple contexts, such as providing a root CA
|
||||
* certificate within a TUF root of trust or multiple untrusted certificates for
|
||||
* the purpose of chain building.
|
||||
*/
|
||||
export interface X509CertificateChain {
|
||||
/**
|
||||
* One or more DER-encoded certificates.
|
||||
*
|
||||
* In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence
|
||||
* has an imposed order. Unless explicitly specified, there is otherwise no
|
||||
* guaranteed order.
|
||||
*/
|
||||
certificates: X509Certificate[];
|
||||
}
|
||||
/**
|
||||
* The time range is closed and includes both the start and end times,
|
||||
* (i.e., [start, end]).
|
||||
* End is optional to be able to capture a period that has started but
|
||||
* has no known end.
|
||||
*/
|
||||
export interface TimeRange {
|
||||
start: Date | undefined;
|
||||
end?: Date | undefined;
|
||||
}
|
||||
export declare const HashOutput: {
|
||||
fromJSON(object: any): HashOutput;
|
||||
toJSON(message: HashOutput): unknown;
|
||||
};
|
||||
export declare const MessageSignature: {
|
||||
fromJSON(object: any): MessageSignature;
|
||||
toJSON(message: MessageSignature): unknown;
|
||||
};
|
||||
export declare const LogId: {
|
||||
fromJSON(object: any): LogId;
|
||||
toJSON(message: LogId): unknown;
|
||||
};
|
||||
export declare const RFC3161SignedTimestamp: {
|
||||
fromJSON(object: any): RFC3161SignedTimestamp;
|
||||
toJSON(message: RFC3161SignedTimestamp): unknown;
|
||||
};
|
||||
export declare const PublicKey: {
|
||||
fromJSON(object: any): PublicKey;
|
||||
toJSON(message: PublicKey): unknown;
|
||||
};
|
||||
export declare const PublicKeyIdentifier: {
|
||||
fromJSON(object: any): PublicKeyIdentifier;
|
||||
toJSON(message: PublicKeyIdentifier): unknown;
|
||||
};
|
||||
export declare const ObjectIdentifier: {
|
||||
fromJSON(object: any): ObjectIdentifier;
|
||||
toJSON(message: ObjectIdentifier): unknown;
|
||||
};
|
||||
export declare const ObjectIdentifierValuePair: {
|
||||
fromJSON(object: any): ObjectIdentifierValuePair;
|
||||
toJSON(message: ObjectIdentifierValuePair): unknown;
|
||||
};
|
||||
export declare const DistinguishedName: {
|
||||
fromJSON(object: any): DistinguishedName;
|
||||
toJSON(message: DistinguishedName): unknown;
|
||||
};
|
||||
export declare const X509Certificate: {
|
||||
fromJSON(object: any): X509Certificate;
|
||||
toJSON(message: X509Certificate): unknown;
|
||||
};
|
||||
export declare const SubjectAlternativeName: {
|
||||
fromJSON(object: any): SubjectAlternativeName;
|
||||
toJSON(message: SubjectAlternativeName): unknown;
|
||||
};
|
||||
export declare const X509CertificateChain: {
|
||||
fromJSON(object: any): X509CertificateChain;
|
||||
toJSON(message: X509CertificateChain): unknown;
|
||||
};
|
||||
export declare const TimeRange: {
|
||||
fromJSON(object: any): TimeRange;
|
||||
toJSON(message: TimeRange): unknown;
|
||||
};
|
||||
588
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
generated
vendored
Normal file
588
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
generated
vendored
Normal file
|
|
@ -0,0 +1,588 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
|
||||
/* eslint-disable */
|
||||
const timestamp_1 = require("./google/protobuf/timestamp");
|
||||
/**
|
||||
* Only a subset of the secure hash standard algorithms are supported.
|
||||
* See <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf> for more
|
||||
* details.
|
||||
* UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
|
||||
* any proto JSON serialization to emit the used hash algorithm, as default
|
||||
* option is to *omit* the default value of an enum (which is the first
|
||||
* value, represented by '0'.
|
||||
*/
|
||||
var HashAlgorithm;
|
||||
(function (HashAlgorithm) {
|
||||
HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
|
||||
HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
|
||||
HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
|
||||
HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
|
||||
HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
|
||||
HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
|
||||
})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
|
||||
function hashAlgorithmFromJSON(object) {
|
||||
switch (object) {
|
||||
case 0:
|
||||
case "HASH_ALGORITHM_UNSPECIFIED":
|
||||
return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
|
||||
case 1:
|
||||
case "SHA2_256":
|
||||
return HashAlgorithm.SHA2_256;
|
||||
case 2:
|
||||
case "SHA2_384":
|
||||
return HashAlgorithm.SHA2_384;
|
||||
case 3:
|
||||
case "SHA2_512":
|
||||
return HashAlgorithm.SHA2_512;
|
||||
case 4:
|
||||
case "SHA3_256":
|
||||
return HashAlgorithm.SHA3_256;
|
||||
case 5:
|
||||
case "SHA3_384":
|
||||
return HashAlgorithm.SHA3_384;
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
|
||||
}
|
||||
}
|
||||
exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
|
||||
function hashAlgorithmToJSON(object) {
|
||||
switch (object) {
|
||||
case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
|
||||
return "HASH_ALGORITHM_UNSPECIFIED";
|
||||
case HashAlgorithm.SHA2_256:
|
||||
return "SHA2_256";
|
||||
case HashAlgorithm.SHA2_384:
|
||||
return "SHA2_384";
|
||||
case HashAlgorithm.SHA2_512:
|
||||
return "SHA2_512";
|
||||
case HashAlgorithm.SHA3_256:
|
||||
return "SHA3_256";
|
||||
case HashAlgorithm.SHA3_384:
|
||||
return "SHA3_384";
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
|
||||
}
|
||||
}
|
||||
exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
|
||||
/**
|
||||
* Details of a specific public key, capturing the the key encoding method,
|
||||
* and signature algorithm.
|
||||
*
|
||||
* PublicKeyDetails captures the public key/hash algorithm combinations
|
||||
* recommended in the Sigstore ecosystem.
|
||||
*
|
||||
* This is modelled as a linear set as we want to provide a small number of
|
||||
* opinionated options instead of allowing every possible permutation.
|
||||
*
|
||||
* Any changes to this enum MUST be reflected in the algorithm registry.
|
||||
* See: docs/algorithm-registry.md
|
||||
*
|
||||
* To avoid the possibility of contradicting formats such as PKCS1 with
|
||||
* ED25519 the valid permutations are listed as a linear set instead of a
|
||||
* cartesian set (i.e one combined variable instead of two, one for encoding
|
||||
* and one for the signature algorithm).
|
||||
*/
|
||||
var PublicKeyDetails;
|
||||
(function (PublicKeyDetails) {
|
||||
PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
|
||||
/**
|
||||
* PKCS1_RSA_PKCS1V5 - RSA
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
|
||||
/**
|
||||
* PKCS1_RSA_PSS - See RFC8017
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
|
||||
/** @deprecated */
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
|
||||
/** @deprecated */
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
|
||||
/** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
|
||||
/** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
|
||||
/**
|
||||
* PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
|
||||
/** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
|
||||
/** PKIX_ED25519 - Ed 25519 */
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
|
||||
PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
|
||||
/**
|
||||
* LMS_SHA256 - LMS and LM-OTS
|
||||
*
|
||||
* These keys and signatures may be used by private Sigstore
|
||||
* deployments, but are not currently supported by the public
|
||||
* good instance.
|
||||
*
|
||||
* USER WARNING: LMS and LM-OTS are both stateful signature schemes.
|
||||
* Using them correctly requires discretion and careful consideration
|
||||
* to ensure that individual secret keys are not used more than once.
|
||||
* In addition, LM-OTS is a single-use scheme, meaning that it
|
||||
* MUST NOT be used for more than one signature per LM-OTS key.
|
||||
* If you cannot maintain these invariants, you MUST NOT use these
|
||||
* schemes.
|
||||
*/
|
||||
PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
|
||||
PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
|
||||
})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
|
||||
function publicKeyDetailsFromJSON(object) {
|
||||
switch (object) {
|
||||
case 0:
|
||||
case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
|
||||
return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
|
||||
case 1:
|
||||
case "PKCS1_RSA_PKCS1V5":
|
||||
return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
|
||||
case 2:
|
||||
case "PKCS1_RSA_PSS":
|
||||
return PublicKeyDetails.PKCS1_RSA_PSS;
|
||||
case 3:
|
||||
case "PKIX_RSA_PKCS1V5":
|
||||
return PublicKeyDetails.PKIX_RSA_PKCS1V5;
|
||||
case 4:
|
||||
case "PKIX_RSA_PSS":
|
||||
return PublicKeyDetails.PKIX_RSA_PSS;
|
||||
case 9:
|
||||
case "PKIX_RSA_PKCS1V15_2048_SHA256":
|
||||
return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
|
||||
case 10:
|
||||
case "PKIX_RSA_PKCS1V15_3072_SHA256":
|
||||
return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
|
||||
case 11:
|
||||
case "PKIX_RSA_PKCS1V15_4096_SHA256":
|
||||
return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
|
||||
case 16:
|
||||
case "PKIX_RSA_PSS_2048_SHA256":
|
||||
return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
|
||||
case 17:
|
||||
case "PKIX_RSA_PSS_3072_SHA256":
|
||||
return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
|
||||
case 18:
|
||||
case "PKIX_RSA_PSS_4096_SHA256":
|
||||
return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
|
||||
case 6:
|
||||
case "PKIX_ECDSA_P256_HMAC_SHA_256":
|
||||
return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
|
||||
case 5:
|
||||
case "PKIX_ECDSA_P256_SHA_256":
|
||||
return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
|
||||
case 12:
|
||||
case "PKIX_ECDSA_P384_SHA_384":
|
||||
return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
|
||||
case 13:
|
||||
case "PKIX_ECDSA_P521_SHA_512":
|
||||
return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
|
||||
case 7:
|
||||
case "PKIX_ED25519":
|
||||
return PublicKeyDetails.PKIX_ED25519;
|
||||
case 8:
|
||||
case "PKIX_ED25519_PH":
|
||||
return PublicKeyDetails.PKIX_ED25519_PH;
|
||||
case 14:
|
||||
case "LMS_SHA256":
|
||||
return PublicKeyDetails.LMS_SHA256;
|
||||
case 15:
|
||||
case "LMOTS_SHA256":
|
||||
return PublicKeyDetails.LMOTS_SHA256;
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
|
||||
}
|
||||
}
|
||||
exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
|
||||
function publicKeyDetailsToJSON(object) {
|
||||
switch (object) {
|
||||
case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
|
||||
return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
|
||||
case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
|
||||
return "PKCS1_RSA_PKCS1V5";
|
||||
case PublicKeyDetails.PKCS1_RSA_PSS:
|
||||
return "PKCS1_RSA_PSS";
|
||||
case PublicKeyDetails.PKIX_RSA_PKCS1V5:
|
||||
return "PKIX_RSA_PKCS1V5";
|
||||
case PublicKeyDetails.PKIX_RSA_PSS:
|
||||
return "PKIX_RSA_PSS";
|
||||
case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
|
||||
return "PKIX_RSA_PKCS1V15_2048_SHA256";
|
||||
case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
|
||||
return "PKIX_RSA_PKCS1V15_3072_SHA256";
|
||||
case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
|
||||
return "PKIX_RSA_PKCS1V15_4096_SHA256";
|
||||
case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
|
||||
return "PKIX_RSA_PSS_2048_SHA256";
|
||||
case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
|
||||
return "PKIX_RSA_PSS_3072_SHA256";
|
||||
case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
|
||||
return "PKIX_RSA_PSS_4096_SHA256";
|
||||
case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
|
||||
return "PKIX_ECDSA_P256_HMAC_SHA_256";
|
||||
case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
|
||||
return "PKIX_ECDSA_P256_SHA_256";
|
||||
case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
|
||||
return "PKIX_ECDSA_P384_SHA_384";
|
||||
case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
|
||||
return "PKIX_ECDSA_P521_SHA_512";
|
||||
case PublicKeyDetails.PKIX_ED25519:
|
||||
return "PKIX_ED25519";
|
||||
case PublicKeyDetails.PKIX_ED25519_PH:
|
||||
return "PKIX_ED25519_PH";
|
||||
case PublicKeyDetails.LMS_SHA256:
|
||||
return "LMS_SHA256";
|
||||
case PublicKeyDetails.LMOTS_SHA256:
|
||||
return "LMOTS_SHA256";
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
|
||||
}
|
||||
}
|
||||
exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
|
||||
var SubjectAlternativeNameType;
|
||||
(function (SubjectAlternativeNameType) {
|
||||
SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
|
||||
SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
|
||||
SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
|
||||
/**
|
||||
* OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
|
||||
* See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
|
||||
* for more details.
|
||||
*/
|
||||
SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
|
||||
})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
|
||||
function subjectAlternativeNameTypeFromJSON(object) {
|
||||
switch (object) {
|
||||
case 0:
|
||||
case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
|
||||
return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
|
||||
case 1:
|
||||
case "EMAIL":
|
||||
return SubjectAlternativeNameType.EMAIL;
|
||||
case 2:
|
||||
case "URI":
|
||||
return SubjectAlternativeNameType.URI;
|
||||
case 3:
|
||||
case "OTHER_NAME":
|
||||
return SubjectAlternativeNameType.OTHER_NAME;
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
|
||||
}
|
||||
}
|
||||
exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
|
||||
function subjectAlternativeNameTypeToJSON(object) {
|
||||
switch (object) {
|
||||
case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
|
||||
return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
|
||||
case SubjectAlternativeNameType.EMAIL:
|
||||
return "EMAIL";
|
||||
case SubjectAlternativeNameType.URI:
|
||||
return "URI";
|
||||
case SubjectAlternativeNameType.OTHER_NAME:
|
||||
return "OTHER_NAME";
|
||||
default:
|
||||
throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
|
||||
}
|
||||
}
|
||||
exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
|
||||
function createBaseHashOutput() {
|
||||
return { algorithm: 0, digest: Buffer.alloc(0) };
|
||||
}
|
||||
exports.HashOutput = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
|
||||
digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
|
||||
message.digest !== undefined &&
|
||||
(obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseMessageSignature() {
|
||||
return { messageDigest: undefined, signature: Buffer.alloc(0) };
|
||||
}
|
||||
exports.MessageSignature = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
|
||||
signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.messageDigest !== undefined &&
|
||||
(obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
|
||||
message.signature !== undefined &&
|
||||
(obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseLogId() {
|
||||
return { keyId: Buffer.alloc(0) };
|
||||
}
|
||||
exports.LogId = {
|
||||
fromJSON(object) {
|
||||
return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.keyId !== undefined &&
|
||||
(obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseRFC3161SignedTimestamp() {
|
||||
return { signedTimestamp: Buffer.alloc(0) };
|
||||
}
|
||||
exports.RFC3161SignedTimestamp = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
signedTimestamp: isSet(object.signedTimestamp)
|
||||
? Buffer.from(bytesFromBase64(object.signedTimestamp))
|
||||
: Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.signedTimestamp !== undefined &&
|
||||
(obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBasePublicKey() {
|
||||
return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
|
||||
}
|
||||
exports.PublicKey = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
|
||||
keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
|
||||
validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.rawBytes !== undefined &&
|
||||
(obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
|
||||
message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
|
||||
message.validFor !== undefined &&
|
||||
(obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBasePublicKeyIdentifier() {
|
||||
return { hint: "" };
|
||||
}
|
||||
exports.PublicKeyIdentifier = {
|
||||
fromJSON(object) {
|
||||
return { hint: isSet(object.hint) ? String(object.hint) : "" };
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.hint !== undefined && (obj.hint = message.hint);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseObjectIdentifier() {
|
||||
return { id: [] };
|
||||
}
|
||||
exports.ObjectIdentifier = {
|
||||
fromJSON(object) {
|
||||
return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
if (message.id) {
|
||||
obj.id = message.id.map((e) => Math.round(e));
|
||||
}
|
||||
else {
|
||||
obj.id = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseObjectIdentifierValuePair() {
|
||||
return { oid: undefined, value: Buffer.alloc(0) };
|
||||
}
|
||||
exports.ObjectIdentifierValuePair = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
|
||||
value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
|
||||
message.value !== undefined &&
|
||||
(obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseDistinguishedName() {
|
||||
return { organization: "", commonName: "" };
|
||||
}
|
||||
exports.DistinguishedName = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
organization: isSet(object.organization) ? String(object.organization) : "",
|
||||
commonName: isSet(object.commonName) ? String(object.commonName) : "",
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.organization !== undefined && (obj.organization = message.organization);
|
||||
message.commonName !== undefined && (obj.commonName = message.commonName);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseX509Certificate() {
|
||||
return { rawBytes: Buffer.alloc(0) };
|
||||
}
|
||||
exports.X509Certificate = {
|
||||
fromJSON(object) {
|
||||
return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.rawBytes !== undefined &&
|
||||
(obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseSubjectAlternativeName() {
|
||||
return { type: 0, identity: undefined };
|
||||
}
|
||||
exports.SubjectAlternativeName = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
|
||||
identity: isSet(object.regexp)
|
||||
? { $case: "regexp", regexp: String(object.regexp) }
|
||||
: isSet(object.value)
|
||||
? { $case: "value", value: String(object.value) }
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
|
||||
message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
|
||||
message.identity?.$case === "value" && (obj.value = message.identity?.value);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseX509CertificateChain() {
|
||||
return { certificates: [] };
|
||||
}
|
||||
exports.X509CertificateChain = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
certificates: Array.isArray(object?.certificates)
|
||||
? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
|
||||
: [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
if (message.certificates) {
|
||||
obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.certificates = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseTimeRange() {
|
||||
return { start: undefined, end: undefined };
|
||||
}
|
||||
exports.TimeRange = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
|
||||
end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.start !== undefined && (obj.start = message.start.toISOString());
|
||||
message.end !== undefined && (obj.end = message.end.toISOString());
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
function bytesFromBase64(b64) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
|
||||
}
|
||||
else {
|
||||
const bin = tsProtoGlobalThis.atob(b64);
|
||||
const arr = new Uint8Array(bin.length);
|
||||
for (let i = 0; i < bin.length; ++i) {
|
||||
arr[i] = bin.charCodeAt(i);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
}
|
||||
function base64FromBytes(arr) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
|
||||
}
|
||||
else {
|
||||
const bin = [];
|
||||
arr.forEach((byte) => {
|
||||
bin.push(String.fromCharCode(byte));
|
||||
});
|
||||
return tsProtoGlobalThis.btoa(bin.join(""));
|
||||
}
|
||||
}
|
||||
function fromTimestamp(t) {
|
||||
let millis = Number(t.seconds) * 1000;
|
||||
millis += t.nanos / 1000000;
|
||||
return new Date(millis);
|
||||
}
|
||||
function fromJsonTimestamp(o) {
|
||||
if (o instanceof Date) {
|
||||
return o;
|
||||
}
|
||||
else if (typeof o === "string") {
|
||||
return new Date(o);
|
||||
}
|
||||
else {
|
||||
return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
|
||||
}
|
||||
}
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
148
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts
generated
vendored
Normal file
148
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,148 @@
|
|||
/// <reference types="node" />
|
||||
import { LogId } from "./sigstore_common";
|
||||
/** KindVersion contains the entry's kind and api version. */
|
||||
export interface KindVersion {
|
||||
/**
|
||||
* Kind is the type of entry being stored in the log.
|
||||
* See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types
|
||||
*/
|
||||
kind: string;
|
||||
/** The specific api version of the type. */
|
||||
version: string;
|
||||
}
|
||||
/**
|
||||
* The checkpoint MUST contain an origin string as a unique log identifier,
|
||||
* the tree size, and the root hash. It MAY also be followed by optional data,
|
||||
* and clients MUST NOT assume optional data. The checkpoint MUST also contain
|
||||
* a signature over the root hash (tree head). The checkpoint MAY contain additional
|
||||
* signatures, but the first SHOULD be the signature from the log. Checkpoint contents
|
||||
* are concatenated with newlines into a single string.
|
||||
* The checkpoint format is described in
|
||||
* https://github.com/transparency-dev/formats/blob/main/log/README.md
|
||||
* and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md.
|
||||
* An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go
|
||||
*/
|
||||
export interface Checkpoint {
|
||||
envelope: string;
|
||||
}
|
||||
/**
|
||||
* InclusionProof is the proof returned from the transparency log. Can
|
||||
* be used for offline or online verification against the log.
|
||||
*/
|
||||
export interface InclusionProof {
|
||||
/** The index of the entry in the tree it was written to. */
|
||||
logIndex: string;
|
||||
/**
|
||||
* The hash digest stored at the root of the merkle tree at the time
|
||||
* the proof was generated.
|
||||
*/
|
||||
rootHash: Buffer;
|
||||
/** The size of the merkle tree at the time the proof was generated. */
|
||||
treeSize: string;
|
||||
/**
|
||||
* A list of hashes required to compute the inclusion proof, sorted
|
||||
* in order from leaf to root.
|
||||
* Note that leaf and root hashes are not included.
|
||||
* The root hash is available separately in this message, and the
|
||||
* leaf hash should be calculated by the client.
|
||||
*/
|
||||
hashes: Buffer[];
|
||||
/**
|
||||
* Signature of the tree head, as of the time of this proof was
|
||||
* generated. See above info on 'Checkpoint' for more details.
|
||||
*/
|
||||
checkpoint: Checkpoint | undefined;
|
||||
}
|
||||
/**
|
||||
* The inclusion promise is calculated by Rekor. It's calculated as a
|
||||
* signature over a canonical JSON serialization of the persisted entry, the
|
||||
* log ID, log index and the integration timestamp.
|
||||
* See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54
|
||||
* The format of the signature depends on the transparency log's public key.
|
||||
* If the signature algorithm requires a hash function and/or a signature
|
||||
* scheme (e.g. RSA) those has to be retrieved out-of-band from the log's
|
||||
* operators, together with the public key.
|
||||
* This is used to verify the integration timestamp's value and that the log
|
||||
* has promised to include the entry.
|
||||
*/
|
||||
export interface InclusionPromise {
|
||||
signedEntryTimestamp: Buffer;
|
||||
}
|
||||
/**
|
||||
* TransparencyLogEntry captures all the details required from Rekor to
|
||||
* reconstruct an entry, given that the payload is provided via other means.
|
||||
* This type can easily be created from the existing response from Rekor.
|
||||
* Future iterations could rely on Rekor returning the minimal set of
|
||||
* attributes (excluding the payload) that are required for verifying the
|
||||
* inclusion promise. The inclusion promise (called SignedEntryTimestamp in
|
||||
* the response from Rekor) is similar to a Signed Certificate Timestamp
|
||||
* as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2.
|
||||
*/
|
||||
export interface TransparencyLogEntry {
|
||||
/** The global index of the entry, used when querying the log by index. */
|
||||
logIndex: string;
|
||||
/** The unique identifier of the log. */
|
||||
logId: LogId | undefined;
|
||||
/**
|
||||
* The kind (type) and version of the object associated with this
|
||||
* entry. These values are required to construct the entry during
|
||||
* verification.
|
||||
*/
|
||||
kindVersion: KindVersion | undefined;
|
||||
/** The UNIX timestamp from the log when the entry was persisted. */
|
||||
integratedTime: string;
|
||||
/**
|
||||
* The inclusion promise/signed entry timestamp from the log.
|
||||
* Required for v0.1 bundles, and MUST be verified.
|
||||
* Optional for >= v0.2 bundles, and SHOULD be verified when present.
|
||||
* Also may be used as a signed timestamp.
|
||||
*/
|
||||
inclusionPromise: InclusionPromise | undefined;
|
||||
/**
|
||||
* The inclusion proof can be used for offline or online verification
|
||||
* that the entry was appended to the log, and that the log has not been
|
||||
* altered.
|
||||
*/
|
||||
inclusionProof: InclusionProof | undefined;
|
||||
/**
|
||||
* Optional. The canonicalized transparency log entry, used to
|
||||
* reconstruct the Signed Entry Timestamp (SET) during verification.
|
||||
* The contents of this field are the same as the `body` field in
|
||||
* a Rekor response, meaning that it does **not** include the "full"
|
||||
* canonicalized form (of log index, ID, etc.) which are
|
||||
* exposed as separate fields. The verifier is responsible for
|
||||
* combining the `canonicalized_body`, `log_index`, `log_id`,
|
||||
* and `integrated_time` into the payload that the SET's signature
|
||||
* is generated over.
|
||||
* This field is intended to be used in cases where the SET cannot be
|
||||
* produced determinisitically (e.g. inconsistent JSON field ordering,
|
||||
* differing whitespace, etc).
|
||||
*
|
||||
* If set, clients MUST verify that the signature referenced in the
|
||||
* `canonicalized_body` matches the signature provided in the
|
||||
* `Bundle.content`.
|
||||
* If not set, clients are responsible for constructing an equivalent
|
||||
* payload from other sources to verify the signature.
|
||||
*/
|
||||
canonicalizedBody: Buffer;
|
||||
}
|
||||
export declare const KindVersion: {
|
||||
fromJSON(object: any): KindVersion;
|
||||
toJSON(message: KindVersion): unknown;
|
||||
};
|
||||
export declare const Checkpoint: {
|
||||
fromJSON(object: any): Checkpoint;
|
||||
toJSON(message: Checkpoint): unknown;
|
||||
};
|
||||
export declare const InclusionProof: {
|
||||
fromJSON(object: any): InclusionProof;
|
||||
toJSON(message: InclusionProof): unknown;
|
||||
};
|
||||
export declare const InclusionPromise: {
|
||||
fromJSON(object: any): InclusionPromise;
|
||||
toJSON(message: InclusionPromise): unknown;
|
||||
};
|
||||
export declare const TransparencyLogEntry: {
|
||||
fromJSON(object: any): TransparencyLogEntry;
|
||||
toJSON(message: TransparencyLogEntry): unknown;
|
||||
};
|
||||
167
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
generated
vendored
Normal file
167
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
generated
vendored
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
|
||||
/* eslint-disable */
|
||||
const sigstore_common_1 = require("./sigstore_common");
|
||||
function createBaseKindVersion() {
|
||||
return { kind: "", version: "" };
|
||||
}
|
||||
exports.KindVersion = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
kind: isSet(object.kind) ? String(object.kind) : "",
|
||||
version: isSet(object.version) ? String(object.version) : "",
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.kind !== undefined && (obj.kind = message.kind);
|
||||
message.version !== undefined && (obj.version = message.version);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseCheckpoint() {
|
||||
return { envelope: "" };
|
||||
}
|
||||
exports.Checkpoint = {
|
||||
fromJSON(object) {
|
||||
return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.envelope !== undefined && (obj.envelope = message.envelope);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseInclusionProof() {
|
||||
return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
|
||||
}
|
||||
exports.InclusionProof = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
|
||||
rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
|
||||
treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
|
||||
hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
|
||||
checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.logIndex !== undefined && (obj.logIndex = message.logIndex);
|
||||
message.rootHash !== undefined &&
|
||||
(obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
|
||||
message.treeSize !== undefined && (obj.treeSize = message.treeSize);
|
||||
if (message.hashes) {
|
||||
obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
|
||||
}
|
||||
else {
|
||||
obj.hashes = [];
|
||||
}
|
||||
message.checkpoint !== undefined &&
|
||||
(obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseInclusionPromise() {
|
||||
return { signedEntryTimestamp: Buffer.alloc(0) };
|
||||
}
|
||||
exports.InclusionPromise = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
signedEntryTimestamp: isSet(object.signedEntryTimestamp)
|
||||
? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
|
||||
: Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.signedEntryTimestamp !== undefined &&
|
||||
(obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseTransparencyLogEntry() {
|
||||
return {
|
||||
logIndex: "0",
|
||||
logId: undefined,
|
||||
kindVersion: undefined,
|
||||
integratedTime: "0",
|
||||
inclusionPromise: undefined,
|
||||
inclusionProof: undefined,
|
||||
canonicalizedBody: Buffer.alloc(0),
|
||||
};
|
||||
}
|
||||
exports.TransparencyLogEntry = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
|
||||
logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
|
||||
kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
|
||||
integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
|
||||
inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
|
||||
inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
|
||||
canonicalizedBody: isSet(object.canonicalizedBody)
|
||||
? Buffer.from(bytesFromBase64(object.canonicalizedBody))
|
||||
: Buffer.alloc(0),
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.logIndex !== undefined && (obj.logIndex = message.logIndex);
|
||||
message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
|
||||
message.kindVersion !== undefined &&
|
||||
(obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
|
||||
message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
|
||||
message.inclusionPromise !== undefined &&
|
||||
(obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
|
||||
message.inclusionProof !== undefined &&
|
||||
(obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
|
||||
message.canonicalizedBody !== undefined &&
|
||||
(obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
function bytesFromBase64(b64) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
|
||||
}
|
||||
else {
|
||||
const bin = tsProtoGlobalThis.atob(b64);
|
||||
const arr = new Uint8Array(bin.length);
|
||||
for (let i = 0; i < bin.length; ++i) {
|
||||
arr[i] = bin.charCodeAt(i);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
}
|
||||
function base64FromBytes(arr) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
|
||||
}
|
||||
else {
|
||||
const bin = [];
|
||||
arr.forEach((byte) => {
|
||||
bin.push(String.fromCharCode(byte));
|
||||
});
|
||||
return tsProtoGlobalThis.btoa(bin.join(""));
|
||||
}
|
||||
}
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
215
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts
generated
vendored
Normal file
215
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,215 @@
|
|||
import { DistinguishedName, HashAlgorithm, LogId, PublicKey, TimeRange, X509CertificateChain } from "./sigstore_common";
|
||||
/**
|
||||
* TransparencyLogInstance describes the immutable parameters from a
|
||||
* transparency log.
|
||||
* See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters
|
||||
* for more details.
|
||||
* The included parameters are the minimal set required to identify a log,
|
||||
* and verify an inclusion proof/promise.
|
||||
*/
|
||||
export interface TransparencyLogInstance {
|
||||
/** The base URL at which can be used to URLs for the client. */
|
||||
baseUrl: string;
|
||||
/** The hash algorithm used for the Merkle Tree. */
|
||||
hashAlgorithm: HashAlgorithm;
|
||||
/**
|
||||
* The public key used to verify signatures generated by the log.
|
||||
* This attribute contains the signature algorithm used by the log.
|
||||
*/
|
||||
publicKey: PublicKey | undefined;
|
||||
/**
|
||||
* The unique identifier for this transparency log.
|
||||
* Represented as the SHA-256 hash of the log's public key,
|
||||
* calculated over the DER encoding of the key represented as
|
||||
* SubjectPublicKeyInfo.
|
||||
* See https://www.rfc-editor.org/rfc/rfc6962#section-3.2
|
||||
*/
|
||||
logId: LogId | undefined;
|
||||
/**
|
||||
* The checkpoint key identifier for the log used in a checkpoint.
|
||||
* Optional, not provided for logs that do not generate checkpoints.
|
||||
* For logs that do generate checkpoints, if not set, assume
|
||||
* log_id equals checkpoint_key_id.
|
||||
* Follows the specification described here
|
||||
* for ECDSA and Ed25519 signatures:
|
||||
* https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures
|
||||
* For RSA signatures, the key ID will match the ECDSA format, the
|
||||
* hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT
|
||||
* use RSA-signed checkpoints, since witnesses do not support
|
||||
* RSA signatures.
|
||||
* This is provided for convenience. Clients can also calculate the
|
||||
* checkpoint key ID given the log's public key.
|
||||
* SHOULD be set for logs generating Ed25519 signatures.
|
||||
* SHOULD be 4 bytes long, as a truncated hash.
|
||||
*/
|
||||
checkpointKeyId: LogId | undefined;
|
||||
}
|
||||
/**
|
||||
* CertificateAuthority enlists the information required to identify which
|
||||
* CA to use and perform signature verification.
|
||||
*/
|
||||
export interface CertificateAuthority {
|
||||
/**
|
||||
* The root certificate MUST be self-signed, and so the subject and
|
||||
* issuer are the same.
|
||||
*/
|
||||
subject: DistinguishedName | undefined;
|
||||
/**
|
||||
* The URI identifies the certificate authority.
|
||||
*
|
||||
* It is RECOMMENDED that the URI is the base URL for the certificate
|
||||
* authority, that can be provided to any SDK/client provided
|
||||
* by the certificate authority to interact with the certificate
|
||||
* authority.
|
||||
*/
|
||||
uri: string;
|
||||
/**
|
||||
* The certificate chain for this CA. The last certificate in the chain
|
||||
* MUST be the trust anchor. The trust anchor MAY be a self-signed root
|
||||
* CA certificate or MAY be an intermediate CA certificate.
|
||||
*/
|
||||
certChain: X509CertificateChain | undefined;
|
||||
/**
|
||||
* The time the *entire* chain was valid. This is at max the
|
||||
* longest interval when *all* certificates in the chain were valid,
|
||||
* but it MAY be shorter. Clients MUST check timestamps against *both*
|
||||
* the `valid_for` time range *and* the entire certificate chain.
|
||||
*
|
||||
* The TimeRange should be considered valid *inclusive* of the
|
||||
* endpoints.
|
||||
*/
|
||||
validFor: TimeRange | undefined;
|
||||
}
|
||||
/**
|
||||
* TrustedRoot describes the client's complete set of trusted entities.
|
||||
* How the TrustedRoot is populated is not specified, but can be a
|
||||
* combination of many sources such as TUF repositories, files on disk etc.
|
||||
*
|
||||
* The TrustedRoot is not meant to be used for any artifact verification, only
|
||||
* to capture the complete/global set of trusted verification materials.
|
||||
* When verifying an artifact, based on the artifact and policies, a selection
|
||||
* of keys/authorities are expected to be extracted and provided to the
|
||||
* verification function. This way the set of keys/authorities can be kept to
|
||||
* a minimal set by the policy to gain better control over what signatures
|
||||
* that are allowed.
|
||||
*
|
||||
* The embedded transparency logs, CT logs, CAs and TSAs MUST include any
|
||||
* previously used instance -- otherwise signatures made in the past cannot
|
||||
* be verified.
|
||||
*
|
||||
* All the listed instances SHOULD be sorted by the 'valid_for' in ascending
|
||||
* order, that is, the oldest instance first. Only the last instance is
|
||||
* allowed to have their 'end' timestamp unset. All previous instances MUST
|
||||
* have a closed interval of validity. The last instance MAY have a closed
|
||||
* interval. Clients MUST accept instances that overlaps in time, if not
|
||||
* clients may experience problems during rotations of verification
|
||||
* materials.
|
||||
*
|
||||
* To be able to manage planned rotations of either transparency logs or
|
||||
* certificate authorities, clienst MUST accept lists of instances where
|
||||
* the last instance have a 'valid_for' that belongs to the future.
|
||||
* This should not be a problem as clients SHOULD first seek the trust root
|
||||
* for a suitable instance before creating a per artifact trust root (that
|
||||
* is, a sub-set of the complete trust root) that is used for verification.
|
||||
*/
|
||||
export interface TrustedRoot {
|
||||
/**
|
||||
* MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json
|
||||
* when encoded as JSON.
|
||||
* Clients MUST be able to process and parse content with the media
|
||||
* type defined in the old format:
|
||||
* application/vnd.dev.sigstore.trustedroot+json;version=0.1
|
||||
*/
|
||||
mediaType: string;
|
||||
/** A set of trusted Rekor servers. */
|
||||
tlogs: TransparencyLogInstance[];
|
||||
/**
|
||||
* A set of trusted certificate authorities (e.g Fulcio), and any
|
||||
* intermediate certificates they provide.
|
||||
* If a CA is issuing multiple intermediate certificate, each
|
||||
* combination shall be represented as separate chain. I.e, a single
|
||||
* root cert may appear in multiple chains but with different
|
||||
* intermediate and/or leaf certificates.
|
||||
* The certificates are intended to be used for verifying artifact
|
||||
* signatures.
|
||||
*/
|
||||
certificateAuthorities: CertificateAuthority[];
|
||||
/** A set of trusted certificate transparency logs. */
|
||||
ctlogs: TransparencyLogInstance[];
|
||||
/** A set of trusted timestamping authorities. */
|
||||
timestampAuthorities: CertificateAuthority[];
|
||||
}
|
||||
/**
|
||||
* SigningConfig represents the trusted entities/state needed by Sigstore
|
||||
* signing. In particular, it primarily contains service URLs that a Sigstore
|
||||
* signer may need to connect to for the online aspects of signing.
|
||||
*/
|
||||
export interface SigningConfig {
|
||||
/**
|
||||
* A URL to a Fulcio-compatible CA, capable of receiving
|
||||
* Certificate Signing Requests (CSRs) and responding with
|
||||
* issued certificates.
|
||||
*
|
||||
* This URL **MUST** be the "base" URL for the CA, which clients
|
||||
* should construct an appropriate CSR endpoint on top of.
|
||||
* For example, if `ca_url` is `https://example.com/ca`, then
|
||||
* the client **MAY** construct the CSR endpoint as
|
||||
* `https://example.com/ca/api/v2/signingCert`.
|
||||
*/
|
||||
caUrl: string;
|
||||
/**
|
||||
* A URL to an OpenID Connect identity provider.
|
||||
*
|
||||
* This URL **MUST** be the "base" URL for the OIDC IdP, which clients
|
||||
* should perform well-known OpenID Connect discovery against.
|
||||
*/
|
||||
oidcUrl: string;
|
||||
/**
|
||||
* One or more URLs to Rekor-compatible transparency log.
|
||||
*
|
||||
* Each URL **MUST** be the "base" URL for the transparency log,
|
||||
* which clients should construct appropriate API endpoints on top of.
|
||||
*/
|
||||
tlogUrls: string[];
|
||||
/**
|
||||
* One ore more URLs to RFC 3161 Time Stamping Authority (TSA).
|
||||
*
|
||||
* Each URL **MUST** be the **full** URL for the TSA, meaning that it
|
||||
* should be suitable for submitting Time Stamp Requests (TSRs) to
|
||||
* via HTTP, per RFC 3161.
|
||||
*/
|
||||
tsaUrls: string[];
|
||||
}
|
||||
/**
|
||||
* ClientTrustConfig describes the complete state needed by a client
|
||||
* to perform both signing and verification operations against a particular
|
||||
* instance of Sigstore.
|
||||
*/
|
||||
export interface ClientTrustConfig {
|
||||
/** MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json */
|
||||
mediaType: string;
|
||||
/** The root of trust, which MUST be present. */
|
||||
trustedRoot: TrustedRoot | undefined;
|
||||
/** Configuration for signing clients, which MUST be present. */
|
||||
signingConfig: SigningConfig | undefined;
|
||||
}
|
||||
export declare const TransparencyLogInstance: {
|
||||
fromJSON(object: any): TransparencyLogInstance;
|
||||
toJSON(message: TransparencyLogInstance): unknown;
|
||||
};
|
||||
export declare const CertificateAuthority: {
|
||||
fromJSON(object: any): CertificateAuthority;
|
||||
toJSON(message: CertificateAuthority): unknown;
|
||||
};
|
||||
export declare const TrustedRoot: {
|
||||
fromJSON(object: any): TrustedRoot;
|
||||
toJSON(message: TrustedRoot): unknown;
|
||||
};
|
||||
export declare const SigningConfig: {
|
||||
fromJSON(object: any): SigningConfig;
|
||||
toJSON(message: SigningConfig): unknown;
|
||||
};
|
||||
export declare const ClientTrustConfig: {
|
||||
fromJSON(object: any): ClientTrustConfig;
|
||||
toJSON(message: ClientTrustConfig): unknown;
|
||||
};
|
||||
158
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
generated
vendored
Normal file
158
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
generated
vendored
Normal file
|
|
@ -0,0 +1,158 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ClientTrustConfig = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
|
||||
/* eslint-disable */
|
||||
const sigstore_common_1 = require("./sigstore_common");
|
||||
function createBaseTransparencyLogInstance() {
|
||||
return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined, checkpointKeyId: undefined };
|
||||
}
|
||||
exports.TransparencyLogInstance = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
|
||||
hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
|
||||
publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
|
||||
logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
|
||||
checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
|
||||
message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
|
||||
message.publicKey !== undefined &&
|
||||
(obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
|
||||
message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
|
||||
message.checkpointKeyId !== undefined &&
|
||||
(obj.checkpointKeyId = message.checkpointKeyId ? sigstore_common_1.LogId.toJSON(message.checkpointKeyId) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseCertificateAuthority() {
|
||||
return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
|
||||
}
|
||||
exports.CertificateAuthority = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
|
||||
uri: isSet(object.uri) ? String(object.uri) : "",
|
||||
certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
|
||||
validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.subject !== undefined &&
|
||||
(obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
|
||||
message.uri !== undefined && (obj.uri = message.uri);
|
||||
message.certChain !== undefined &&
|
||||
(obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
|
||||
message.validFor !== undefined &&
|
||||
(obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseTrustedRoot() {
|
||||
return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
|
||||
}
|
||||
exports.TrustedRoot = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
|
||||
tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
|
||||
certificateAuthorities: Array.isArray(object?.certificateAuthorities)
|
||||
? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
|
||||
: [],
|
||||
ctlogs: Array.isArray(object?.ctlogs)
|
||||
? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
|
||||
: [],
|
||||
timestampAuthorities: Array.isArray(object?.timestampAuthorities)
|
||||
? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
|
||||
: [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.mediaType !== undefined && (obj.mediaType = message.mediaType);
|
||||
if (message.tlogs) {
|
||||
obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.tlogs = [];
|
||||
}
|
||||
if (message.certificateAuthorities) {
|
||||
obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.certificateAuthorities = [];
|
||||
}
|
||||
if (message.ctlogs) {
|
||||
obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.ctlogs = [];
|
||||
}
|
||||
if (message.timestampAuthorities) {
|
||||
obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.timestampAuthorities = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseSigningConfig() {
|
||||
return { caUrl: "", oidcUrl: "", tlogUrls: [], tsaUrls: [] };
|
||||
}
|
||||
exports.SigningConfig = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
caUrl: isSet(object.caUrl) ? String(object.caUrl) : "",
|
||||
oidcUrl: isSet(object.oidcUrl) ? String(object.oidcUrl) : "",
|
||||
tlogUrls: Array.isArray(object?.tlogUrls) ? object.tlogUrls.map((e) => String(e)) : [],
|
||||
tsaUrls: Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => String(e)) : [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.caUrl !== undefined && (obj.caUrl = message.caUrl);
|
||||
message.oidcUrl !== undefined && (obj.oidcUrl = message.oidcUrl);
|
||||
if (message.tlogUrls) {
|
||||
obj.tlogUrls = message.tlogUrls.map((e) => e);
|
||||
}
|
||||
else {
|
||||
obj.tlogUrls = [];
|
||||
}
|
||||
if (message.tsaUrls) {
|
||||
obj.tsaUrls = message.tsaUrls.map((e) => e);
|
||||
}
|
||||
else {
|
||||
obj.tsaUrls = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseClientTrustConfig() {
|
||||
return { mediaType: "", trustedRoot: undefined, signingConfig: undefined };
|
||||
}
|
||||
exports.ClientTrustConfig = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
|
||||
trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
|
||||
signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.mediaType !== undefined && (obj.mediaType = message.mediaType);
|
||||
message.trustedRoot !== undefined &&
|
||||
(obj.trustedRoot = message.trustedRoot ? exports.TrustedRoot.toJSON(message.trustedRoot) : undefined);
|
||||
message.signingConfig !== undefined &&
|
||||
(obj.signingConfig = message.signingConfig ? exports.SigningConfig.toJSON(message.signingConfig) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
189
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts
generated
vendored
Normal file
189
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
/// <reference types="node" />
|
||||
import { Bundle } from "./sigstore_bundle";
|
||||
import { ObjectIdentifierValuePair, PublicKey, SubjectAlternativeName } from "./sigstore_common";
|
||||
import { TrustedRoot } from "./sigstore_trustroot";
|
||||
/** The identity of a X.509 Certificate signer. */
|
||||
export interface CertificateIdentity {
|
||||
/** The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) */
|
||||
issuer: string;
|
||||
san: SubjectAlternativeName | undefined;
|
||||
/**
|
||||
* An unordered list of OIDs that must be verified.
|
||||
* All OID/values provided in this list MUST exactly match against
|
||||
* the values in the certificate for verification to be successful.
|
||||
*/
|
||||
oids: ObjectIdentifierValuePair[];
|
||||
}
|
||||
export interface CertificateIdentities {
|
||||
identities: CertificateIdentity[];
|
||||
}
|
||||
export interface PublicKeyIdentities {
|
||||
publicKeys: PublicKey[];
|
||||
}
|
||||
/**
|
||||
* A light-weight set of options/policies for identifying trusted signers,
|
||||
* used during verification of a single artifact.
|
||||
*/
|
||||
export interface ArtifactVerificationOptions {
|
||||
signers?: {
|
||||
$case: "certificateIdentities";
|
||||
certificateIdentities: CertificateIdentities;
|
||||
} | {
|
||||
$case: "publicKeys";
|
||||
publicKeys: PublicKeyIdentities;
|
||||
};
|
||||
/**
|
||||
* Optional options for artifact transparency log verification.
|
||||
* If none is provided, the default verification options are:
|
||||
* Threshold: 1
|
||||
* Online verification: false
|
||||
* Disable: false
|
||||
*/
|
||||
tlogOptions?: ArtifactVerificationOptions_TlogOptions | undefined;
|
||||
/**
|
||||
* Optional options for certificate transparency log verification.
|
||||
* If none is provided, the default verification options are:
|
||||
* Threshold: 1
|
||||
* Disable: false
|
||||
*/
|
||||
ctlogOptions?: ArtifactVerificationOptions_CtlogOptions | undefined;
|
||||
/**
|
||||
* Optional options for certificate signed timestamp verification.
|
||||
* If none is provided, the default verification options are:
|
||||
* Threshold: 0
|
||||
* Disable: true
|
||||
*/
|
||||
tsaOptions?: ArtifactVerificationOptions_TimestampAuthorityOptions | undefined;
|
||||
/**
|
||||
* Optional options for integrated timestamp verification.
|
||||
* If none is provided, the default verification options are:
|
||||
* Threshold: 0
|
||||
* Disable: true
|
||||
*/
|
||||
integratedTsOptions?: ArtifactVerificationOptions_TlogIntegratedTimestampOptions | undefined;
|
||||
/**
|
||||
* Optional options for observed timestamp verification.
|
||||
* If none is provided, the default verification options are:
|
||||
* Threshold 1
|
||||
* Disable: false
|
||||
*/
|
||||
observerOptions?: ArtifactVerificationOptions_ObserverTimestampOptions | undefined;
|
||||
}
|
||||
export interface ArtifactVerificationOptions_TlogOptions {
|
||||
/** Number of transparency logs the entry must appear on. */
|
||||
threshold: number;
|
||||
/** Perform an online inclusion proof. */
|
||||
performOnlineVerification: boolean;
|
||||
/** Disable verification for transparency logs. */
|
||||
disable: boolean;
|
||||
}
|
||||
export interface ArtifactVerificationOptions_CtlogOptions {
|
||||
/**
|
||||
* The number of ct transparency logs the certificate must
|
||||
* appear on.
|
||||
*/
|
||||
threshold: number;
|
||||
/** Disable ct transparency log verification */
|
||||
disable: boolean;
|
||||
}
|
||||
export interface ArtifactVerificationOptions_TimestampAuthorityOptions {
|
||||
/** The number of signed timestamps that are expected. */
|
||||
threshold: number;
|
||||
/** Disable signed timestamp verification. */
|
||||
disable: boolean;
|
||||
}
|
||||
export interface ArtifactVerificationOptions_TlogIntegratedTimestampOptions {
|
||||
/** The number of integrated timestamps that are expected. */
|
||||
threshold: number;
|
||||
/** Disable integrated timestamp verification. */
|
||||
disable: boolean;
|
||||
}
|
||||
export interface ArtifactVerificationOptions_ObserverTimestampOptions {
|
||||
/**
|
||||
* The number of external observers of the timestamp.
|
||||
* This is a union of RFC3161 signed timestamps, and
|
||||
* integrated timestamps from a transparency log, that
|
||||
* could include additional timestamp sources in the
|
||||
* future.
|
||||
*/
|
||||
threshold: number;
|
||||
/** Disable observer timestamp verification. */
|
||||
disable: boolean;
|
||||
}
|
||||
export interface Artifact {
|
||||
data?: {
|
||||
$case: "artifactUri";
|
||||
artifactUri: string;
|
||||
} | {
|
||||
$case: "artifact";
|
||||
artifact: Buffer;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Input captures all that is needed to call the bundle verification method,
|
||||
* to verify a single artifact referenced by the bundle.
|
||||
*/
|
||||
export interface Input {
|
||||
/**
|
||||
* The verification materials provided during a bundle verification.
|
||||
* The running process is usually preloaded with a "global"
|
||||
* dev.sisgtore.trustroot.TrustedRoot.v1 instance. Prior to
|
||||
* verifying an artifact (i.e a bundle), and/or based on current
|
||||
* policy, some selection is expected to happen, to filter out the
|
||||
* exact certificate authority to use, which transparency logs are
|
||||
* relevant etc. The result should b ecaptured in the
|
||||
* `artifact_trust_root`.
|
||||
*/
|
||||
artifactTrustRoot: TrustedRoot | undefined;
|
||||
artifactVerificationOptions: ArtifactVerificationOptions | undefined;
|
||||
bundle: Bundle | undefined;
|
||||
/**
|
||||
* If the bundle contains a message signature, the artifact must be
|
||||
* provided.
|
||||
*/
|
||||
artifact?: Artifact | undefined;
|
||||
}
|
||||
export declare const CertificateIdentity: {
|
||||
fromJSON(object: any): CertificateIdentity;
|
||||
toJSON(message: CertificateIdentity): unknown;
|
||||
};
|
||||
export declare const CertificateIdentities: {
|
||||
fromJSON(object: any): CertificateIdentities;
|
||||
toJSON(message: CertificateIdentities): unknown;
|
||||
};
|
||||
export declare const PublicKeyIdentities: {
|
||||
fromJSON(object: any): PublicKeyIdentities;
|
||||
toJSON(message: PublicKeyIdentities): unknown;
|
||||
};
|
||||
export declare const ArtifactVerificationOptions: {
|
||||
fromJSON(object: any): ArtifactVerificationOptions;
|
||||
toJSON(message: ArtifactVerificationOptions): unknown;
|
||||
};
|
||||
export declare const ArtifactVerificationOptions_TlogOptions: {
|
||||
fromJSON(object: any): ArtifactVerificationOptions_TlogOptions;
|
||||
toJSON(message: ArtifactVerificationOptions_TlogOptions): unknown;
|
||||
};
|
||||
export declare const ArtifactVerificationOptions_CtlogOptions: {
|
||||
fromJSON(object: any): ArtifactVerificationOptions_CtlogOptions;
|
||||
toJSON(message: ArtifactVerificationOptions_CtlogOptions): unknown;
|
||||
};
|
||||
export declare const ArtifactVerificationOptions_TimestampAuthorityOptions: {
|
||||
fromJSON(object: any): ArtifactVerificationOptions_TimestampAuthorityOptions;
|
||||
toJSON(message: ArtifactVerificationOptions_TimestampAuthorityOptions): unknown;
|
||||
};
|
||||
export declare const ArtifactVerificationOptions_TlogIntegratedTimestampOptions: {
|
||||
fromJSON(object: any): ArtifactVerificationOptions_TlogIntegratedTimestampOptions;
|
||||
toJSON(message: ArtifactVerificationOptions_TlogIntegratedTimestampOptions): unknown;
|
||||
};
|
||||
export declare const ArtifactVerificationOptions_ObserverTimestampOptions: {
|
||||
fromJSON(object: any): ArtifactVerificationOptions_ObserverTimestampOptions;
|
||||
toJSON(message: ArtifactVerificationOptions_ObserverTimestampOptions): unknown;
|
||||
};
|
||||
export declare const Artifact: {
|
||||
fromJSON(object: any): Artifact;
|
||||
toJSON(message: Artifact): unknown;
|
||||
};
|
||||
export declare const Input: {
|
||||
fromJSON(object: any): Input;
|
||||
toJSON(message: Input): unknown;
|
||||
};
|
||||
324
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
generated
vendored
Normal file
324
node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
generated
vendored
Normal file
|
|
@ -0,0 +1,324 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
|
||||
/* eslint-disable */
|
||||
const sigstore_bundle_1 = require("./sigstore_bundle");
|
||||
const sigstore_common_1 = require("./sigstore_common");
|
||||
const sigstore_trustroot_1 = require("./sigstore_trustroot");
|
||||
function createBaseCertificateIdentity() {
|
||||
return { issuer: "", san: undefined, oids: [] };
|
||||
}
|
||||
exports.CertificateIdentity = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
issuer: isSet(object.issuer) ? String(object.issuer) : "",
|
||||
san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
|
||||
oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.issuer !== undefined && (obj.issuer = message.issuer);
|
||||
message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
|
||||
if (message.oids) {
|
||||
obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.oids = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseCertificateIdentities() {
|
||||
return { identities: [] };
|
||||
}
|
||||
exports.CertificateIdentities = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
identities: Array.isArray(object?.identities)
|
||||
? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
|
||||
: [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
if (message.identities) {
|
||||
obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.identities = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBasePublicKeyIdentities() {
|
||||
return { publicKeys: [] };
|
||||
}
|
||||
exports.PublicKeyIdentities = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
if (message.publicKeys) {
|
||||
obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
|
||||
}
|
||||
else {
|
||||
obj.publicKeys = [];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifactVerificationOptions() {
|
||||
return {
|
||||
signers: undefined,
|
||||
tlogOptions: undefined,
|
||||
ctlogOptions: undefined,
|
||||
tsaOptions: undefined,
|
||||
integratedTsOptions: undefined,
|
||||
observerOptions: undefined,
|
||||
};
|
||||
}
|
||||
exports.ArtifactVerificationOptions = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
signers: isSet(object.certificateIdentities)
|
||||
? {
|
||||
$case: "certificateIdentities",
|
||||
certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
|
||||
}
|
||||
: isSet(object.publicKeys)
|
||||
? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
|
||||
: undefined,
|
||||
tlogOptions: isSet(object.tlogOptions)
|
||||
? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
|
||||
: undefined,
|
||||
ctlogOptions: isSet(object.ctlogOptions)
|
||||
? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
|
||||
: undefined,
|
||||
tsaOptions: isSet(object.tsaOptions)
|
||||
? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
|
||||
: undefined,
|
||||
integratedTsOptions: isSet(object.integratedTsOptions)
|
||||
? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
|
||||
: undefined,
|
||||
observerOptions: isSet(object.observerOptions)
|
||||
? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.signers?.$case === "certificateIdentities" &&
|
||||
(obj.certificateIdentities = message.signers?.certificateIdentities
|
||||
? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
|
||||
: undefined);
|
||||
message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
|
||||
? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
|
||||
: undefined);
|
||||
message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
|
||||
? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
|
||||
: undefined);
|
||||
message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
|
||||
? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
|
||||
: undefined);
|
||||
message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
|
||||
? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
|
||||
: undefined);
|
||||
message.integratedTsOptions !== undefined && (obj.integratedTsOptions = message.integratedTsOptions
|
||||
? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions)
|
||||
: undefined);
|
||||
message.observerOptions !== undefined && (obj.observerOptions = message.observerOptions
|
||||
? exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions)
|
||||
: undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifactVerificationOptions_TlogOptions() {
|
||||
return { threshold: 0, performOnlineVerification: false, disable: false };
|
||||
}
|
||||
exports.ArtifactVerificationOptions_TlogOptions = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
|
||||
performOnlineVerification: isSet(object.performOnlineVerification)
|
||||
? Boolean(object.performOnlineVerification)
|
||||
: false,
|
||||
disable: isSet(object.disable) ? Boolean(object.disable) : false,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
|
||||
message.performOnlineVerification !== undefined &&
|
||||
(obj.performOnlineVerification = message.performOnlineVerification);
|
||||
message.disable !== undefined && (obj.disable = message.disable);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifactVerificationOptions_CtlogOptions() {
|
||||
return { threshold: 0, disable: false };
|
||||
}
|
||||
exports.ArtifactVerificationOptions_CtlogOptions = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
|
||||
disable: isSet(object.disable) ? Boolean(object.disable) : false,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
|
||||
message.disable !== undefined && (obj.disable = message.disable);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
|
||||
return { threshold: 0, disable: false };
|
||||
}
|
||||
exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
|
||||
disable: isSet(object.disable) ? Boolean(object.disable) : false,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
|
||||
message.disable !== undefined && (obj.disable = message.disable);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifactVerificationOptions_TlogIntegratedTimestampOptions() {
|
||||
return { threshold: 0, disable: false };
|
||||
}
|
||||
exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
|
||||
disable: isSet(object.disable) ? Boolean(object.disable) : false,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
|
||||
message.disable !== undefined && (obj.disable = message.disable);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifactVerificationOptions_ObserverTimestampOptions() {
|
||||
return { threshold: 0, disable: false };
|
||||
}
|
||||
exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
|
||||
disable: isSet(object.disable) ? Boolean(object.disable) : false,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
|
||||
message.disable !== undefined && (obj.disable = message.disable);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseArtifact() {
|
||||
return { data: undefined };
|
||||
}
|
||||
exports.Artifact = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
data: isSet(object.artifactUri)
|
||||
? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
|
||||
: isSet(object.artifact)
|
||||
? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
|
||||
: undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
|
||||
message.data?.$case === "artifact" &&
|
||||
(obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
function createBaseInput() {
|
||||
return {
|
||||
artifactTrustRoot: undefined,
|
||||
artifactVerificationOptions: undefined,
|
||||
bundle: undefined,
|
||||
artifact: undefined,
|
||||
};
|
||||
}
|
||||
exports.Input = {
|
||||
fromJSON(object) {
|
||||
return {
|
||||
artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
|
||||
artifactVerificationOptions: isSet(object.artifactVerificationOptions)
|
||||
? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
|
||||
: undefined,
|
||||
bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
|
||||
artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
|
||||
};
|
||||
},
|
||||
toJSON(message) {
|
||||
const obj = {};
|
||||
message.artifactTrustRoot !== undefined &&
|
||||
(obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
|
||||
message.artifactVerificationOptions !== undefined &&
|
||||
(obj.artifactVerificationOptions = message.artifactVerificationOptions
|
||||
? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
|
||||
: undefined);
|
||||
message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
|
||||
message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
|
||||
return obj;
|
||||
},
|
||||
};
|
||||
var tsProtoGlobalThis = (() => {
|
||||
if (typeof globalThis !== "undefined") {
|
||||
return globalThis;
|
||||
}
|
||||
if (typeof self !== "undefined") {
|
||||
return self;
|
||||
}
|
||||
if (typeof window !== "undefined") {
|
||||
return window;
|
||||
}
|
||||
if (typeof global !== "undefined") {
|
||||
return global;
|
||||
}
|
||||
throw "Unable to locate global object";
|
||||
})();
|
||||
function bytesFromBase64(b64) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
|
||||
}
|
||||
else {
|
||||
const bin = tsProtoGlobalThis.atob(b64);
|
||||
const arr = new Uint8Array(bin.length);
|
||||
for (let i = 0; i < bin.length; ++i) {
|
||||
arr[i] = bin.charCodeAt(i);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
}
|
||||
function base64FromBytes(arr) {
|
||||
if (tsProtoGlobalThis.Buffer) {
|
||||
return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
|
||||
}
|
||||
else {
|
||||
const bin = [];
|
||||
arr.forEach((byte) => {
|
||||
bin.push(String.fromCharCode(byte));
|
||||
});
|
||||
return tsProtoGlobalThis.btoa(bin.join(""));
|
||||
}
|
||||
}
|
||||
function isSet(value) {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
6
node_modules/@sigstore/protobuf-specs/dist/index.d.ts
generated
vendored
Normal file
6
node_modules/@sigstore/protobuf-specs/dist/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
export * from './__generated__/envelope';
|
||||
export * from './__generated__/sigstore_bundle';
|
||||
export * from './__generated__/sigstore_common';
|
||||
export * from './__generated__/sigstore_rekor';
|
||||
export * from './__generated__/sigstore_trustroot';
|
||||
export * from './__generated__/sigstore_verification';
|
||||
37
node_modules/@sigstore/protobuf-specs/dist/index.js
generated
vendored
Normal file
37
node_modules/@sigstore/protobuf-specs/dist/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
__exportStar(require("./__generated__/envelope"), exports);
|
||||
__exportStar(require("./__generated__/sigstore_bundle"), exports);
|
||||
__exportStar(require("./__generated__/sigstore_common"), exports);
|
||||
__exportStar(require("./__generated__/sigstore_rekor"), exports);
|
||||
__exportStar(require("./__generated__/sigstore_trustroot"), exports);
|
||||
__exportStar(require("./__generated__/sigstore_verification"), exports);
|
||||
31
node_modules/@sigstore/protobuf-specs/package.json
generated
vendored
Normal file
31
node_modules/@sigstore/protobuf-specs/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
{
|
||||
"name": "@sigstore/protobuf-specs",
|
||||
"version": "0.3.2",
|
||||
"description": "code-signing for npm packages",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "tsc"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/sigstore/protobuf-specs.git"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"author": "bdehamer@github.com",
|
||||
"license": "Apache-2.0",
|
||||
"bugs": {
|
||||
"url": "https://github.com/sigstore/protobuf-specs/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sigstore/protobuf-specs#readme",
|
||||
"devDependencies": {
|
||||
"@tsconfig/node16": "^16.1.1",
|
||||
"@types/node": "^18.14.0",
|
||||
"typescript": "^4.9.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.14.0 || >=18.0.0"
|
||||
}
|
||||
}
|
||||
202
node_modules/@sigstore/sign/LICENSE
generated
vendored
Normal file
202
node_modules/@sigstore/sign/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2023 The Sigstore Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
185
node_modules/@sigstore/sign/README.md
generated
vendored
Normal file
185
node_modules/@sigstore/sign/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
# @sigstore/sign · [](https://www.npmjs.com/package/@sigstore/sign) [](https://github.com/sigstore/sigstore-js/actions/workflows/ci.yml) [](https://github.com/sigstore/sigstore-js/actions/workflows/smoke-test.yml)
|
||||
|
||||
A library for generating [Sigstore][1] signatures.
|
||||
|
||||
## Features
|
||||
|
||||
- Support for keyless signature generation with [Fulcio][2]-issued signing
|
||||
certificates
|
||||
- Support for ambient OIDC credential detection in CI/CD environments
|
||||
- Support for recording signatures to the [Rekor][3] transparency log
|
||||
- Support for requesting timestamped countersignature from a [Timestamp
|
||||
Authority][4]
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js version >= 16.14.0
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
npm install @sigstore/sign
|
||||
```
|
||||
|
||||
## Overview
|
||||
|
||||
This library provides the building blocks for composing custom Sigstore signing
|
||||
workflows.
|
||||
|
||||
### BundleBuilder
|
||||
|
||||
The top-level component is the `BundleBuilder` which has responsibility for
|
||||
taking some artifact and returning a [Sigstore bundle][5] containing the
|
||||
signature for that artifact and the various materials necessary to verify that
|
||||
signature.
|
||||
|
||||
```typescript
|
||||
interface BundleBuilder {
|
||||
create: (artifact: Artifact) => Promise<Bundle>;
|
||||
}
|
||||
```
|
||||
|
||||
The artifact to be signed is simply an array of bytes and an optional mimetype.
|
||||
The type is necessary when the signature is packaged as a [DSSE][6] envelope.
|
||||
|
||||
```typescript
|
||||
type Artifact = {
|
||||
data: Buffer;
|
||||
type?: string;
|
||||
};
|
||||
```
|
||||
|
||||
There are two `BundleBuilder` implementations provided as part of this package:
|
||||
|
||||
- [`DSSEBundleBuilder`](./src/bundler/dsse.ts) - Combines the verification material and
|
||||
artifact signature into a [`dsse_envelope`][7] -style Sigstore bundle
|
||||
- [`MessageBundleBuilder`](./src/bundler/message.ts) - Combines the verification
|
||||
material and artifact signature into a [`message_signature`][8]-style Sigstore
|
||||
bundle.
|
||||
|
||||
### Signer
|
||||
|
||||
Every `BundleBuilder` must be instantiated with a `Signer` implementation. The
|
||||
`Signer` is responsible for taking a `Buffer` and returning an `Signature`.
|
||||
|
||||
```typescript
|
||||
interface Signer {
|
||||
sign: (data: Buffer) => Promise<Signature>;
|
||||
}
|
||||
```
|
||||
|
||||
The returned `Signature` contains a signature and the public key which can be
|
||||
used to verify that signature -- the key may either take the form of a x509
|
||||
certificate or public key.
|
||||
|
||||
```typescript
|
||||
type Signature = {
|
||||
signature: Buffer;
|
||||
key: KeyMaterial;
|
||||
};
|
||||
|
||||
type KeyMaterial =
|
||||
| {
|
||||
$case: 'x509Certificate';
|
||||
certificate: string;
|
||||
}
|
||||
| {
|
||||
$case: 'publicKey';
|
||||
publicKey: string;
|
||||
hint?: string;
|
||||
};
|
||||
```
|
||||
|
||||
This package provides the [`FulcioSigner`](./src/signer/fulcio/index.ts)
|
||||
which implements the `Signer` interface and signs the artifact with an
|
||||
ephemeral keypair. It will also retrieve an OIDC token from the configured
|
||||
`IdentityProvider` and then request a signing certificate from Fulcio which binds
|
||||
the ephemeral key to the identity embedded in the token. This signing
|
||||
certificate is returned as part of the `Signature`.
|
||||
|
||||
### Witness
|
||||
|
||||
The `BundleBuilder` may also be configured with zero-or-more `Witness`
|
||||
instances. Each `Witness` receives the artifact signature and the public key
|
||||
and returns an `VerificationMaterial` which represents some sort of
|
||||
counter-signature for the artifact's signature.
|
||||
|
||||
```typescript
|
||||
interface Witness {
|
||||
testify: (
|
||||
signature: SignatureBundle,
|
||||
publicKey: string
|
||||
) => Promise<VerificationMaterial>;
|
||||
}
|
||||
```
|
||||
|
||||
The returned `VerificationMaterial` may contain either Rekor transparency log
|
||||
entries or RFC3161 timestamps.
|
||||
|
||||
```typescript
|
||||
type VerificationMaterial = {
|
||||
tlogEntries?: TransparencyLogEntry[];
|
||||
rfc3161Timestamps?: RFC3161SignedTimestamp[];
|
||||
};
|
||||
```
|
||||
|
||||
The entries in the returned `VerificationMaterial` are automatically added to
|
||||
the Sigstore `Bundle` by the `BundleBuilder`.
|
||||
|
||||
The package provides two different `Witness` implementations:
|
||||
|
||||
- [`RekorWitness`](./src/witness/tlog/index.ts) - Adds an entry to the Rekor
|
||||
transparency log and returns a `TransparencyLogEntry` to be included in the
|
||||
`Bundle`
|
||||
- [`TSAWitness`](./src/witness/tsa/index.ts) - Requests an RFC3161 timestamp
|
||||
over the artifact signature and returns an `RFC3161SignedTimestamp` to be
|
||||
included in the `Bundle`
|
||||
|
||||
## Usage Example
|
||||
|
||||
```typescript
|
||||
const {
|
||||
CIContextProvider,
|
||||
DSSEBundleBuilder,
|
||||
FulcioSigner,
|
||||
RekorWitness,
|
||||
TSAWitness,
|
||||
} = require('@sigstore/sign');
|
||||
|
||||
// Set-up the signer
|
||||
const signer = new FulcioSigner({
|
||||
fulcioBaseURL: 'https://fulcio.sigstore.dev',
|
||||
identityProvider: new CIContextProvider('sigstore'),
|
||||
});
|
||||
|
||||
// Set-up the witnesses
|
||||
const rekorWitness = new RekorWitness({
|
||||
rekorBaseURL: 'https://rekor.sigstore.dev',
|
||||
});
|
||||
|
||||
const tsaWitness = new TSAWitness({
|
||||
tsaBaseURL: 'https://tsa.github.com',
|
||||
});
|
||||
|
||||
// Instantiate a bundle builder
|
||||
const bundler = new DSSEBundleBuilder({
|
||||
signer,
|
||||
witnesses: [rekorWitness, tsaWitness],
|
||||
});
|
||||
|
||||
// Sign a thing
|
||||
const artifact = {
|
||||
type: 'text/plain',
|
||||
data: Buffer.from('something to be signed'),
|
||||
};
|
||||
const bundle = await bundler.create(artifact);
|
||||
```
|
||||
|
||||
[1]: https://www.sigstore.dev
|
||||
[2]: https://github.com/sigstore/fulcio
|
||||
[3]: https://github.com/sigstore/rekor
|
||||
[4]: https://github.com/sigstore/timestamp-authority
|
||||
[5]: https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto
|
||||
[6]: https://github.com/secure-systems-lab/dsse
|
||||
[7]: https://github.com/sigstore/protobuf-specs/blob/5ef54068bb534152474c5685f5cd248f38549fbd/protos/sigstore_bundle.proto#L80
|
||||
[8]: https://github.com/sigstore/protobuf-specs/blob/5ef54068bb534152474c5685f5cd248f38549fbd/protos/sigstore_bundle.proto#L74
|
||||
23
node_modules/@sigstore/sign/dist/bundler/base.d.ts
generated
vendored
Normal file
23
node_modules/@sigstore/sign/dist/bundler/base.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
/// <reference types="node" />
|
||||
import type { Bundle } from '@sigstore/bundle';
|
||||
import type { Signature, Signer } from '../signer';
|
||||
import type { Witness } from '../witness';
|
||||
export interface BundleBuilderOptions {
|
||||
signer: Signer;
|
||||
witnesses: Witness[];
|
||||
}
|
||||
export interface Artifact {
|
||||
data: Buffer;
|
||||
type?: string;
|
||||
}
|
||||
export interface BundleBuilder {
|
||||
create: (artifact: Artifact) => Promise<Bundle>;
|
||||
}
|
||||
export declare abstract class BaseBundleBuilder<T extends Bundle> implements BundleBuilder {
|
||||
protected signer: Signer;
|
||||
private witnesses;
|
||||
constructor(options: BundleBuilderOptions);
|
||||
create(artifact: Artifact): Promise<T>;
|
||||
protected prepare(artifact: Artifact): Promise<Buffer>;
|
||||
protected abstract package(artifact: Artifact, signature: Signature): Promise<T>;
|
||||
}
|
||||
50
node_modules/@sigstore/sign/dist/bundler/base.js
generated
vendored
Normal file
50
node_modules/@sigstore/sign/dist/bundler/base.js
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BaseBundleBuilder = void 0;
|
||||
// BaseBundleBuilder is a base class for BundleBuilder implementations. It
|
||||
// provides a the basic wokflow for signing and witnessing an artifact.
|
||||
// Subclasses must implement the `package` method to assemble a valid bundle
|
||||
// with the generated signature and verification material.
|
||||
class BaseBundleBuilder {
|
||||
constructor(options) {
|
||||
this.signer = options.signer;
|
||||
this.witnesses = options.witnesses;
|
||||
}
|
||||
// Executes the signing/witnessing process for the given artifact.
|
||||
async create(artifact) {
|
||||
const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob));
|
||||
const bundle = await this.package(artifact, signature);
|
||||
// Invoke all of the witnesses in parallel
|
||||
const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key))));
|
||||
// Collect the verification material from all of the witnesses
|
||||
const tlogEntryList = [];
|
||||
const timestampList = [];
|
||||
verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => {
|
||||
tlogEntryList.push(...(tlogEntries ?? []));
|
||||
timestampList.push(...(rfc3161Timestamps ?? []));
|
||||
});
|
||||
// Merge the collected verification material into the bundle
|
||||
bundle.verificationMaterial.tlogEntries = tlogEntryList;
|
||||
bundle.verificationMaterial.timestampVerificationData = {
|
||||
rfc3161Timestamps: timestampList,
|
||||
};
|
||||
return bundle;
|
||||
}
|
||||
// Override this function to apply any pre-signing transformations to the
|
||||
// artifact. The returned buffer will be signed by the signer. The default
|
||||
// implementation simply returns the artifact data.
|
||||
async prepare(artifact) {
|
||||
return artifact.data;
|
||||
}
|
||||
}
|
||||
exports.BaseBundleBuilder = BaseBundleBuilder;
|
||||
// Extracts the public key from a KeyMaterial. Returns either the public key
|
||||
// or the certificate, depending on the type of key material.
|
||||
function publicKey(key) {
|
||||
switch (key.$case) {
|
||||
case 'publicKey':
|
||||
return key.publicKey;
|
||||
case 'x509Certificate':
|
||||
return key.certificate;
|
||||
}
|
||||
}
|
||||
5
node_modules/@sigstore/sign/dist/bundler/bundle.d.ts
generated
vendored
Normal file
5
node_modules/@sigstore/sign/dist/bundler/bundle.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import * as sigstore from '@sigstore/bundle';
|
||||
import type { Signature } from '../signer';
|
||||
import type { Artifact } from './base';
|
||||
export declare function toMessageSignatureBundle(artifact: Artifact, signature: Signature): sigstore.BundleWithMessageSignature;
|
||||
export declare function toDSSEBundle(artifact: Required<Artifact>, signature: Signature, singleCertificate?: boolean): sigstore.BundleWithDsseEnvelope;
|
||||
71
node_modules/@sigstore/sign/dist/bundler/bundle.js
generated
vendored
Normal file
71
node_modules/@sigstore/sign/dist/bundler/bundle.js
generated
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const sigstore = __importStar(require("@sigstore/bundle"));
|
||||
const util_1 = require("../util");
|
||||
// Helper functions for assembling the parts of a Sigstore bundle
|
||||
// Message signature bundle - $case: 'messageSignature'
|
||||
function toMessageSignatureBundle(artifact, signature) {
|
||||
const digest = util_1.crypto.hash(artifact.data);
|
||||
return sigstore.toMessageSignatureBundle({
|
||||
digest,
|
||||
signature: signature.signature,
|
||||
certificate: signature.key.$case === 'x509Certificate'
|
||||
? util_1.pem.toDER(signature.key.certificate)
|
||||
: undefined,
|
||||
keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
|
||||
});
|
||||
}
|
||||
exports.toMessageSignatureBundle = toMessageSignatureBundle;
|
||||
// DSSE envelope bundle - $case: 'dsseEnvelope'
|
||||
function toDSSEBundle(artifact, signature, singleCertificate) {
|
||||
return sigstore.toDSSEBundle({
|
||||
artifact: artifact.data,
|
||||
artifactType: artifact.type,
|
||||
signature: signature.signature,
|
||||
certificate: signature.key.$case === 'x509Certificate'
|
||||
? util_1.pem.toDER(signature.key.certificate)
|
||||
: undefined,
|
||||
keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
|
||||
singleCertificate,
|
||||
});
|
||||
}
|
||||
exports.toDSSEBundle = toDSSEBundle;
|
||||
14
node_modules/@sigstore/sign/dist/bundler/dsse.d.ts
generated
vendored
Normal file
14
node_modules/@sigstore/sign/dist/bundler/dsse.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
/// <reference types="node" />
|
||||
import { Artifact, BaseBundleBuilder, BundleBuilderOptions } from './base';
|
||||
import type { BundleWithDsseEnvelope } from '@sigstore/bundle';
|
||||
import type { Signature } from '../signer';
|
||||
type DSSEBundleBuilderOptions = BundleBuilderOptions & {
|
||||
singleCertificate?: boolean;
|
||||
};
|
||||
export declare class DSSEBundleBuilder extends BaseBundleBuilder<BundleWithDsseEnvelope> {
|
||||
private singleCertificate?;
|
||||
constructor(options: DSSEBundleBuilderOptions);
|
||||
protected prepare(artifact: Artifact): Promise<Buffer>;
|
||||
protected package(artifact: Artifact, signature: Signature): Promise<BundleWithDsseEnvelope>;
|
||||
}
|
||||
export {};
|
||||
46
node_modules/@sigstore/sign/dist/bundler/dsse.js
generated
vendored
Normal file
46
node_modules/@sigstore/sign/dist/bundler/dsse.js
generated
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DSSEBundleBuilder = void 0;
|
||||
/*
|
||||
Copyright 2023 The Sigstore Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
const util_1 = require("../util");
|
||||
const base_1 = require("./base");
|
||||
const bundle_1 = require("./bundle");
|
||||
// BundleBuilder implementation for DSSE wrapped attestations
|
||||
class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
|
||||
constructor(options) {
|
||||
super(options);
|
||||
this.singleCertificate = options.singleCertificate ?? false;
|
||||
}
|
||||
// DSSE requires the artifact to be pre-encoded with the payload type
|
||||
// before the signature is generated.
|
||||
async prepare(artifact) {
|
||||
const a = artifactDefaults(artifact);
|
||||
return util_1.dsse.preAuthEncoding(a.type, a.data);
|
||||
}
|
||||
// Packages the artifact and signature into a DSSE bundle
|
||||
async package(artifact, signature) {
|
||||
return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.singleCertificate);
|
||||
}
|
||||
}
|
||||
exports.DSSEBundleBuilder = DSSEBundleBuilder;
|
||||
// Defaults the artifact type to an empty string if not provided
|
||||
function artifactDefaults(artifact) {
|
||||
return {
|
||||
...artifact,
|
||||
type: artifact.type ?? '',
|
||||
};
|
||||
}
|
||||
3
node_modules/@sigstore/sign/dist/bundler/index.d.ts
generated
vendored
Normal file
3
node_modules/@sigstore/sign/dist/bundler/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
export type { Artifact, BundleBuilder, BundleBuilderOptions } from './base';
|
||||
export { DSSEBundleBuilder } from './dsse';
|
||||
export { MessageSignatureBundleBuilder } from './message';
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue