Deployed the page to Github Pages.

This commit is contained in:
Batuhan Berk Başoğlu 2024-11-03 21:30:09 -05:00
parent 1d79754e93
commit 2c89899458
Signed by: batuhan-basoglu
SSH key fingerprint: SHA256:kEsnuHX+qbwhxSAXPUQ4ox535wFHu/hIRaa53FzxRpo
62797 changed files with 6551425 additions and 15279 deletions

View file

@ -0,0 +1,3 @@
import { TLogAuthority } from '../trust';
import type { TLogEntryWithInclusionProof } from '@sigstore/bundle';
/**
 * Verifies the signed checkpoint embedded in the entry's inclusion proof
 * against the supplied transparency log authorities. Throws a
 * VerificationError if the checkpoint signatures or root hash are invalid.
 */
export declare function verifyCheckpoint(entry: TLogEntryWithInclusionProof, tlogs: TLogAuthority[]): void;

View file

@ -0,0 +1,158 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyCheckpoint = void 0;
/*
Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const core_1 = require("@sigstore/core");
const error_1 = require("../error");
const trust_1 = require("../trust");
// Separator between the note and the signatures in a checkpoint
const CHECKPOINT_SEPARATOR = '\n\n';
// Checkpoint signatures are of the following form:
// " <identity> <key_hint+signature_bytes>\n"
// where:
// - the prefix is an emdash (U+2014).
// - <identity> gives a human-readable representation of the signing ID.
// - <key_hint+signature_bytes> is the first 4 bytes of the SHA256 hash of the
// associated public key followed by the signature bytes.
const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
// Verifies the checkpoint value in the given tlog entry. There are two steps
// to the verification:
// 1. Verify that all signatures in the checkpoint can be verified against a
// trusted public key
// 2. Verify that the root hash in the checkpoint matches the root hash in the
// inclusion proof
// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
function verifyCheckpoint(entry, tlogs) {
    // Only consider tlog instances which were valid at the moment the entry
    // was integrated into the log
    const integratedAt = new Date(Number(entry.integratedTime) * 1000);
    const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
        targetDate: integratedAt,
    });
    const proof = entry.inclusionProof;
    const signedNote = SignedNote.fromString(proof.checkpoint.envelope);
    const checkpoint = LogCheckpoint.fromString(signedNote.note);
    // Step 1: every signature on the note must verify against a trusted key
    const signaturesOK = verifySignedNote(signedNote, validTLogs);
    if (!signaturesOK) {
        throw new error_1.VerificationError({
            code: 'TLOG_INCLUSION_PROOF_ERROR',
            message: 'invalid checkpoint signature',
        });
    }
    // Step 2: the checkpoint's root hash must agree with the root hash
    // recorded in the inclusion proof
    const hashesMatch = core_1.crypto.bufferEqual(checkpoint.logHash, proof.rootHash);
    if (!hashesMatch) {
        throw new error_1.VerificationError({
            code: 'TLOG_INCLUSION_PROOF_ERROR',
            message: 'root hash mismatch',
        });
    }
}
exports.verifyCheckpoint = verifyCheckpoint;
// Verifies the signatures in the SignedNote. For each signature, the
// corresponding transparency log is looked up by the key hint and the
// signature is verified against the public key in the transparency log.
// Returns false if any signature cannot be matched to a log or verified.
function verifySignedNote(signedNote, tlogs) {
    const noteBytes = Buffer.from(signedNote.note, 'utf-8');
    for (const sig of signedNote.signatures) {
        // Locate the tlog whose log ID begins with this signature's key hint
        const tlog = tlogs.find((candidate) => core_1.crypto.bufferEqual(candidate.logID.subarray(0, 4), sig.keyHint));
        // No matching log, or a bad signature, fails the whole note
        if (tlog === undefined) {
            return false;
        }
        if (!core_1.crypto.verify(noteBytes, tlog.publicKey, sig.signature)) {
            return false;
        }
    }
    return true;
}
// SignedNote represents a signed note from a transparency log checkpoint. Consists
// of a body (or note) and one or more signatures calculated over the body. See
// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
class SignedNote {
    constructor(note, signatures) {
        this.note = note;
        this.signatures = signatures;
    }
    // Deserialize a SignedNote from a string of the form
    // "<note>\n\n<signature lines>"
    static fromString(envelope) {
        if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
            throw new error_1.VerificationError({
                code: 'TLOG_INCLUSION_PROOF_ERROR',
                message: 'missing checkpoint separator',
            });
        }
        // Split the envelope at the separator: the header (note) keeps its
        // trailing newline; the remainder holds the signature lines
        const sepIndex = envelope.indexOf(CHECKPOINT_SEPARATOR);
        const header = envelope.slice(0, sepIndex + 1);
        const data = envelope.slice(sepIndex + CHECKPOINT_SEPARATOR.length);
        // Each signature line decodes to a 4-byte key hint (first 4 bytes of
        // the SHA256 of the log's public key) followed by the signature bytes
        const signatures = [];
        for (const [, name, sig] of data.matchAll(SIGNATURE_REGEX)) {
            const sigBytes = Buffer.from(sig, 'base64');
            // Must contain at least the 4-byte hint plus one signature byte
            if (sigBytes.length < 5) {
                throw new error_1.VerificationError({
                    code: 'TLOG_INCLUSION_PROOF_ERROR',
                    message: 'malformed checkpoint signature',
                });
            }
            signatures.push({
                name,
                keyHint: sigBytes.subarray(0, 4),
                signature: sigBytes.subarray(4),
            });
        }
        if (signatures.length === 0) {
            throw new error_1.VerificationError({
                code: 'TLOG_INCLUSION_PROOF_ERROR',
                message: 'no signatures found in checkpoint',
            });
        }
        return new SignedNote(header, signatures);
    }
}
// LogCheckpoint represents a transparency log checkpoint. Consists of the
// following:
// - origin: the name of the transparency log
// - logSize: the size of the log at the time of the checkpoint
// - logHash: the root hash of the log at the time of the checkpoint
// - rest: the rest of the checkpoint body, which is a list of log entries
// See:
// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
class LogCheckpoint {
    constructor(origin, logSize, logHash, rest) {
        this.origin = origin;
        this.logSize = logSize;
        this.logHash = logHash;
        this.rest = rest;
    }
    // Parses a checkpoint body of the form
    // "<origin>\n<size>\n<base64 root hash>\n<optional extra lines>"
    static fromString(note) {
        const [origin, sizeLine, hashLine, ...rest] = note.trimEnd().split('\n');
        // All three header lines are mandatory
        if (hashLine === undefined) {
            throw new error_1.VerificationError({
                code: 'TLOG_INCLUSION_PROOF_ERROR',
                message: 'too few lines in checkpoint header',
            });
        }
        return new LogCheckpoint(origin, BigInt(sizeLine), Buffer.from(hashLine, 'base64'), rest);
    }
}

View file

@ -0,0 +1,12 @@
/// <reference types="node" />
import { RFC3161Timestamp } from '@sigstore/core';
import type { TransparencyLogEntry } from '@sigstore/bundle';
import type { CertAuthority, TLogAuthority } from '../trust';
/** Discriminator identifying which kind of authority produced the timestamp. */
export type TimestampType = 'transparency-log' | 'timestamp-authority';
/** Result of a successful timestamp verification. */
export type TimestampVerificationResult = {
    type: TimestampType;
    logID: Buffer;
    timestamp: Date;
};
/**
 * Verifies an RFC 3161 timestamp over `data` against the trusted timestamp
 * authorities. Throws a VerificationError if no authority can verify it.
 * Note: in the result, `logID` carries the signer's serial number.
 */
export declare function verifyTSATimestamp(timestamp: RFC3161Timestamp, data: Buffer, timestampAuthorities: CertAuthority[]): TimestampVerificationResult;
/**
 * Verifies a tlog entry's inclusion promise (SET) and/or inclusion proof
 * against the trusted tlog authorities. Throws a VerificationError if
 * neither form of inclusion can be verified.
 */
export declare function verifyTLogTimestamp(entry: TransparencyLogEntry, tlogAuthorities: TLogAuthority[]): TimestampVerificationResult;

47
node_modules/@sigstore/verify/dist/timestamp/index.js generated vendored Normal file
View file

@ -0,0 +1,47 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyTLogTimestamp = exports.verifyTSATimestamp = void 0;
const error_1 = require("../error");
const checkpoint_1 = require("./checkpoint");
const merkle_1 = require("./merkle");
const set_1 = require("./set");
const tsa_1 = require("./tsa");
function verifyTSATimestamp(timestamp, data, timestampAuthorities) {
    // Throws if no trusted timestamp authority can verify the signature
    (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities);
    // Report the signer's serial number as the log ID for TSA results
    const result = {
        type: 'timestamp-authority',
        logID: timestamp.signerSerialNumber,
        timestamp: timestamp.signingTime,
    };
    return result;
}
exports.verifyTSATimestamp = verifyTSATimestamp;
function verifyTLogTimestamp(entry, tlogAuthorities) {
    const hasPromise = isTLogEntryWithInclusionPromise(entry);
    const hasProof = isTLogEntryWithInclusionProof(entry);
    // Verify whichever inclusion evidence the entry carries; each of these
    // calls throws on failure
    if (hasPromise) {
        (0, set_1.verifyTLogSET)(entry, tlogAuthorities);
    }
    if (hasProof) {
        (0, merkle_1.verifyMerkleInclusion)(entry);
        (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities);
    }
    // At least one form of inclusion evidence must have been present
    if (!hasPromise && !hasProof) {
        throw new error_1.VerificationError({
            code: 'TLOG_MISSING_INCLUSION_ERROR',
            message: 'inclusion could not be verified',
        });
    }
    return {
        type: 'transparency-log',
        logID: entry.logId.keyId,
        timestamp: new Date(Number(entry.integratedTime) * 1000),
    };
}
exports.verifyTLogTimestamp = verifyTLogTimestamp;
// Guard: true when the entry carries an inclusion promise (SET)
function isTLogEntryWithInclusionPromise(entry) {
    const { inclusionPromise } = entry;
    return inclusionPromise !== undefined;
}
// Guard: true when the entry carries a Merkle inclusion proof
function isTLogEntryWithInclusionProof(entry) {
    const { inclusionProof } = entry;
    return inclusionProof !== undefined;
}

View file

@ -0,0 +1,2 @@
import type { TLogEntryWithInclusionProof } from '@sigstore/bundle';
/**
 * Verifies the Merkle inclusion proof carried by the entry by recomputing
 * the tree's root hash from the entry's leaf hash and the proof hashes.
 * Throws a VerificationError if the recomputed root does not match.
 */
export declare function verifyMerkleInclusion(entry: TLogEntryWithInclusionProof): void;

105
node_modules/@sigstore/verify/dist/timestamp/merkle.js generated vendored Normal file
View file

@ -0,0 +1,105 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyMerkleInclusion = void 0;
/*
Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const core_1 = require("@sigstore/core");
const error_1 = require("../error");
const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
// Verifies the Merkle inclusion proof carried by the given tlog entry:
// recomputes the tree's root hash from the entry's leaf hash and the proof
// hashes (RFC 6962 hashing -- see the prefix constants above), then compares
// it against the root hash recorded in the proof. Throws a
// VerificationError on any mismatch or malformed proof.
function verifyMerkleInclusion(entry) {
    const inclusionProof = entry.inclusionProof;
    const logIndex = BigInt(inclusionProof.logIndex);
    const treeSize = BigInt(inclusionProof.treeSize);
    // The leaf index must lie within the tree
    if (logIndex < 0n || logIndex >= treeSize) {
        throw new error_1.VerificationError({
            code: 'TLOG_INCLUSION_PROOF_ERROR',
            message: `invalid index: ${logIndex}`,
        });
    }
    // Figure out which subset of hashes corresponds to the inner and border
    // nodes
    const { inner, border } = decompInclProof(logIndex, treeSize);
    // The proof must supply exactly one hash per inner and border level
    if (inclusionProof.hashes.length !== inner + border) {
        throw new error_1.VerificationError({
            code: 'TLOG_INCLUSION_PROOF_ERROR',
            message: 'invalid hash count',
        });
    }
    const innerHashes = inclusionProof.hashes.slice(0, inner);
    const borderHashes = inclusionProof.hashes.slice(inner);
    // The entry's hash is the leaf hash
    const leafHash = hashLeaf(entry.canonicalizedBody);
    // Chain the hashes belonging to the inner and border portions
    const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes);
    // Calculated hash should match the root hash in the inclusion proof
    if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) {
        throw new error_1.VerificationError({
            code: 'TLOG_INCLUSION_PROOF_ERROR',
            message: 'calculated root hash does not match inclusion proof',
        });
    }
}
exports.verifyMerkleInclusion = verifyMerkleInclusion;
// Breaks down inclusion proof for a leaf at the specified index in a tree of
// the specified size. The split point is where paths to the index leaf and
// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof
// parts.
// Breaks an inclusion proof for the leaf at `index` in a tree of `size`
// leaves into its two parts: `inner` is the number of proof hashes below the
// point where the paths to `index` and to leaf (size - 1) diverge, and
// `border` is the number of hashes along the tree's right border above it.
function decompInclProof(index, size) {
    const innerSize = innerProofSize(index, size);
    const borderSize = onesCount(index >> BigInt(innerSize));
    return { inner: innerSize, border: borderSize };
}
// Computes a subtree hash for a node on or below the tree's right border.
// Assumes the provided proof hashes are ordered from lower to higher levels
// and seed is the initial hash of the node specified by the index.
// Computes a subtree hash for a node on or below the tree's right border.
// `hashes` are ordered from lower to higher levels and `seed` is the initial
// hash of the node at `index`.
function chainInner(seed, hashes, index) {
    let node = seed;
    for (let level = 0; level < hashes.length; level++) {
        // The bit of `index` at this level decides whether the sibling hash
        // sits to the left or the right of the running node hash
        if ((index >> BigInt(level)) & BigInt(1)) {
            node = hashChildren(hashes[level], node);
        }
        else {
            node = hashChildren(node, hashes[level]);
        }
    }
    return node;
}
// Computes a subtree hash for nodes along the tree's right border.
// Computes a subtree hash for nodes along the tree's right border; each
// proof hash is always the left sibling of the running node hash.
function chainBorderRight(seed, hashes) {
    let node = seed;
    for (const sibling of hashes) {
        node = hashChildren(sibling, node);
    }
    return node;
}
// Size of the inner portion of an inclusion proof: the number of levels
// below the point where the paths from leaf `index` and from the last leaf
// (size - 1) diverge, i.e. the bit length of their XOR.
function innerProofSize(index, size) {
    const divergence = index ^ (size - BigInt(1));
    return divergence === 0n ? 0 : divergence.toString(2).length;
}
// Counts the number of ones in the binary representation of the given number.
// https://en.wikipedia.org/wiki/Hamming_weight
// Counts the number of ones in the binary representation of the given
// number (Hamming weight). https://en.wikipedia.org/wiki/Hamming_weight
function onesCount(num) {
    let count = 0;
    for (const digit of num.toString(2)) {
        if (digit === '1') {
            count += 1;
        }
    }
    return count;
}
// Returns the number of bits necessary to represent an integer in binary.
// Returns the number of bits necessary to represent an integer in binary
// (zero needs no bits at all).
function bitLength(n) {
    return n === 0n ? 0 : n.toString(2).length;
}
// Hashing logic according to RFC6962.
// https://datatracker.ietf.org/doc/html/rfc6962#section-2
// Hashes an interior node from its two children, using the 0x01 node prefix
// defined above (RFC 6962, section 2).
function hashChildren(left, right) {
    return core_1.crypto.hash(RFC6962_NODE_HASH_PREFIX, left, right);
}
// Hashes a leaf entry, using the 0x00 leaf prefix defined above
// (RFC 6962, section 2).
function hashLeaf(leaf) {
    return core_1.crypto.hash(RFC6962_LEAF_HASH_PREFIX, leaf);
}

View file

@ -0,0 +1,3 @@
import { TLogAuthority } from '../trust';
import type { TLogEntryWithInclusionPromise } from '@sigstore/bundle';
/**
 * Verifies the entry's Signed Entry Timestamp (SET) against the trusted
 * transparency logs. Throws a VerificationError if no eligible log's key
 * verifies the SET.
 */
export declare function verifyTLogSET(entry: TLogEntryWithInclusionPromise, tlogs: TLogAuthority[]): void;

61
node_modules/@sigstore/verify/dist/timestamp/set.js generated vendored Normal file
View file

@ -0,0 +1,61 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyTLogSET = void 0;
/*
Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const core_1 = require("@sigstore/core");
const error_1 = require("../error");
const trust_1 = require("../trust");
// Verifies the SET for the given entry against the list of trusted
// transparency logs. Throws a VerificationError if the SET cannot be
// verified against any of the eligible logs.
// Verifies the Signed Entry Timestamp (SET) for the given entry against the
// list of trusted transparency logs. Throws a VerificationError if the SET
// cannot be verified against any eligible log.
function verifyTLogSET(entry, tlogs) {
    // Filter the list of tlog instances to only those which might be able to
    // verify the SET (matching log ID, valid at the entry's integration time)
    const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
        logID: entry.logId.keyId,
        targetDate: new Date(Number(entry.integratedTime) * 1000),
    });
    // Re-create the original Rekor verification payload and canonicalize it
    // once -- it does not depend on which tlog key is used for verification,
    // so there is no need to rebuild it inside the loop below
    const payload = toVerificationPayload(entry);
    const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8');
    // Extract the SET from the tlog entry
    const signature = entry.inclusionPromise.signedEntryTimestamp;
    // Check to see if we can verify the SET against any of the valid tlogs
    const verified = validTLogs.some((tlog) => core_1.crypto.verify(data, tlog.publicKey, signature));
    if (!verified) {
        throw new error_1.VerificationError({
            code: 'TLOG_INCLUSION_PROMISE_ERROR',
            message: 'inclusion promise could not be verified',
        });
    }
}
exports.verifyTLogSET = verifyTLogSET;
// Returns a properly formatted "VerificationPayload" for one of the
// transparency log entries in the given bundle which can be used for SET
// verification.
// Builds the canonical "VerificationPayload" object for a transparency log
// entry, in the shape Rekor signs when issuing a SET: base64 body, numeric
// time/index, and hex-encoded log ID.
function toVerificationPayload(entry) {
    return {
        body: entry.canonicalizedBody.toString('base64'),
        integratedTime: Number(entry.integratedTime),
        logIndex: Number(entry.logIndex),
        logID: entry.logId.keyId.toString('hex'),
    };
}

View file

@ -0,0 +1,4 @@
/// <reference types="node" />
import { RFC3161Timestamp } from '@sigstore/core';
import { CertAuthority } from '../trust';
/**
 * Verifies an RFC 3161 timestamp over `data` against the trusted timestamp
 * authorities. Throws a VerificationError if no authority whose leaf cert
 * matches the timestamp's signer can verify the signature.
 */
export declare function verifyRFC3161Timestamp(timestamp: RFC3161Timestamp, data: Buffer, timestampAuthorities: CertAuthority[]): void;

74
node_modules/@sigstore/verify/dist/timestamp/tsa.js generated vendored Normal file
View file

@ -0,0 +1,74 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyRFC3161Timestamp = void 0;
const core_1 = require("@sigstore/core");
const error_1 = require("../error");
const certificate_1 = require("../key/certificate");
const trust_1 = require("../trust");
function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) {
    const signingTime = timestamp.signingTime;
    // Narrow the CA list twice: first to CAs valid at the signing time, then
    // to CAs whose leaf matches the serial/issuer embedded in the timestamp
    const validCAs = filterCAsBySerialAndIssuer((0, trust_1.filterCertAuthorities)(timestampAuthorities, {
        start: signingTime,
        end: signingTime,
    }), {
        serialNumber: timestamp.signerSerialNumber,
        issuer: timestamp.signerIssuer,
    });
    // Accept the timestamp if AT LEAST ONE of the remaining CAs verifies it
    let verified = false;
    for (const ca of validCAs) {
        try {
            verifyTimestampForCA(timestamp, data, ca);
            verified = true;
            break;
        }
        catch (e) {
            // This CA failed -- keep trying the rest
        }
    }
    if (!verified) {
        throw new error_1.VerificationError({
            code: 'TIMESTAMP_ERROR',
            message: 'timestamp could not be verified',
        });
    }
}
exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp;
// Verifies the RFC 3161 timestamp against a single CA: checks the CA's cert
// chain, checks every cert in the chain was valid at the signing time, and
// finally checks the timestamp signature with the leaf's public key.
// Throws (VerificationError, or whatever timestamp.verify throws) on any
// failure; returns nothing on success.
function verifyTimestampForCA(timestamp, data, ca) {
    // First chain element is the leaf (signing) cert; the rest are trusted
    const [leaf, ...cas] = ca.certChain;
    const signingKey = core_1.crypto.createPublicKey(leaf.publicKey);
    const signingTime = timestamp.signingTime;
    // Verify the certificate chain for the provided CA
    try {
        new certificate_1.CertificateChainVerifier({
            untrustedCert: leaf,
            trustedCerts: cas,
        }).verify();
    }
    catch (e) {
        // Normalize any chain-verification failure into a VerificationError
        throw new error_1.VerificationError({
            code: 'TIMESTAMP_ERROR',
            message: 'invalid certificate chain',
        });
    }
    // Check that all of the CA certs were valid at the time of signing
    const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime));
    if (!validAtSigningTime) {
        throw new error_1.VerificationError({
            code: 'TIMESTAMP_ERROR',
            message: 'timestamp was signed with an expired certificate',
        });
    }
    // Check that the signing certificate's key can be used to verify the
    // timestamp signature.
    timestamp.verify(data, signingKey);
}
// Filters the list of CAs to those which have a leaf signing certificate which
// matches the given serial number and issuer.
// Filters the CA list down to those whose leaf (signing) certificate
// matches the given serial number and issuer.
function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) {
    const leafMatches = (ca) => {
        const [leaf] = ca.certChain;
        return (leaf !== undefined &&
            core_1.crypto.bufferEqual(leaf.serialNumber, criteria.serialNumber) &&
            core_1.crypto.bufferEqual(leaf.issuer, criteria.issuer));
    };
    return timestampAuthorities.filter(leafMatches);
}