Skip to content

Commit 9b97310

Browse files
committed
cleanup
1 parent 604a419 commit 9b97310

File tree

2 files changed

+0
-36
lines changed

2 files changed

+0
-36
lines changed

packages/ai/integration/constants.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,6 @@ const backendNames: Map<BackendType, string> = new Map([
5454

5555
const modelNames: readonly string[] = [
5656
'gemini-2.0-flash'
57-
// 'gemini-2.0-flash-exp'
5857
];
5958

6059
/**

packages/ai/integration/generate-content.test.ts

Lines changed: 0 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -135,41 +135,6 @@ describe('Generate Content', () => {
135135
const trimmedText = response.text().trim();
136136
expect(trimmedText).to.equal('Mountain View');
137137
expect(response.usageMetadata).to.be.undefined; // Note: This is incorrect behavior.
138-
139-
/*
140-
expect(response.usageMetadata).to.exist;
141-
expect(response.usageMetadata!.promptTokenCount).to.be.closeTo(
142-
21,
143-
TOKEN_COUNT_DELTA
144-
); // TODO: fix promptTokenCount is undefined
145-
// Candidate token count can be slightly different in streaming
146-
expect(response.usageMetadata!.candidatesTokenCount).to.be.closeTo(
147-
4,
148-
TOKEN_COUNT_DELTA + 1 // Allow slightly more variance for stream
149-
);
150-
expect(response.usageMetadata!.totalTokenCount).to.be.closeTo(
151-
25,
152-
TOKEN_COUNT_DELTA * 2 + 1 // Allow slightly more variance for stream
153-
);
154-
expect(response.usageMetadata!.promptTokensDetails).to.not.be.null;
155-
expect(response.usageMetadata!.promptTokensDetails!.length).to.equal(1);
156-
expect(
157-
response.usageMetadata!.promptTokensDetails![0].modality
158-
).to.equal(Modality.TEXT);
159-
expect(
160-
response.usageMetadata!.promptTokensDetails![0].tokenCount
161-
).to.equal(21);
162-
expect(response.usageMetadata!.candidatesTokensDetails).to.not.be.null;
163-
expect(
164-
response.usageMetadata!.candidatesTokensDetails!.length
165-
).to.equal(1);
166-
expect(
167-
response.usageMetadata!.candidatesTokensDetails![0].modality
168-
).to.equal(Modality.TEXT);
169-
expect(
170-
response.usageMetadata!.candidatesTokensDetails![0].tokenCount
171-
).to.be.closeTo(4, TOKEN_COUNT_DELTA + 1); // Allow slightly more variance for stream
172-
*/
173138
});
174139
});
175140
});

0 commit comments

Comments (0)