# Gemini

## Usage
To use Gemini with Token.js, set your Gemini API key as an environment variable:

```bash
GEMINI_API_KEY=
```
```typescript
import { TokenJS } from 'token.js'

// Create the Token.js client
const tokenjs = new TokenJS()

async function main() {
  // Create a model response
  const completion = await tokenjs.chat.completions.create({
    // Specify the provider and model
    provider: 'gemini',
    model: 'gemini-1.5-pro',
    // Define your message
    messages: [
      {
        role: 'user',
        content: 'Hello!',
      },
    ],
  })
  console.log(completion.choices[0])
}

main()
```
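The Gemini models listed below also support streaming through the same OpenAI-compatible interface. The sketch that follows assumes the standard Token.js `stream: true` option, where the result is an async iterable of chunks and incremental text arrives on `choices[0].delta.content`:

```typescript
import { TokenJS } from 'token.js'

const tokenjs = new TokenJS()

async function streamExample() {
  // Request a streaming completion (sketch; assumes the OpenAI-compatible
  // `stream: true` option documented elsewhere in Token.js)
  const result = await tokenjs.chat.completions.create({
    provider: 'gemini',
    model: 'gemini-1.5-flash',
    messages: [{ role: 'user', content: 'Tell me a short story.' }],
    stream: true,
  })

  // Each chunk carries an incremental delta of the response text
  for await (const part of result) {
    process.stdout.write(part.choices[0]?.delta?.content ?? '')
  }
}

streamExample()
```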
## Supported Models
| Model | Chat Completion | Streaming | JSON Output | Image Input | Function Calling | N > 1 |
| --- | --- | --- | --- | --- | --- | --- |
| gemini-2.0-flash-001 | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
| gemini-2.0-flash-lite-preview-02-05 | ✅ | ✅ | ✅ | ✅ | ➖ | ✅ |
| gemini-1.5-pro | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
| gemini-1.5-flash | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
| gemini-1.5-flash-8b | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
| gemini-1.0-pro | ✅ | ✅ | ➖ | ➖ | ✅ | ✅ |
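Most of the models above support function calling. The sketch below assumes Token.js accepts the OpenAI-compatible `tools` request field and exposes the model's tool calls on `message.tool_calls`; the `get_weather` function and its parameters are hypothetical and only for illustration:

```typescript
import { TokenJS } from 'token.js'

const tokenjs = new TokenJS()

async function functionCallingExample() {
  const completion = await tokenjs.chat.completions.create({
    provider: 'gemini',
    model: 'gemini-1.5-pro',
    messages: [{ role: 'user', content: "What's the weather in San Francisco?" }],
    // OpenAI-compatible tool definition; `get_weather` is a hypothetical function
    tools: [
      {
        type: 'function',
        function: {
          name: 'get_weather',
          description: 'Get the current weather for a city',
          parameters: {
            type: 'object',
            properties: {
              city: { type: 'string', description: 'The city name' },
            },
            required: ['city'],
          },
        },
      },
    ],
  })

  // If the model decides to call the tool, the request appears here
  console.log(completion.choices[0].message.tool_calls)
}

functionCallingExample()
```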
### Legend

| Symbol | Description |
| --- | --- |
| ✅ | Supported by Token.js |
| ➖ | Not supported by the LLM provider, so Token.js cannot support it |
## Additional Resources