import { TokenJS } from 'token.js';

// Create the Token.js client (reads provider API keys from the environment).
const tokenjs = new TokenJS();

/**
 * Sends a single "Hello!" user message to Google's Gemini 1.5 Pro through
 * the Token.js unified chat-completions API and logs the first choice.
 *
 * @returns {Promise<void>} resolves once the completion has been logged
 */
async function main() {
  // Create a model response
  const completion = await tokenjs.chat.completions.create({
    // Specify the provider and model
    provider: 'gemini',
    model: 'gemini-1.5-pro',
    // Define your message
    messages: [
      {
        role: 'user',
        content: 'Hello!',
      },
    ],
  });
  console.log(completion.choices[0]);
}

// Don't leave the promise floating: surface API/network failures instead of
// dying with an unhandled rejection, and signal failure via the exit code.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
Supported by Token.js.
Not supported by the underlying LLM provider, so Token.js cannot support it either.