Mirror of https://github.com/HeyPuter/puter.git (synced 2025-02-02 23:28:39 +08:00)

dev: get token counts on all services

This commit is contained in:
    parent f3d270ccbc
    commit 2dd9417234

Changed files:
    package-lock.json (generated): 28 changes
@@ -3412,9 +3412,9 @@
       }
     },
     "node_modules/@mistralai/mistralai": {
-      "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/@mistralai/mistralai/-/mistralai-1.0.3.tgz",
-      "integrity": "sha512-161lmlaMrQvQeC97LG3GVpQi+LKKmGb6VweEFq6otc4J4kEVaJu6RzmH5UuLCt2eSes1Q5faY6YAPTkGOo0atw==",
+      "version": "1.3.4",
+      "resolved": "https://registry.npmjs.org/@mistralai/mistralai/-/mistralai-1.3.4.tgz",
+      "integrity": "sha512-db5UhCXqH0N05XbXMR/2bSiGKIFUzS6p0sI9Nl2XDmJuDZIm+WRGTlsq60ALwhvKpHcQKzN5L58HIneksRrn9g==",
       "peerDependencies": {
         "zod": ">= 3"
       }
@@ -13728,10 +13728,9 @@
       }
     },
     "node_modules/openai": {
-      "version": "4.52.3",
-      "resolved": "https://registry.npmjs.org/openai/-/openai-4.52.3.tgz",
-      "integrity": "sha512-IyQLYKGYoEEkUCEm2frPzwHDJ3Ym663KtivnY6pWCzuoi6/HgSIMMxpcuTRS81GH6tiULPYGmTxIvzXdmPIWOw==",
-      "license": "Apache-2.0",
+      "version": "4.73.1",
+      "resolved": "https://registry.npmjs.org/openai/-/openai-4.73.1.tgz",
+      "integrity": "sha512-nWImDJBcUsqrhy7yJScXB4+iqjzbUEgzfA3un/6UnHFdwWhjX24oztj69Ped/njABfOdLcO/F7CeWTI5dt8Xmg==",
       "dependencies": {
         "@types/node": "^18.11.18",
         "@types/node-fetch": "^2.6.4",
@@ -13739,11 +13738,18 @@
         "agentkeepalive": "^4.2.1",
         "form-data-encoder": "1.7.2",
         "formdata-node": "^4.3.2",
-        "node-fetch": "^2.6.7",
-        "web-streams-polyfill": "^3.2.1"
+        "node-fetch": "^2.6.7"
       },
       "bin": {
         "openai": "bin/cli"
+      },
+      "peerDependencies": {
+        "zod": "^3.23.8"
+      },
+      "peerDependenciesMeta": {
+        "zod": {
+          "optional": true
+        }
       }
     },
     "node_modules/openai/node_modules/@types/node": {
@@ -17320,7 +17326,7 @@
         "@heyputer/kv.js": "^0.1.3",
         "@heyputer/multest": "^0.0.2",
         "@heyputer/putility": "^1.0.0",
-        "@mistralai/mistralai": "^1.0.3",
+        "@mistralai/mistralai": "^1.3.4",
         "@opentelemetry/api": "^1.4.1",
         "@opentelemetry/auto-instrumentations-node": "^0.43.0",
         "@opentelemetry/exporter-trace-otlp-grpc": "^0.40.0",
@@ -17365,7 +17371,7 @@
         "murmurhash": "^2.0.1",
         "nodemailer": "^6.9.3",
         "on-finished": "^2.4.1",
-        "openai": "^4.20.1",
+        "openai": "^4.73.1",
         "otpauth": "9.2.4",
         "prompt-sync": "^4.2.0",
         "recursive-readdir": "^2.2.3",
@@ -13,7 +13,7 @@
     "@heyputer/kv.js": "^0.1.3",
     "@heyputer/multest": "^0.0.2",
     "@heyputer/putility": "^1.0.0",
-    "@mistralai/mistralai": "^1.0.3",
+    "@mistralai/mistralai": "^1.3.4",
     "@opentelemetry/api": "^1.4.1",
     "@opentelemetry/auto-instrumentations-node": "^0.43.0",
     "@opentelemetry/exporter-trace-otlp-grpc": "^0.40.0",
@@ -58,7 +58,7 @@
     "murmurhash": "^2.0.1",
     "nodemailer": "^6.9.3",
     "on-finished": "^2.4.1",
-    "openai": "^4.20.1",
+    "openai": "^4.73.1",
     "otpauth": "9.2.4",
     "prompt-sync": "^4.2.0",
     "recursive-readdir": "^2.2.3",
@@ -150,6 +150,7 @@ class AIChatService extends BaseService {
         const svc_driver = this.services.get('driver');
         let ret, error, errors = [];
         let service_used = intended_service;
+        let model_used = this.get_model_from_request(parameters);
         try {
             ret = await svc_driver.call_new_({
                 actor: Context.get('actor'),
@@ -160,7 +161,7 @@ class AIChatService extends BaseService {
             });
         } catch (e) {
             const tried = [];
-            let model = this.get_model_from_request(parameters);
+            let model = model_used;

             // TODO: if conflict models exist, add service name
             tried.push(model);
@@ -205,6 +206,7 @@ class AIChatService extends BaseService {
                     });
                     error = null;
                     service_used = fallback_service_name;
+                    model_used = fallback_model_name;
                     response_metadata.fallback = {
                         service: fallback_service_name,
                         model: fallback_model_name,
@@ -233,6 +235,8 @@ class AIChatService extends BaseService {
             intended_service,
             parameters,
             result: ret.result,
+            model_used,
+            service_used,
         });

         return ret.result;
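Note on the AIChatService hunks above: the model is now resolved once into model_used before the driver call and overwritten when a fallback model is chosen, so the reporting call at the end of the method receives the service and model that actually produced the result rather than the ones originally requested. A minimal self-contained sketch of that tracking pattern, assuming simplified names (with_fallback, report, and the literal service/model strings are illustrative, not Puter APIs):

// Sketch of the tracking pattern (not Puter code; names are illustrative).
async function with_fallback (attempt, fallback, report) {
    let service_used = 'intended-service';
    let model_used = 'intended-model';            // resolved once, up front
    let result;
    try {
        result = await attempt();
    } catch (e) {
        service_used = 'fallback-service';
        model_used = 'fallback-model';            // overwritten only on fallback
        result = await fallback();
    }
    report({ service_used, model_used, result }); // reflects what actually ran
    return result;
}

// Usage: the report always names the service/model that served the request.
await with_fallback(
    async () => { throw new Error('primary failed'); },
    async () => 'ok from fallback',
    (info) => console.log(info),
);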
@@ -141,6 +141,7 @@ class ClaudeService extends BaseService {
         });
         return {
             message: msg,
+            usage: msg.usage,
             finish_reason: 'stop'
         };
     }
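The ClaudeService hunk forwards the Anthropic SDK's usage object unchanged; Anthropic's Messages API already reports token counts as input_tokens/output_tokens, which is the naming the other drivers are normalized to below. A rough sketch of the returned shape, with made-up values:

// Illustrative shape of what the driver now returns (values are made up).
const example_return = {
    message: { /* Anthropic message object */ },
    usage: { input_tokens: 42, output_tokens: 317 }, // msg.usage, passed through
    finish_reason: 'stop',
};
console.log(example_return.usage.input_tokens + example_return.usage.output_tokens); // 359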
@@ -74,7 +74,12 @@ class GroqAIService extends BaseService {
                return retval;
            }

-           return completion.choices[0];
+           const ret = completion.choices[0];
+           ret.usage = {
+               input_tokens: completion.usage.prompt_tokens,
+               output_tokens: completion.usage.completion_tokens,
+           };
+           return ret;
        }
    }
};
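Groq's chat completions API is OpenAI-compatible, so its SDK reports usage as prompt_tokens/completion_tokens; the hunk above copies those onto an input_tokens/output_tokens object to match the Claude/XAI naming. OpenAICompletionService and TogetherAIService below apply the same mapping. A small sketch of just that mapping (the helper name is illustrative, not from the codebase):

// Sketch: normalize OpenAI-style usage to the input/output naming used elsewhere.
const normalize_openai_usage = (usage) => ({
    input_tokens: usage?.prompt_tokens,
    output_tokens: usage?.completion_tokens,
});

console.log(normalize_openai_usage({ prompt_tokens: 10, completion_tokens: 25 }));
// -> { input_tokens: 10, output_tokens: 25 }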
@@ -180,22 +180,17 @@ class MistralAIService extends BaseService {
                return retval;
            }

-           try {
-               const completion = await this.client.chat.complete({
-                   model: model ?? this.get_default_model(),
-                   messages,
-               });
-               // Expected case when mistralai/client-ts#23 is fixed
-               return completion.choices[0];
-           } catch (e) {
-               if ( ! e?.rawValue?.choices[0] ) {
-                   throw e;
-               }
-               // The SDK attempts to validate APIs response and throws
-               // an exception, even if the response was successful
-               // https://github.com/mistralai/client-ts/issues/23
-               return e.rawValue.choices[0];
-           }
+           const completion = await this.client.chat.complete({
+               model: model ?? this.get_default_model(),
+               messages,
+           });
+           // Expected case when mistralai/client-ts#23 is fixed
+           const ret = completion.choices[0];
+           ret.usage = {
+               input_tokens: completion.usage.promptTokens,
+               output_tokens: completion.usage.completionTokens,
+           };
+           return ret;
        }
    }
}
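Two things happen in the MistralAIService hunk: the try/catch that salvaged e.rawValue.choices[0] as a workaround for mistralai/client-ts#23 is dropped alongside the SDK bump to 1.3.4, and usage is mapped from the Mistral TypeScript SDK's camelCase fields rather than the snake_case fields used by the OpenAI-style SDKs above. A sketch of just the field mapping (helper name illustrative):

// Sketch: the Mistral TS SDK reports camelCase usage fields.
const normalize_mistral_usage = (usage) => ({
    input_tokens: usage?.promptTokens,
    output_tokens: usage?.completionTokens,
});

console.log(normalize_mistral_usage({ promptTokens: 12, completionTokens: 90 }));
// -> { input_tokens: 12, output_tokens: 90 }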
@@ -361,7 +361,12 @@ class OpenAICompletionService extends BaseService {
            }
        }

-       return completion.choices[0];
+       const ret = completion.choices[0];
+       ret.usage = {
+           input_tokens: completion.usage.prompt_tokens,
+           output_tokens: completion.usage.completion_tokens,
+       };
+       return ret;
    }
}

@@ -76,7 +76,13 @@ class TogetherAIService extends BaseService {
                return retval;
            }

-           return completion.choices[0];
+           // return completion.choices[0];
+           const ret = completion.choices[0];
+           ret.usage = {
+               input_tokens: completion.usage.prompt_tokens,
+               output_tokens: completion.usage.completion_tokens,
+           };
+           return ret;
        }
    }
}
@@ -135,6 +135,7 @@ class XAIService extends BaseService {
         });
         return {
             message: msg,
+            usage: msg.usage,
             finish_reason: 'stop'
         };
     }
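With the XAIService hunk above, every provider driver now attaches usage in the same { input_tokens, output_tokens } shape, which is what lets the chat layer report token counts regardless of which service handled the request. A tiny illustrative consumer (not from the codebase):

// Illustrative: downstream accounting can total tokens without caring
// which provider produced the result.
const total_tokens = (ret) => {
    const { input_tokens = 0, output_tokens = 0 } = ret.usage ?? {};
    return input_tokens + output_tokens;
};

console.log(total_tokens({ usage: { input_tokens: 5, output_tokens: 20 } })); // 25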