// Unlock queue, change token count, temperature & MORE!!
// (NOTE: the two lines above/below were stray install-page text — "当前为" is a
// leftover Chinese fragment meaning "currently [version]" — commented out so the file parses.)
// ==UserScript==
// @name SpicyChat Utilities
// @version 0.0.2
// @namespace skibidi
// @description Unlock queue, change token count, temperature & MORE!!
// @author xin
// @license MIT
// @match *://*.spicychat.ai/*
// @grant none
// ==/UserScript==
// ===> Models <===
// default
// default-13b-8k
// default-70b
// mixtral-8x7B
// noromaid-45b
// midnightrose-70b
// (if they add new models, skill issue!)
// Majorly outdated but i can't be bothered to update it, lol
// vvv example vvv
// localStorage.setItem("model", "default");
// ==> Parameters <==
// vvv example(s) vvv
// localStorage.setItem("max_new_tokens", 180);
// localStorage.setItem("temperature", 0.7);
// localStorage.setItem("top_p", 0.7);
// localStorage.setItem("top_k", 90);
(function() {
    'use strict';
    console.log("Script loaded, probably!");

    // Keep a reference to the real send() so the wrapper below can delegate to it.
    XMLHttpRequest.prototype.realSend = XMLHttpRequest.prototype.send; // I love man in the middle attacks!

    /**
     * Wrapped XMLHttpRequest.send.
     *
     * 1. Model/parameter override: if the outgoing body contains
     *    "inference_model", parse it as JSON, overwrite the model and the
     *    inference settings with values from localStorage (falling back to the
     *    defaults shown in the header comments), and re-serialize.
     * 2. Queue bypass: on readystatechange, if the response URL contains
     *    "queue" and the response is non-empty, replace responseText with a
     *    spoofed {"status":"access"} payload.
     *
     * @param {Document|XMLHttpRequestBodyInit|null} [data] request body
     */
    XMLHttpRequest.prototype.send = function(data) {
        if (data != undefined && data.includes("inference_model")) { // Model changer
            const payload = JSON.parse(data);
            // NOTE(review): assumes the payload always carries an
            // 'inference_settings' object — true for the requests matched by
            // the "inference_model" check above.
            const settings = payload['inference_settings'];
            // BUG FIX: the original used `==` (comparison, a no-op) instead of
            // `=` here, so none of the parameter overrides were ever applied.
            // localStorage stores strings, so coerce numeric settings with
            // Number(); getItem() returns null when unset, so `??` supplies
            // the defaults.
            payload['inference_model'] = localStorage.getItem("model") ?? "default";
            settings['max_new_tokens'] = Number(localStorage.getItem("max_new_tokens") ?? 180);
            settings['temperature'] = Number(localStorage.getItem("temperature") ?? 0.7);
            settings['top_p'] = Number(localStorage.getItem("top_p") ?? 0.7);
            settings['top_k'] = Number(localStorage.getItem("top_k") ?? 90);
            data = JSON.stringify(payload);
        }
        this.addEventListener('readystatechange', function() {
            if (this.responseURL.includes("queue") && this.response != "") { // Queue bypass
                // responseText is normally a read-only accessor on the
                // prototype; shadow it with a writable own data property so it
                // can be overwritten.
                Object.defineProperty(this, "responseText", { writable: true });
                this.responseText = '{"status":"access"}';
            }
        });
        this.realSend(data);
    };
})();