Hcaptcha Solver with Browser Trainer(Automatically solves Hcaptcha in browser)

Hcaptcha Solver in Browser | Automatically solves Hcaptcha in browser

当前为 2024-06-19 提交的版本,查看 最新版本

  1. // ==UserScript==
  2. // @name Hcaptcha Solver with Browser Trainer(Automatically solves Hcaptcha in browser)
  3. // @namespace Hcaptcha Solver
  4. // @version 10.0
  5. // @description Hcaptcha Solver in Browser | Automatically solves Hcaptcha in browser
  6. // @author Md ubeadulla
  7. // @match https://*.hcaptcha.com/*hcaptcha-challenge*
  8. // @match https://*.hcaptcha.com/*checkbox*
  9. // @grant GM_xmlhttpRequest
  10. // @grant GM_setValue
  11. // @grant GM_getValue
  12. // @run-at document-start
  13. // @connect www.imageidentify.com
  14. // @connect https://cdnjs.cloudflare.com
  15. // @connect https://cdn.jsdelivr.net
  16. // @connect https://unpkg.com
  17. // @connect https://*.hcaptcha.com/*
  18. // @require https://unpkg.com/jimp@0.5.2/browser/lib/jimp.min.js
  19. // @require https://cdnjs.cloudflare.com/ajax/libs/tesseract.js/2.0.0-alpha.2/tesseract.min.js
  20. // @require https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@3.13.0/dist/tf.min.js
  21. // @require https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd@2.2.2/dist/coco-ssd.min.js
  22. // @require https://cdn.jsdelivr.net/npm/@tensorflow-models/mobilenet@2.1.0/dist/mobilenet.min.js
  23.  
  24. /*
  25. ██╗░░██╗░█████╗░░█████╗░██████╗░████████╗░█████╗░██╗░░██╗░█████╗░  ░██████╗░█████╗░██╗░░░░░██╗░░░██╗███████╗██████╗░
  26. ██║░░██║██╔══██╗██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██║░░██║██╔══██╗  ██╔════╝██╔══██╗██║░░░░░██║░░░██║██╔════╝██╔══██╗
  27. ███████║██║░░╚═╝███████║██████╔╝░░░██║░░░██║░░╚═╝███████║███████║  ╚█████╗░██║░░██║██║░░░░░╚██╗░██╔╝█████╗░░██████╔╝
  28. ██╔══██║██║░░██╗██╔══██║██╔═══╝░░░░██║░░░██║░░██╗██╔══██║██╔══██║  ░╚═══██╗██║░░██║██║░░░░░░╚████╔╝░██╔══╝░░██╔══██╗
  29. ██║░░██║╚█████╔╝██║░░██║██║░░░░░░░░██║░░░╚█████╔╝██║░░██║██║░░██║  ██████╔╝╚█████╔╝███████╗░░╚██╔╝░░███████╗██║░░██║
  30. ╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚═╝░░░░░░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚═╝░░╚═╝  ╚═════╝░░╚════╝░╚══════╝░░░╚═╝░░░╚══════╝╚═╝░░╚═╝
  31. */
  32. /** Note: This script is solely intended for the use of educational purposes only and not to abuse any website.
  33. * Sign Up using the referral links or consider a donation to the following addresses:
  34. ***************************************************************************************************
  35. * Faucets: *
* Install the free cryptorotator https://greasyfork.org/en/scripts/426599-free-cryptorotator *
  37. * Or Sign up using the referral link and solve captchas to earn crypto (Do not abuse the websites)*
  38. * 1.) https://get-bitcoin.net/?ref=9230 *
  39. * 2.) https://getdoge.io/?ref=34017 *
  40. * 3.) https://r.adbtc.top/1771513 *
  41. * 4.) https://cryptowin.io/ref/ubeadulla *
  42. * 5.) https://winalittle.fun/referral/02c7061877cec89e81a306303d36b77c *
  43. * 6.) https://faucetofbob.xyz/?ref=2121 *
  44. * 7.) https://free-litecoin.com/login?referer=1035367 *
  45. * 8.) https://free-ethereum.io/?referer=742436 *
  46. * 9.) https://litking.biz/signup?r=125431 *
  47. * 10.) https://bitking.biz/signup?r=75339 *
  48. ***************************************************************************************************
  49. * MicroWallets: *
  50. * 1.) FaucetPay: BTC: 1HeD2a11n8d9zBTaznNWfVxtw1dKuW2vT5 *
  51. * LTC: MHpCuD3zAFEkeuhbgLbuZKcfdqMFkaLSem *
  52. * BCH: bitcoincash:qp7ywra8h7lwatcuc7u65lv8x6rv5kn4sutrsnzrpx *
  53. * TRX: TLs3iQfXJs1rmUuG6pkLkUwcu32mFUwzgu *
  54. * Doge: DPtBQG9GNTYHUFkjB2zYWYah4nCCogVAt9 * *
  55. * 2.) Direct BTC: 35HbfGfvbdctzY6tT4jcHXRx4zonBTnDuC *
  56. ***************************************************************************************************
  57. * Cloud Mining Websites Just SignUp and earn passive income * *
  58. * 1.) https://tronrex.online/r/86733 *
  59. * *
  60. ***************************************************************************************************
  61. */
  62.  
  63. // ==/UserScript==
(async function() {

//TODO: Enable debug mode to print console logs
//TODO: Refactor Code for different models
'use strict';

// Number of grid tiles processed so far in the current 3x3 challenge; 9 means done.
var selectedImageCount = 0;
// Lazily-initialized ML models and OCR worker, shared across the whole script.
var tensorFlowModel = undefined;
var tensorFlowMobileNetModel = undefined;
var worker = undefined;

// State for the example-image / object-identification flow.
var identifiedObjectsList = [];
var exampleImageList = [];
var identifyObjectsFromImagesCompleted = false;
var currentExampleUrls = [];

//Default Language for hcaptcha
const LANG_ENGLISH = "English"
const DEFAULT_LANGUAGE = LANG_ENGLISH;
const ENABLE_DEFAULT_LANGUAGE = true;

//Guess/Match New Images
const MATCH_IMAGES_USING_TRAINER = false;
const GUESS_NEW_IMAGE_TYPE = false;

//Node Selectors (CSS selectors into the hcaptcha challenge DOM)
const CHECK_BOX = "#checkbox";
const SUBMIT_BUTTON = ".button-submit";
const TASK_IMAGE_BORDER = ".task-image .border";
const IMAGE = ".task-image .image";
const TASK_IMAGE = ".task-image";
const PROMPT_TEXT = ".prompt-text";
const NO_SELECTION = ".no-selection";
const CHALLENGE_INPUT_FIELD = ".challenge-input .input-field";
const CHALLENGE_INPUT = ".challenge-input";
const CHALLENGE_IMAGE = ".challenge-example .image .image";
const IMAGE_FOR_OCR = ".challenge-image .zoom-image";
const LANGUAGE_SELECTOR = "#language-list .scroll-container .option span";

//Attributes
const ARIA_CHECKED = "aria-checked";
const ARIA_HIDDEN = "aria-hidden";

//Values that can be changed for other languages
const AIRPLANE = "airplane";
const BICYCLE = "bicycle";
const BOAT = "boat";
const BUS = "bus";
const CAR = "car";
const MOTORBUS = "motorbus";
const MOTORCYCLE = "motorcycle";
const SURFBOARD = "surfboard";
const TRAIN = "train";
const TRUCK = "truck";
const TRIMARAN = "trimaran";
const SEAPLANE = "seaplane";
const SPEEDBOAT = "speedboat";

//Living Room Objects
const BED = "bed";
const BOOK = "book";
const CHAIR = "chair";
const CLOCK = "clock";
const COUCH = "couch";
const DINING_TABLE = "dining table";
const POTTED_PLANT = "potted plant";
const TV = "tv";

//Animals
const ZEBRA = "zebra";
const CAT = "cat";
const DOG = "dog";

// Vertical River
const VALLEY = "valley";
const VERTICAL_RIVER = "vertical river";


// Groupings used by getSynonyms() to pick a matching strategy per challenge word.
const LIVING_ROOM_TYPES = [BED, BOOK, CHAIR, CLOCK, COUCH, DINING_TABLE, POTTED_PLANT, TV];
const TRANSPORT_TYPES = [AIRPLANE, BICYCLE, BOAT, BUS, CAR, MOTORBUS, MOTORCYCLE, SEAPLANE, SPEEDBOAT, SURFBOARD, TRAIN, TRIMARAN, TRUCK];
const ANIMAL_TYPES = [ZEBRA, CAT, DOG];

// Prompt prefixes used to extract the challenge word from the prompt text.
const SENTENCE_TEXT_A = "Please click each image containing a ";
const SENTENCE_TEXT_AN = "Please click each image containing an ";
const LANGUAGE_FOR_OCR = "eng";

// Option to override the default image matching
// Enabling this by default
const ENABLE_TENSORFLOW = true;

// Max Skips that can be done while solving the captcha
// This is likely not to happen, if it occurs retry for new images
const MAX_SKIPS = 10;
var skipCount = 0;

// Per-challenge strategy flags, (re)set by getSynonyms().
var USE_MOBILE_NET = false;
var USE_COLOUR_PATTERN = false;
var NEW_WORD_IDENTIFIED = false;

// Per-label MobileNet probability thresholds; labels not listed here use the
// default threshold (0.077) applied in matchImagesUsingTensorFlowMobileNet.
var probabilityForObject = new Map();
probabilityForObject.set("speedboat", 0.14);
probabilityForObject.set("fireboat", 0.4);
probabilityForObject.set("boathouse", 0.4);
probabilityForObject.set("submarine", 0.5);
probabilityForObject.set("printer", 0.05);
probabilityForObject.set("stretcher", 0.05);
probabilityForObject.set("rotisserie", 0.02);
probabilityForObject.set("spatula", 0.05);
  173.  
  174. String.prototype.includesOneOf = function(arrayOfStrings) {
  175.  
  176. //If this is not an Array, compare it as a String
  177. if (!Array.isArray(arrayOfStrings)) {
  178. return this.toLowerCase().includes(arrayOfStrings.toLowerCase());
  179. }
  180.  
  181. for (var i = 0; i < arrayOfStrings.length; i++) {
  182. if ((arrayOfStrings[i].substr(0, 1) == "=" && this.toLowerCase() == arrayOfStrings[i].substr(1).toLowerCase()) ||
  183. (this.toLowerCase().includes(arrayOfStrings[i].toLowerCase()))) {
  184. return true;
  185. }
  186. }
  187. return false;
  188. }
  189.  
  190. String.prototype.equalsOneOf = function(arrayOfStrings) {
  191.  
  192. //If this is not an Array, compare it as a String
  193. if (!Array.isArray(arrayOfStrings)) {
  194. return this.toLowerCase() == arrayOfStrings.toLowerCase();
  195. }
  196.  
  197. for (var i = 0; i < arrayOfStrings.length; i++) {
  198. if ((arrayOfStrings[i].substr(0, 1) == "=" && this.toLowerCase() == arrayOfStrings[i].substr(1).toLowerCase()) ||
  199. (this.toLowerCase() == arrayOfStrings[i].toLowerCase())) {
  200. return true;
  201. }
  202. }
  203. return false;
  204. }
  205.  
  206.  
  207.  
  208. // This script uses imageidentify API (wolfram) . You may also use TensorFlow.js, Yolo latest version to recognize common objects.
  209. //(When the cloud service is available for yolo, we can switch the API endpoint). Accuracy varies between Wolfram, Tensorflow and Yolo.
  210. // Use this as a reference to solve recaptcha/other captchas using scripts in browser. This is intended for learning purposes.
  211. // Using TensorFlow as fallback, but this requires good CPU in order to solve quickly.
// CPU utilization and memory utilization may go high when using TensorFlow.js
  213. function matchImages(imageUrl, word, i) {
  214.  
  215. GM_xmlhttpRequest({
  216. method: "POST",
  217. url: "https://www.imageidentify.com/objects/user-26a7681f-4b48-4f71-8f9f-93030898d70d/prd/urlapi/",
  218. headers: {
  219. "Content-Type": "application/x-www-form-urlencoded"
  220. },
  221. data: "image=" + encodeURIComponent(imageUrl),
  222. timeout: 8000,
  223. onload: function(response) {
  224. clickImages(response, imageUrl, word, i)
  225. },
  226. onerror: function(e) {
  227. //Using Fallback TensorFlow
  228. if (e && e.status && e.status != 0) {
  229. console.log(e);
  230. console.log("Using Fallback");
  231. }
  232. matchImagesUsingTensorFlow(imageUrl, word, i);
  233.  
  234. },
  235. ontimeout: function() {
  236. //console.log("Timed out. Using Fallback");
  237. matchImagesUsingTensorFlow(imageUrl, word, i);
  238. },
  239. });
  240.  
  241. }
  242.  
  243. function matchImagesUsingTensorFlow(imageUrl, word, i) {
  244. try {
  245. let img = new Image();
  246. img.crossOrigin = "Anonymous";
  247. img.src = imageUrl;
  248. img.onload = () => {
  249. initializeTensorFlowModel().then(model => model.detect(img))
  250. .then(function(predictions) {
  251. var predictionslen = predictions.length;
  252. for (var j = 0; j < predictionslen; j++) {
  253. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  254. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  255. predictions[j].class.includesOneOf(word)) {
  256. qSelectorAll(TASK_IMAGE)[i].click();
  257. break;
  258. }
  259. }
  260. img.removeAttribute("src");
  261. selectedImageCount = selectedImageCount + 1;
  262. });
  263. }
  264. } catch (err) {
  265. console.log(err.message);
  266. }
  267. }
  268.  
  269.  
  270. function matchImagesUsingTensorFlowMobileNet(imageUrl, word, i) {
  271.  
  272. try {
  273. let img = new Image();
  274. img.crossOrigin = "Anonymous";
  275. img.src = imageUrl;
  276. img.onload = () => {
  277. initializeTensorFlowMobilenetModel().then(model => model.classify(img))
  278. .then(function(predictions) {
  279. var predictionslen = predictions.length;
  280. for (var j = 0; j < predictionslen; j++) {
  281. var probability = 0.077;
  282. if (probabilityForObject.get(predictions[j].className)) {
  283. probability = probabilityForObject.get(predictions[j].className);
  284. }
  285.  
  286. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  287. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 &&
  288. predictions[j].className.includesOneOf(word) && predictions[j].probability > probability) {
  289. qSelectorAll(TASK_IMAGE)[i].click();
  290. break;
  291. }
  292. }
  293. img.removeAttribute("src");
  294. selectedImageCount = selectedImageCount + 1;
  295. });
  296. }
  297. } catch (err) {
  298. console.log(err.message);
  299. }
  300. }
  301.  
  302.  
  303. // TODO: Generalize this logic
  304. // Identifying this based on the observation of the images seen
  305. // The actual way would be to scan the entire image to find the lake.
  306. // Mobilenet model in browser js identifies the lake but does not provide coordinates
  307. // to identify if it is horizontal or vertical
  308. function matchImageForVerticalRiver(imageUrl, word, i) {
  309.  
  310. Jimp.read(imageUrl).then(function (data) {
  311.  
  312. data.getBase64(Jimp.AUTO, async function (err, src) {
  313. var img = document.createElement("img");
  314. img.setAttribute("src", src);
  315. await img.decode();
  316. var imageHeight = img.height;
  317. var imageWidth = img.width;
  318. var cropHeight = imageHeight - 0.03*imageHeight;
  319. let url = src.replace(/^data:image\/\w+;base64,/, "");
  320. let buffer = new Buffer(url, 'base64');
  321.  
  322. Jimp.read(buffer).then(function (data) {
  323. data.crop(0, cropHeight, imageWidth, imageHeight)
  324. .getBase64(Jimp.AUTO, async function (err, src) {
  325.  
  326. var img = document.createElement("img");
  327. img.src = src;
  328. await img.decode();
  329.  
  330. var c = document.createElement("canvas")
  331. c.width = img.width;
  332. c.height = img.height;
  333. var ctx = c.getContext("2d");
  334. ctx.drawImage(img, 0, 0);
  335.  
  336. var imageData = ctx.getImageData(0, 0, c.width, c.height);
  337. var data = imageData.data;
  338. var count = 0;
  339.  
  340. //Multiple combinations and distances are required for accuracy
  341. for (let i = 0; i < data.length; i+= 4) {
  342. if( (data[i] < 140 && data[i+1] < 110 && data[i+2] > 80 && data[i+3] == 255) ||
  343. (data[i] < 200 && data[i+1] < 200 && data[i+2] > 140 && data[i+3] == 255)){
  344. count++;
  345. }
  346. }
  347.  
  348. if(count > 0.001*(data.length/4) && count < data.length/8) {
  349. if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  350. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0) {
  351. qSelectorAll(TASK_IMAGE)[i].click();
  352. }
  353. }
  354.  
  355. img.removeAttribute("src");
  356. selectedImageCount = selectedImageCount + 1;
  357.  
  358. });
  359. });
  360. img.removeAttribute("src");
  361. });
  362. });
  363. }
  364.  
  365.  
// This approach is a naive approach to store the images and retrieve them.
// The accuracy is 100% as long as you store the selected images.
// Browser memory is used to store the images and gets cleared if you delete the browser cache and cookies.
// You may use this to store images in a remote place and retrieve them for quick access.
// This approach is only used during urgent scenarios before training the images.
// Image difference can also be done with the stored images to identify a new image based on the existing ones if they are nearly equal.
// Trainer mode: keys the tile image by its base64 data in GM storage and polls
// every 5s. If the key is already stored for this word, the tile is clicked
// (retrieval); if the user selected the tile manually (border opacity 1), the
// word is stored/overridden under that key for future challenges.
function matchImagesUsingTrainer(imageUrl, word, i) {

    Jimp.read(imageUrl).then(function (data) {

        data.getBase64(Jimp.AUTO, async function (err, src) {
            var trainerInterval = setInterval(function(){

                // Tile disappeared or was replaced by a new image: stop polling.
                if (!qSelectorAll(IMAGE)[i] || !(qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) ){
                    clearInterval(trainerInterval);
                    return;
                }

                // Known image stored for this word and not yet selected: click it.
                if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
                    qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0 && GM_getValue(src) && GM_getValue(src) == word) {
                    console.log("Retrieved image from trainer");
                    selectedImageCount = selectedImageCount + 1;
                    qSelectorAll(TASK_IMAGE)[i].click();
                    clearInterval(trainerInterval);
                    return;
                }

                // Overriding Previously Stored values: the user selected this tile
                // although it was stored under a different word.
                if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
                    qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 1 && GM_getValue(src) && GM_getValue(src) != word) {
                    console.log("Overriding image in the trainer");
                    selectedImageCount = selectedImageCount + 1;
                    GM_setValue(src,word);
                    console.log("Image Stored into database");
                    clearInterval(trainerInterval);
                    return;
                }

                // First time this image was selected by the user: store it.
                if (qSelectorAll(IMAGE)[i] && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
                    qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 1 && !GM_getValue(src)) {
                    selectedImageCount = selectedImageCount + 1;
                    GM_setValue(src,word);
                    console.log("Image Stored into database");
                    clearInterval(trainerInterval);
                    return;

                }

            },5000);

        });
    });
}
  419.  
  420.  
  421. //Function to sleep or delay
  422. async function delay(ms) {
  423. return new Promise(resolve => setTimeout(resolve, ms))
  424. }
  425.  
  426. //Different Models can be set later based on usecase
  427. //Ref Models: https://github.com/tensorflow/tfjs-models
  428. async function initializeTensorFlowModel() {
  429. if (!tensorFlowModel) {
  430. tensorFlowModel = await cocoSsd.load();
  431. }
  432. return tensorFlowModel;
  433. }
  434.  
  435. //MobileNet ssd model
  436. async function initializeTensorFlowMobilenetModel() {
  437. if (!tensorFlowMobileNetModel) {
  438. tensorFlowMobileNetModel = await mobilenet.load();
  439. }
  440. return tensorFlowMobileNetModel;
  441. }
  442.  
  443.  
  444. //Initialize TesseractWorker
  445. function initializeTesseractWorker() {
  446. if (!worker) {
  447. worker = new Tesseract.TesseractWorker();
  448. }
  449. }
  450.  
  451. function clickImages(response, imageUrl, word, i) {
  452.  
  453. try {
  454. if (response && response.responseText && (qSelectorAll(IMAGE)[i].style.background).includes(imageUrl) &&
  455. qSelectorAll(TASK_IMAGE_BORDER)[i].style.opacity == 0) {
  456. var responseJson = JSON.parse(response.responseText);
  457. if (responseJson.identify && responseJson.identify.title && responseJson.identify.title.includesOneOf(word)) {
  458. qSelectorAll(TASK_IMAGE)[i].click();
  459. } else if (responseJson.identify && responseJson.identify.entity && responseJson.identify.entity.includesOneOf(word)) {
  460. qSelectorAll(TASK_IMAGE)[i].click();
  461. } else if (responseJson.identify && responseJson.identify.alternatives) {
  462. var alternatives = JSON.stringify(responseJson.identify.alternatives);
  463. var alternativesJson = JSON.parse(alternatives);
  464.  
  465. for (var key in alternativesJson) {
  466. if (alternativesJson.hasOwnProperty(key)) {
  467. if ((alternativesJson[key].includesOneOf(word) || key.includesOneOf(word))) {
  468. qSelectorAll(TASK_IMAGE)[i].click();
  469. break;
  470. }
  471. }
  472. }
  473. } else {
  474. //No Match found
  475. }
  476.  
  477. selectedImageCount = selectedImageCount + 1;
  478.  
  479. } else {
  480. //console.log("Using Fallback TensorFlow");
  481. matchImagesUsingTensorFlow(imageUrl, word, i);
  482. }
  483.  
  484. } catch (err) {
  485. //Using Fallback TensorFlow
  486. //console.log(err.message);
  487. //console.log("Using Fallback TensorFlow");
  488. matchImagesUsingTensorFlow(imageUrl, word, i);
  489. }
  490. }
  491.  
  492. function qSelectorAll(selector) {
  493. return document.querySelectorAll(selector);
  494. }
  495.  
  496. function qSelector(selector) {
  497. return document.querySelector(selector);
  498. }
  499.  
  500.  
// Expands the challenge word into the list of model labels that should count
// as a match, and sets the per-challenge strategy flags (USE_MOBILE_NET,
// USE_COLOUR_PATTERN, NEW_WORD_IDENTIFIED) as a side effect.
// A "=" prefix on a label requires an exact match (see includesOneOf).
async function getSynonyms(word) {

    // Reset strategy flags for the new challenge word.
    USE_MOBILE_NET = false;
    USE_COLOUR_PATTERN = false;
    NEW_WORD_IDENTIFIED = false;

    //TODO: Format this to JSON string
    if (word == MOTORBUS || word == BUS) {
        word = ['bus', 'motorbus'];
        USE_MOBILE_NET = true;
    } else if (word == CAR) {
        word = ['=car', 'coupe', 'jeep', 'limo', 'sport utility vehicle', 'station wagon', 'hatchback', 'bumper car', 'modelT', 'electric battery', 'cruiser'];
        USE_MOBILE_NET = true;
    } else if (word == AIRPLANE) {
        word = ['airplane', 'plane', 'aircraft', 'aeroplane', 'hangar', 'Airdock', 'JumboJet', 'jetliner', 'stealth fighter', 'field artillery']
        USE_MOBILE_NET = true;
    } else if (word == TRAIN) {
        word = ['train', 'rail', 'cable car', 'locomotive', 'subway station']
        USE_MOBILE_NET = true;
    } else if (word == BOAT || word == SURFBOARD) {
        word = ['=boat', '=barge', 'houseboat', 'boathouse', 'speedboat', '=submarine', 'bobsled', 'catamaran', 'schooner', 'ocean liner', 'lifeboat', 'fireboat', 'yawl', 'pontoon', 'small boat', 'SnowBlower', 'Sea-coast', 'paddlewheel', 'paddle wheel', 'PaddleSteamer', 'Freighter', 'Sternwheeler', 'kayak', 'canoe', 'deck', 'DockingFacility', 'surfboard', '=ship', '=cruise', 'watercraft', 'sail', 'canvas', '=raft']
        USE_MOBILE_NET = true;
    } else if (word == BICYCLE) {
        word = ['bicycle-built-for-two', 'tandem bicycle', 'bicycle', 'tricycle', 'mountain bike', 'AcceleratorPedal', 'macaw', 'knot']
        USE_MOBILE_NET = true;
    } else if (word == MOTORCYCLE) {
        word = ['moped', 'motor scooter', 'scooter', 'motorcycle', 'windshield', 'dashboard']
        USE_MOBILE_NET = true;
    } else if (word == TRUCK) {
        word = ['truck', 'cargocontainer', 'bazooka']
        USE_MOBILE_NET = true;
    } else if (word == TRIMARAN || word == SPEEDBOAT || word == SEAPLANE) {
        // Rare words with no direct model label: use labels observed to
        // correlate with these images (empirical, not semantic).
        word = ['spatula', 'can opener', 'tin opener', 'monitor', 'screen', 'stretcher', 'printer', 'nail', 'mousetrap', 'TRIMARAN', 'space shuttle', 'ski', 'rotisserie', 'geyser', 'plate rack']
        USE_MOBILE_NET = true;
    } else if (word.includesOneOf(LIVING_ROOM_TYPES)) {
        word = ['bed', 'couch', 'chair', 'potted plant', 'dining table', 'clock', 'tv', 'book']
    } else if (word == ZEBRA) {
        word = ['zebra']
    } else if (word == CAT) {
        word = ['cat']
        USE_MOBILE_NET = true;
    } else if (word == DOG) {
        word = ['dog']
    } else if (word == VALLEY || word == VERTICAL_RIVER){
        // Handled by the colour-pattern heuristic, not by object detection.
        word = ['alp','volcano']
        USE_COLOUR_PATTERN = true;
    } else {
        // Unknown word: flag it so the trainer / guessing logic can take over.
        NEW_WORD_IDENTIFIED = true;
        console.log("Word does not match. New type identified::" + word);
    }

    return word

}
  555.  
  556. function isHidden(el) {
  557. return (el.offsetParent === null)
  558. }
  559.  
// Entry point. On the checkbox frame: keep clicking the anchor checkbox every
// 5s until it reports checked. On the challenge frame: load OCR worker and
// both TensorFlow models up front, then start solving.
if (window.location.href.includes("checkbox")) {
    var checkboxInterval = setInterval(function() {
        if (!qSelector(CHECK_BOX)) {
            //Wait until the checkbox element is visible
        } else if (qSelector(CHECK_BOX).getAttribute(ARIA_CHECKED) == "true") {
            // Already solved: stop polling.
            clearInterval(checkboxInterval);
        } else if (!isHidden(qSelector(CHECK_BOX)) && qSelector(CHECK_BOX).getAttribute(ARIA_CHECKED) == "false") {
            qSelector(CHECK_BOX).click();
        } else {
            return;
        }

    }, 5000);
} else {

    try {
        await initializeTesseractWorker();
        await initializeTensorFlowModel();
        await initializeTensorFlowMobilenetModel();
        selectImages();

    } catch (err) {
        console.log(err);
        console.log("Tesseract could not be initialized");
    }

}
  587.  
  588. function selectImagesAfterDelay(delay) {
  589. setTimeout(function() {
  590. selectImages();
  591. }, delay * 1000);
  592. }
  593.  
  594. function triggerEvent(el, type) {
  595. var e = document.createEvent('HTMLEvents');
  596. e.initEvent(type, false, true);
  597. el.dispatchEvent(e);
  598. }
  599.  
  600. function triggerMouseEvent(el, type) {
  601. var e = document.createEvent('MouseEvent');
  602. e.initEvent(type, false, true);
  603. el.dispatchEvent(e);
  604. }
  605.  
  606. // Small hack to select the nodes
  607. function unsure(targetNodeText) {
  608. var targetNode = Array.from(qSelectorAll('div'))
  609. .find(el => el.textContent === targetNodeText);
  610. //Works for now
  611. //TODO: Select clothing
  612. //TODO: Draw boxes around images
  613. if (targetNode) {
  614. triggerMouseEvent(targetNode, 'mousedown');
  615. triggerMouseEvent(targetNode, 'mouseup');
  616. if (qSelector(SUBMIT_BUTTON)) {
  617. qSelector(SUBMIT_BUTTON).click();
  618. }
  619. }
  620. return selectImagesAfterDelay(1);
  621. }
  622.  
  623. function getUrlFromString(urlString) {
  624.  
  625. var imageUrl = urlString.substring(
  626. urlString.indexOf('"') + 1,
  627. urlString.lastIndexOf('"')
  628. );
  629.  
  630. if (!imageUrl || !imageUrl.includes("https")) {
  631. return 0;
  632. }
  633.  
  634. return imageUrl;
  635. }
  636.  
  637.  
  638. function getImageList() {
  639. var imageList = [];
  640. if (qSelectorAll(IMAGE).length > 0) {
  641. for (var i = 0; i < 9; i++) {
  642. var urlString = qSelectorAll(IMAGE)[i].style.background;
  643. var imageUrl = getUrlFromString(urlString);
  644. if (imageUrl == 0) {
  645. //console.log("Image url is empty");
  646. return imageList;
  647. }
  648. imageList[i] = imageUrl;
  649. }
  650. }
  651. return imageList;
  652. }
  653.  
// Polls every 3s until all nine tiles were processed, then submits and
// schedules the next pass. Gives up and rescans after 8 polls. In trainer
// mode with a new word, submits early once enough tiles were handled.
function waitUntilImageSelection() {
    var imageIntervalCount = 0;
    var imageInterval = setInterval(function() {
        imageIntervalCount = imageIntervalCount + 1;
        if (selectedImageCount == 9) {
            // Every tile was examined: submit and wait for the next challenge.
            clearInterval(imageInterval);
            if (qSelector(SUBMIT_BUTTON)) {
                qSelector(SUBMIT_BUTTON).click();
            }
            return selectImagesAfterDelay(5);
        } else if (imageIntervalCount > 8) {
            // Timed out waiting for the matchers: rescan the grid from scratch.
            clearInterval(imageInterval);
            return selectImages();
        } else if(selectedImageCount > 2 && MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED && imageIntervalCount > 4){
            // Trainer mode, new word, enough manual selections: submit early.
            clearInterval(imageInterval);
            if (qSelector(SUBMIT_BUTTON)) {
                qSelector(SUBMIT_BUTTON).click();
            }
            return selectImagesAfterDelay(5);
        } else if(MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED && imageIntervalCount > 6){
            // Trainer mode fallback: submit whatever was selected so far.
            clearInterval(imageInterval);
            if (qSelector(SUBMIT_BUTTON)) {
                qSelector(SUBMIT_BUTTON).click();
            }
            return selectImagesAfterDelay(5);
        }else{
            // Keep waiting for the image matchers to finish.
        }
    }, 3000);
}
  684.  
  685. function waitForImagesToAppear() {
  686. var checkImagesSelectedCount = 0;
  687. var waitForImagesInterval = setInterval(function() {
  688. checkImagesSelectedCount = checkImagesSelectedCount + 1;
  689. if (qSelectorAll(IMAGE) && qSelectorAll(IMAGE).length == 9) {
  690. clearInterval(waitForImagesInterval);
  691. return selectImages();
  692. } else if (checkImagesSelectedCount > 60) {
  693. clearInterval(waitForImagesInterval);
  694. } else if (qSelector(CHALLENGE_INPUT_FIELD) && qSelector(NO_SELECTION).getAttribute(ARIA_HIDDEN) != true) {
  695. clearInterval(waitForImagesInterval);
  696. return imageUsingOCR();
  697. } else {
  698. //TODO: Identify Objects for the following (Ex: bed,chair,table etc)
  699. //Ref for clothing: https://www.youtube.com/watch?v=yWwzFnAnrLM, https://www.youtube.com/watch?v=FiNglI1wRNk,https://www.youtube.com/watch?v=oHAkK_9UCQ8
  700. var targetNodeList = ["Yes", "3 or more items of furniture", "Equipped space or room", "Photo is clean, no watermarks, logos or text overlays", "An interior photo of room", "Unsure", "Photo is sharp"];
  701. for (var j = 0; j < targetNodeList.length; j++) {
  702. var targetNode = Array.from(qSelectorAll('div'))
  703. .find(el => el.textContent === targetNodeList[j]);
  704. if (targetNode) {
  705. //console.log("Target Node Found");
  706. clearInterval(waitForImagesInterval);
  707. return unsure(targetNodeList[j]);
  708. }
  709. }
  710. }
  711. }, 5000);
  712. }
  713.  
  714. //TODO: Convert Image to base64 to avoid multiple calls
// First OCR attempt: darken + brighten, then greyscale, then Tesseract.
// On recognized text, submits it via inputChallenge and continues solving;
// otherwise falls through to preProcessImageMethod2.
function preProcessImage(base64Image, imageUrl) {

    //Darken and Brighten
    Jimp.read(base64Image).then(function(data) {
        data.color([

            {
                apply: 'darken',
                params: [20]
            }

        ]).color([

            {
                apply: 'brighten',
                params: [20]
            }

        ])
        .greyscale()
        .getBase64(Jimp.AUTO, function(err, src) {
            var img = document.createElement("img");
            img.setAttribute("src", src);

            worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
                //Remove Image After recognizing
                img.removeAttribute("src");
                //If null change to other methods
                if (data && data.text && data.text.length > 0) {
                    inputChallenge(postProcessImage(data), imageUrl);
                    return selectImages();
                } else {
                    preProcessImageMethod2(base64Image, imageUrl);
                }
            });

        });
    });

}
  755.  
  756.  
  757. function preProcessImageMethod2(base64Image, trimageUrl) {
  758.  
  759. //Multi Contrast darken and brighten
  760. Jimp.read(base64Image).then(function(data) {
  761. data.color([
  762.  
  763. {
  764. apply: 'darken',
  765. params: [20]
  766. }
  767.  
  768. ]).contrast(1).color([
  769.  
  770. {
  771. apply: 'brighten',
  772. params: [20]
  773. }
  774.  
  775. ]).contrast(1).greyscale().getBase64(Jimp.AUTO, function(err, src) {
  776. var img = document.createElement("img");
  777. img.setAttribute("src", src);
  778.  
  779. worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  780. //Remove Image After recognizing
  781. img.removeAttribute("src");
  782. if (data && data.text && data.text.length > 0) {
  783. inputChallenge(postProcessImage(data), imageUrl);
  784. return selectImages();
  785. } else {
  786. preProcessImageMethod3(base64Image, imageUrl);
  787. }
  788. });
  789. });
  790. });
  791.  
  792. }
  793.  
// Third OCR attempt: contrast boost + brighten only (no darken), then
// greyscale, then Tesseract. Falls through to preProcessImageMethod4 when no
// text is recognized.
function preProcessImageMethod3(base64Image, imageUrl) {
    //Multi Contrast only brighten
    Jimp.read(base64Image).then(function(data) {
        data.contrast(1).color([{
            apply: 'brighten',
            params: [20]
        }

        ])
        .contrast(1)
        .greyscale()
        .getBase64(Jimp.AUTO, function(err, src) {
            var img = document.createElement("img");
            img.setAttribute("src", src);

            worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
                //Remove Image After recognizing
                img.removeAttribute("src");
                if (data && data.text && data.text.length > 0) {
                    inputChallenge(postProcessImage(data), imageUrl);
                    return selectImages();
                } else {
                    preProcessImageMethod4(base64Image, imageUrl);
                }
            });
        });
    });
}
  822.  
// Last OCR attempt: downscale to width 256 and greyscale, then accept whatever
// Tesseract returns (no further fallback after this).
function preProcessImageMethod4(base64Image, imageUrl) {
    //Resize the image
    Jimp.read(base64Image).then(function(data) {
        data.resize(256, Jimp.AUTO)
            .quality(60) // set JPEG quality
            .greyscale() // set greyscale
            .getBase64(Jimp.AUTO, function(err, src) {
                var img = document.createElement("img");
                img.setAttribute("src", src);

                worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
                    //Remove Image After recognizing
                    img.removeAttribute("src");
                    inputChallenge(postProcessImage(data), imageUrl);
                    return selectImages();
                });
            });
    });

}
  843.  
  844. function postProcessImage(data) {
  845. var filterValues = ['\n', '{', '}', '[', ']'];
  846. for (var i = 0; i < filterValues.length; i++) {
  847. data.text = data.text.replaceAll(filterValues[i], "");
  848. }
  849. return data;
  850. }
  851.  
  852. // Using Tesseract to recognize images
  853. function imageUsingOCR() {
  854. try {
  855. //console.log("Image using OCR");
  856. var urlString = qSelector(IMAGE_FOR_OCR).style.background;
  857. var imageUrl = getUrlFromString(urlString);
  858. if (imageUrl == 0) {
  859. return selectImagesAfterDelay(1);
  860. }
  861.  
  862. Jimp.read(imageUrl).then(function(data) {
  863.  
  864. data.getBase64(Jimp.AUTO, function(err, src) {
  865.  
  866. var img = document.createElement("img");
  867. img.setAttribute("src", src);
  868. var base64Image = img.src;
  869.  
  870. preProcessImage(base64Image, imageUrl);
  871.  
  872. })});
  873.  
  874. } catch (err) {
  875. console.log(err.message);
  876. return selectImagesAfterDelay(1);
  877. }
  878. }
  879.  
  880.  
  881. async function convertTextToImage(text) {
  882.  
  883. //Convert Text to image
  884. var canvas = document.createElement("canvas");
  885. var textLength = text.length;
  886. canvas.width = 60 * textLength;
  887. canvas.height = 80;
  888. var ctx = canvas.getContext('2d');
  889. ctx.font = "30px Arial";
  890. ctx.fillText(text, 10, 50);
  891. var img = document.createElement("img");
  892. img.src = canvas.toDataURL();
  893.  
  894. return img;
  895. }
  896.  
  897. async function convertImageToText(img) {
  898.  
  899. await initializeTesseractWorker();
  900.  
  901. //Convert Image to Text
  902. var text = "";
  903. await worker.recognize(img, LANGUAGE_FOR_OCR).then(function(data) {
  904. text = data.text;
  905. // console.log("Recognized Text::" + text);
  906. });
  907. return text.trim();
  908. }
  909.  
  910. function areExampleImageUrlsChanged() {
  911.  
  912. var prevExampleUrls = exampleImageList;
  913. currentExampleUrls = [];
  914.  
  915. if (qSelectorAll(CHALLENGE_IMAGE).length > 0) {
  916. for (let i = 0; i < qSelectorAll(CHALLENGE_IMAGE).length; i++) {
  917. var urlString = qSelectorAll(CHALLENGE_IMAGE)[i].style.background;
  918. var imageUrl = getUrlFromString(urlString);
  919. if (imageUrl == 0) {
  920. console.log("Image url is empty, Retrying...");
  921. return true;
  922. }
  923. currentExampleUrls[i] = imageUrl;
  924. }
  925. }
  926.  
  927. if (prevExampleUrls.length != currentExampleUrls.length) {
  928. return true;
  929. }
  930.  
  931. for (let i = 0; i < currentExampleUrls.length; i++) {
  932.  
  933. if (prevExampleUrls[i] != currentExampleUrls[i]) {
  934. return true;
  935. }
  936. }
  937.  
  938. return false;
  939. }
  940.  
// Runs coco-ssd object detection over each example image url and pushes the
// distinct predicted class names into the global identifiedObjectsList.
// Sets the global identifyObjectsFromImagesCompleted flag once the last
// image's detection callback has run; callers poll that flag.
// NOTE(review): image loads can complete out of order, so the flag may be
// set while detections for earlier images are still pending — confirm that
// this best-effort behavior is acceptable.
async function identifyObjectsFromImages(imageUrlList) {
identifiedObjectsList = [];

for (let i = 0; i < imageUrlList.length; i++) {
try {
let img = new Image();
// Needed so TensorFlow can read pixels from a cross-origin image.
img.crossOrigin = "Anonymous";
img.src = imageUrlList[i];
img.onload = () => {
initializeTensorFlowModel().then(model => model.detect(img))
.then(function(predictions) {
let predictionslen = predictions.length;
// De-duplicate the predicted class names for this image.
let hashSet = new Set();
for (let j = 0; j < predictionslen; j++) {
hashSet.add(predictions[j].class);
}

hashSet.forEach((key) => {
identifiedObjectsList.push(key);
});

// Release the image source once detection is done.
img.removeAttribute("src");

// Signal completion when the final image in the list finishes.
if (i == imageUrlList.length - 1) {
identifyObjectsFromImagesCompleted = true;
}

})
}
} catch (e) {
console.log(e);
}

}

}
  977.  
// MobileNet fallback classifier: classifies each example image url and
// pushes the distinct predicted labels into the global identifiedObjectsList.
// MobileNet class names may contain several comma-separated synonyms, which
// are split and added individually.
// Sets the global identifyObjectsFromImagesCompleted flag once the last
// image's classification callback has run; callers poll that flag.
// NOTE(review): as with identifyObjectsFromImages, image loads can finish
// out of order, so the flag may be set early — confirm acceptable.
async function identifyObjectsFromImagesUsingMobileNet(imageUrlList) {
identifiedObjectsList = [];

for (let i = 0; i < imageUrlList.length; i++) {
try {
let img = new Image();
// Needed so TensorFlow can read pixels from a cross-origin image.
img.crossOrigin = "Anonymous";
img.src = imageUrlList[i];
img.onload = () => {
initializeTensorFlowMobilenetModel().then(model => model.classify(img))
.then(function(predictions) {

let predictionslen = predictions.length;
// De-duplicate labels; split comma-separated synonym lists.
let hashSet = new Set();
for (let j = 0; j < predictionslen; j++) {
if(predictions[j].className.includes(",")){
var multiPredictions = predictions[j].className.split(',');
for(let k=0; k< multiPredictions.length;k++){
hashSet.add(multiPredictions[k].trim());
}
}else{
hashSet.add(predictions[j].className);
}
}

hashSet.forEach((key) => {
identifiedObjectsList.push(key);
});

// Release the image source once classification is done.
img.removeAttribute("src");

// Signal completion when the final image in the list finishes.
if (i == imageUrlList.length - 1) {
identifyObjectsFromImagesCompleted = true;
}

})
}
} catch (e) {
console.log(e);
}

}

}
  1022.  
  1023. async function getWordFromIdentifiedObjects(identifiedObjectsList) {
  1024.  
  1025. var hashMap = new Map();
  1026. for (var i = 0; i < identifiedObjectsList.length; i++) {
  1027. if (hashMap.has(identifiedObjectsList[i])) {
  1028. hashMap.set(identifiedObjectsList[i], hashMap.get(identifiedObjectsList[i]) + 1)
  1029. } else {
  1030. hashMap.set(identifiedObjectsList[i], 1)
  1031. }
  1032. }
  1033. var maxCount = 0,
  1034. objectKey = -1;
  1035. await hashMap.forEach((value, key) => {
  1036. if (maxCount < value && (key.equalsOneOf(TRANSPORT_TYPES) ||
  1037. key.equalsOneOf(LIVING_ROOM_TYPES) ||
  1038. key.equalsOneOf(ANIMAL_TYPES)|| key == VALLEY)) {
  1039. objectKey = key;
  1040. maxCount = value;
  1041. }
  1042.  
  1043. });
  1044.  
  1045. return objectKey;
  1046. }
  1047.  
  1048.  
  1049. function inputChallenge(data, imageUrl) {
  1050. try {
  1051. if ((qSelector(IMAGE_FOR_OCR).style.background).includes(imageUrl)) {
  1052. console.log(data.text);
  1053. var targetNode = qSelector(CHALLENGE_INPUT_FIELD);
  1054. targetNode.value = data.text.replaceAll("\n", "");
  1055. var challengeInput = qSelector(CHALLENGE_INPUT);
  1056. triggerEvent(challengeInput, 'input');
  1057. // Set a timeout if you want to see the text
  1058. qSelector(SUBMIT_BUTTON).click();
  1059. }
  1060.  
  1061. } catch (err) {
  1062. console.log(err.message);
  1063. }
  1064. }
  1065.  
  1066. async function identifyWordFromExamples() {
  1067.  
  1068. var word = -1;
  1069. if (areExampleImageUrlsChanged()) {
  1070. exampleImageList = currentExampleUrls;
  1071. if (exampleImageList.length == 0) {
  1072. return -1;
  1073. }
  1074. identifyObjectsFromImages(exampleImageList);
  1075. while (!identifyObjectsFromImagesCompleted) {
  1076. await delay(2000)
  1077. }
  1078. identifyObjectsFromImagesCompleted = false;
  1079. word = await getWordFromIdentifiedObjects(identifiedObjectsList);
  1080.  
  1081. //Word has not been identified yet, use mobile net to recognize images
  1082. if (word == -1) {
  1083. //Initialiaze MobileNet Model
  1084. await initializeTensorFlowMobilenetModel();
  1085. identifyObjectsFromImagesUsingMobileNet(exampleImageList);
  1086. while (!identifyObjectsFromImagesCompleted) {
  1087. await delay(2000)
  1088. }
  1089. identifyObjectsFromImagesCompleted = false;
  1090.  
  1091. word = getWordFromIdentifiedObjects(identifiedObjectsList);
  1092. }
  1093. return word;
  1094.  
  1095. } else {
  1096. return getWordFromIdentifiedObjects(identifiedObjectsList);
  1097. }
  1098.  
  1099. return word;
  1100. }
  1101.  
  1102. var prevObject = "";
  1103.  
  1104. function isObjectChanged() {
  1105. if (!prevObject && qSelector(PROMPT_TEXT)) {
  1106. prevObject = qSelector(PROMPT_TEXT).innerText;
  1107. return true;
  1108. }
  1109.  
  1110. if (prevObject && qSelector(PROMPT_TEXT) &&
  1111. prevObject == qSelector(PROMPT_TEXT).innerText) {
  1112. return false;
  1113. }
  1114.  
  1115. return true;
  1116.  
  1117. }
  1118.  
  1119.  
// Identifies the challenge word that images must be matched against.
// English prompts are read directly from the prompt element (with a
// text -> canvas -> OCR round trip to normalise unfamiliar or
// cyrillic-looking characters); non-English prompts fall back to object
// detection on the example images. Returns the word, or -1 on failure.
async function identifyWord() {
var word = -1;
try {
// Only parse the prompt directly when the challenge is in English.
if (window.location.href.includes('&hl=en') || (ENABLE_DEFAULT_LANGUAGE && DEFAULT_LANGUAGE == LANG_ENGLISH)) {
word = qSelector(PROMPT_TEXT) ? qSelector(PROMPT_TEXT).innerText : word;
// Strip the leading article ("a" / "an") from the prompt sentence.
if (word && (word.includes(SENTENCE_TEXT_A) || word.includes(SENTENCE_TEXT_AN))) {
word = word.replace(SENTENCE_TEXT_A, '');
word = word.replace(SENTENCE_TEXT_AN, '');
}

// NOTE(review): if no prompt element exists, word is still the number -1
// here and the method calls below throw; the catch then returns -1.
// Looks intentional (catch-as-control-flow) — confirm.
if (word.equalsOneOf(TRANSPORT_TYPES) || word == VERTICAL_RIVER) {
return word;
} else {
//Using OCR on Text for accurate result
console.log("New word or different cyrillic");
// Render the prompt to an image and OCR it back so visually-similar
// non-ASCII characters are normalised before matching.
var img = await convertTextToImage(word);
word = await convertImageToText(img);
word = word.replace(SENTENCE_TEXT_A, '');
word = word.replace(SENTENCE_TEXT_AN, '');
if (word.equalsOneOf(TRANSPORT_TYPES) || word == VERTICAL_RIVER) {
return word;
} else {
if(MATCH_IMAGES_USING_TRAINER){
// Trainer mode: return the OCR-normalised prompt text as-is.
word = qSelector(PROMPT_TEXT) ? qSelector(PROMPT_TEXT).innerText : -1;
if(word){
img = await convertTextToImage(word);
word = await convertImageToText(img);
}
return word;
}else{
// Unknown word: fall back to detecting objects in the examples.
word = await identifyWordFromExamples();
}
}
}
} else {

//If word is not english
//Identify Images from Example
word = await identifyWordFromExamples();
}

} catch (e) {
console.log(e);
}

return word;
}
  1167.  
// Word identified on the previous pass; reused while the prompt is unchanged.
var prevWord = "";

// Main solving loop: waits for the 3x3 challenge grid, identifies the
// target word, then dispatches each of the 9 tiles to the configured
// matching strategy (trainer / colour pattern / MobileNet / coco-ssd /
// word match). Re-schedules itself via selectImagesAfterDelay on any
// transient failure.
async function selectImages() {

// Optionally switch the challenge UI to the configured default language.
if (ENABLE_DEFAULT_LANGUAGE) {
for (let i = 0; i < qSelectorAll(LANGUAGE_SELECTOR).length; i++) {
if (qSelectorAll(LANGUAGE_SELECTOR)[i].innerText == DEFAULT_LANGUAGE) {
document.querySelectorAll(LANGUAGE_SELECTOR)[i].click();
await delay(1000);
}
}
}

// Proceed only when the full 3x3 grid is visible and selectable.
if (qSelectorAll(IMAGE) && qSelectorAll(IMAGE).length == 9 && qSelector(NO_SELECTION).getAttribute(ARIA_HIDDEN) != true) {
selectedImageCount = 0;
try {

// Re-identify the word only when the prompt text has changed.
if (isObjectChanged()) {
prevWord = await identifyWord();
}

var word = prevWord;

if (word == -1 && skipCount >= MAX_SKIPS) {
console.log("Max Retries Attempted. Captcha cannot be solved");
return;
} else if (word == -1 && skipCount < MAX_SKIPS) {
// Unknown word: skip this challenge and try the next one.
skipCount++;
if (qSelector(SUBMIT_BUTTON)) {
qSelector(SUBMIT_BUTTON).click();
}
return selectImagesAfterDelay(5);
} else {
//Get Synonyms for the word
word = await getSynonyms(word);
//console.log("words are::" + word);
}


} catch (err) {
console.log(err.message);
return selectImagesAfterDelay(5);
}

var imageList = [];
try {
imageList = getImageList();
if (imageList.length != 9) {
//console.log("Waiting");
// Image containers are visible but there are no urls in the image
// Skip the image
if (qSelector(SUBMIT_BUTTON)) {
qSelector(SUBMIT_BUTTON).click();
}
return selectImagesAfterDelay(5);
}
} catch (err) {
console.log(err.message);
return selectImagesAfterDelay(5);
}

//Identifying word for seaplane and matching images
//TODO: Refactor Code to combine different models or use only one model based on accuracy
// Strategy dispatch: exactly one matcher runs per tile, chosen by the
// feature flags in priority order (trainer > colour > MobileNet > default).
if(word && word != -1 && MATCH_IMAGES_USING_TRAINER && NEW_WORD_IDENTIFIED){
for (let i = 0; i < 9; i++) {
matchImagesUsingTrainer(imageList[i], word, i);
}
}else if(word && word != -1 && USE_COLOUR_PATTERN){
for (let i = 0; i < 9; i++) {
matchImageForVerticalRiver(imageList[i], word, i);
}
}else if (word && word != -1 && USE_MOBILE_NET) {
for (let i = 0; i < 9; i++) {
matchImagesUsingTensorFlowMobileNet(imageList[i], word, i);
}
} else if (word && word != -1) {
for (var i = 0; i < 9; i++) {
if (ENABLE_TENSORFLOW) {
matchImagesUsingTensorFlow(imageList[i], word, i);
} else {
matchImages(imageList[i], word, i);
}
}
}
waitUntilImageSelection();

} else {
// Grid not ready yet; poll until the images appear.
waitForImagesToAppear();
}
}
  1258.  
  1259.  
  1260. })();